repo_name (string, 5-92 chars) | path (string, 4-232 chars) | copies (string, 19 classes) | size (string, 4-7 chars) | content (string, 721-1.04M chars) | license (string, 15 classes) | hash (int64) | line_mean (float64, 6.51-99.9) | line_max (int64, 15-997) | alpha_frac (float64, 0.25-0.97) | autogenerated (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---|
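Each record of the dump carries the eleven fields above; a minimal sketch of how they map onto a Python record (types follow the header; the example values come from the first record below, with its content elided):

from dataclasses import dataclass


@dataclass
class CodeSample:
    """One record of the dump, mirroring the column header above."""
    repo_name: str        # e.g. "werdeil/pibooth"
    path: str             # file path inside the repository
    copies: str           # duplicate count, stored as a string class
    size: str             # file size in bytes, stored as a string
    content: str          # full source text of the file
    license: str          # license tag, e.g. "mit"
    hash: int             # int64 content hash
    line_mean: float      # mean line length of the file
    line_max: int         # length of the longest line
    alpha_frac: float     # fraction of alphabetic characters
    autogenerated: bool   # whether the file looks auto-generated


sample = CodeSample("werdeil/pibooth", "pibooth/controls/light.py", "1", "2328",
                    "...", "mit", 8372595440265631000, 24.866667, 68,
                    0.531357, False)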
werdeil/pibooth | pibooth/controls/light.py | 1 | 2328 |
# -*- coding: utf-8 -*-
import threading
from pibooth.controls import GPIO
class BlinkingThread(threading.Thread):
"""Thread which manage blinking LEDs synchronously.
"""
def __init__(self):
threading.Thread.__init__(self)
self.daemon = True
self._leds = []
self._tick = 0.3
self._lock = threading.Lock()
self._stop_event = threading.Event()
self.start()
def register(self, led):
"""Add a new LED to manage.
"""
with self._lock:
if led not in self._leds:
self._leds.append(led)
def unregister(self, led):
"""Remove the given LED from the blinking management.
"""
with self._lock:
if led in self._leds:
self._leds.remove(led)
def run(self):
"""Cyclic call to the method :py:meth:`PtbLed.switch_on` and
:py:meth:`PtbLed.switch_off` of the registered LED.
"""
sequence = ['switch_on', 'switch_off']
while not self._stop_event.is_set():
for func_name in sequence:
with self._lock:
for led in self._leds:
getattr(led, func_name)()
if self._stop_event.wait(self._tick):
return # Stop requested
def stop(self):
"""Stop the thread.
"""
self._stop_event.set()
self.join()
class PtbLed(object):
"""LED management.
"""
_blinking_thread = BlinkingThread()
def __init__(self, pin):
self.pin = pin
GPIO.setup(pin, GPIO.OUT)
def switch_on(self):
"""Switch on the LED.
"""
if threading.current_thread() != self._blinking_thread:
self._blinking_thread.unregister(self)
GPIO.output(self.pin, GPIO.HIGH)
def switch_off(self):
"""Switch off the LED.
"""
if threading.current_thread() != self._blinking_thread:
self._blinking_thread.unregister(self)
GPIO.output(self.pin, GPIO.LOW)
def blink(self):
"""Blink the LED.
"""
self._blinking_thread.register(self)
def quit(self):
"""Switch off and stop the blinking thread.
"""
self.switch_off()
self._blinking_thread.stop()
| mit | 8,372,595,440,265,631,000 | 24.866667 | 68 | 0.531357 | false |
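A minimal usage sketch for the module above, assuming a Raspberry Pi with pibooth installed and an LED wired to an arbitrary example pin:

from pibooth.controls.light import PtbLed

led = PtbLed(7)      # example pin; GPIO.setup() is called in __init__
led.blink()          # register with the shared BlinkingThread
# ... some time later ...
led.switch_on()      # unregisters from blinking and drives the pin HIGH
led.quit()           # switches off and stops the shared blinking thread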
agconti/njode | env/lib/python2.7/site-packages/allauth/account/adapter.py | 1 | 11030 |
import warnings
import json
from django.conf import settings
from django.http import HttpResponse
from django.template.loader import render_to_string
from django.template import TemplateDoesNotExist
from django.contrib.sites.models import Site
from django.core.mail import EmailMultiAlternatives, EmailMessage
from django.utils.translation import ugettext_lazy as _
from django import forms
from django.contrib import messages
try:
from django.utils.encoding import force_text
except ImportError:
from django.utils.encoding import force_unicode as force_text
from ..utils import (import_attribute, get_user_model,
generate_unique_username,
resolve_url)
from . import app_settings
class DefaultAccountAdapter(object):
def stash_verified_email(self, request, email):
request.session['account_verified_email'] = email
def unstash_verified_email(self, request):
ret = request.session.get('account_verified_email')
request.session['account_verified_email'] = None
return ret
def is_email_verified(self, request, email):
"""
Checks whether or not the email address is already verified
beyond allauth scope, for example, by having accepted an
invitation before signing up.
"""
ret = False
verified_email = request.session.get('account_verified_email')
if verified_email:
ret = verified_email.lower() == email.lower()
return ret
def format_email_subject(self, subject):
prefix = app_settings.EMAIL_SUBJECT_PREFIX
if prefix is None:
site = Site.objects.get_current()
prefix = u"[{name}] ".format(name=site.name)
return prefix + force_text(subject)
def render_mail(self, template_prefix, email, context):
"""
Renders an e-mail to `email`. `template_prefix` identifies the
e-mail that is to be sent, e.g. "account/email/email_confirmation"
"""
subject = render_to_string('{0}_subject.txt'.format(template_prefix),
context)
# remove superfluous line breaks
subject = " ".join(subject.splitlines()).strip()
subject = self.format_email_subject(subject)
bodies = {}
for ext in ['html', 'txt']:
try:
template_name = '{0}_message.{1}'.format(template_prefix, ext)
bodies[ext] = render_to_string(template_name,
context).strip()
except TemplateDoesNotExist:
if ext == 'txt' and not bodies:
# We need at least one body
raise
if 'txt' in bodies:
msg = EmailMultiAlternatives(subject,
bodies['txt'],
settings.DEFAULT_FROM_EMAIL,
[email])
if 'html' in bodies:
msg.attach_alternative(bodies['html'], 'text/html')
else:
msg = EmailMessage(subject,
bodies['html'],
settings.DEFAULT_FROM_EMAIL,
[email])
msg.content_subtype = 'html' # Main content is now text/html
return msg
def send_mail(self, template_prefix, email, context):
msg = self.render_mail(template_prefix, email, context)
msg.send()
def get_login_redirect_url(self, request):
"""
Returns the default URL to redirect to after logging in. Note
that URLs passed explicitly (e.g. by passing along a `next`
GET parameter) take precedence over the value returned here.
"""
assert request.user.is_authenticated()
url = getattr(settings, "LOGIN_REDIRECT_URLNAME", None)
if url:
warnings.warn("LOGIN_REDIRECT_URLNAME is deprecated, simply"
" use LOGIN_REDIRECT_URL with a URL name",
DeprecationWarning)
else:
url = settings.LOGIN_REDIRECT_URL
return resolve_url(url)
def get_logout_redirect_url(self, request):
"""
        Returns the URL to redirect to after the user logs out. Note that
        this method is also invoked if you attempt to log out while no user
        is logged in. Therefore, request.user is not guaranteed to be an
        authenticated user.
"""
return resolve_url(app_settings.LOGOUT_REDIRECT_URL)
def get_email_confirmation_redirect_url(self, request):
"""
The URL to return to after successful e-mail confirmation.
"""
if request.user.is_authenticated():
if app_settings.EMAIL_CONFIRMATION_AUTHENTICATED_REDIRECT_URL:
return \
app_settings.EMAIL_CONFIRMATION_AUTHENTICATED_REDIRECT_URL
else:
return self.get_login_redirect_url(request)
else:
return app_settings.EMAIL_CONFIRMATION_ANONYMOUS_REDIRECT_URL
def is_open_for_signup(self, request):
"""
        Checks whether or not the site is open for signups.
        Besides simply returning True/False, you can also intervene in the
        regular flow by raising an ImmediateHttpResponse.
"""
return True
def new_user(self, request):
"""
Instantiates a new User instance.
"""
user = get_user_model()()
return user
def populate_username(self, request, user):
"""
Fills in a valid username, if required and missing. If the
username is already present it is assumed to be valid
(unique).
"""
from .utils import user_username, user_email, user_field
first_name = user_field(user, 'first_name')
last_name = user_field(user, 'last_name')
email = user_email(user)
username = user_username(user)
if app_settings.USER_MODEL_USERNAME_FIELD:
user_username(user,
username
or generate_unique_username([first_name,
last_name,
email,
'user']))
def save_user(self, request, user, form, commit=True):
"""
Saves a new `User` instance using information provided in the
signup form.
"""
from .utils import user_username, user_email, user_field
data = form.cleaned_data
first_name = data.get('first_name')
last_name = data.get('last_name')
email = data.get('email')
username = data.get('username')
user_email(user, email)
user_username(user, username)
user_field(user, 'first_name', first_name or '')
user_field(user, 'last_name', last_name or '')
if 'password1' in data:
user.set_password(data["password1"])
else:
user.set_unusable_password()
self.populate_username(request, user)
if commit:
# Ability not to commit makes it easier to derive from
# this adapter by adding
user.save()
return user
def clean_username(self, username):
"""
Validates the username. You can hook into this if you want to
(dynamically) restrict what usernames can be chosen.
"""
from django.contrib.auth.forms import UserCreationForm
USERNAME_REGEX = UserCreationForm().fields['username'].regex
if not USERNAME_REGEX.match(username):
raise forms.ValidationError(_("Usernames can only contain "
"letters, digits and @/./+/-/_."))
# TODO: Add regexp support to USERNAME_BLACKLIST
username_blacklist_lower = [ub.lower() for ub in app_settings.USERNAME_BLACKLIST]
if username.lower() in username_blacklist_lower:
raise forms.ValidationError(_("Username can not be used. "
"Please use other username."))
username_field = app_settings.USER_MODEL_USERNAME_FIELD
assert username_field
user_model = get_user_model()
try:
query = {username_field + '__iexact': username}
user_model.objects.get(**query)
except user_model.DoesNotExist:
return username
raise forms.ValidationError(_("This username is already taken. Please "
"choose another."))
def clean_email(self, email):
"""
Validates an email value. You can hook into this if you want to
(dynamically) restrict what email addresses can be chosen.
"""
return email
def add_message(self, request, level, message_template,
message_context={}, extra_tags=''):
"""
Wrapper of `django.contrib.messages.add_message`, that reads
the message text from a template.
"""
if 'django.contrib.messages' in settings.INSTALLED_APPS:
try:
message = render_to_string(message_template,
message_context).strip()
if message:
messages.add_message(request, level, message,
extra_tags=extra_tags)
except TemplateDoesNotExist:
pass
def ajax_response(self, request, response, redirect_to=None, form=None):
data = {}
if redirect_to:
status = 200
data['location'] = redirect_to
if form:
if form.is_valid():
status = 200
else:
status = 400
data['form_errors'] = form._errors
if hasattr(response, 'render'):
response.render()
data['html'] = response.content.decode('utf8')
return HttpResponse(json.dumps(data),
status=status,
content_type='application/json')
def login(self, request, user):
from django.contrib.auth import login
# HACK: This is not nice. The proper Django way is to use an
# authentication backend
if not hasattr(user, 'backend'):
user.backend \
= "allauth.account.auth_backends.AuthenticationBackend"
login(request, user)
def confirm_email(self, request, email_address):
"""
Marks the email address as confirmed on the db
"""
email_address.verified = True
email_address.set_as_primary(conditional=True)
email_address.save()
def set_password(self, user, password):
user.set_password(password)
user.save()
def get_adapter():
return import_attribute(app_settings.ADAPTER)()
| bsd-3-clause | 4,666,278,003,445,794,000 | 37.566434 | 89 | 0.568722 | false |
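A minimal sketch of how this adapter is customised in practice: subclass it and point django-allauth at the subclass via the ACCOUNT_ADAPTER setting (module and class names below are illustrative):

# myproject/adapters.py (illustrative)
from allauth.account.adapter import DefaultAccountAdapter


class ClosedSignupAdapter(DefaultAccountAdapter):
    def is_open_for_signup(self, request):
        # Close the site for self-service signups.
        return False

# settings.py (illustrative)
# ACCOUNT_ADAPTER = 'myproject.adapters.ClosedSignupAdapter'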
yunify/qingstor-sdk-python | qingstor/sdk/build.py | 1 | 7514 |
# +-------------------------------------------------------------------------
# | Copyright (C) 2016 Yunify, Inc.
# +-------------------------------------------------------------------------
# | Licensed under the Apache License, Version 2.0 (the "License");
# | you may not use this work except in compliance with the License.
# | You may obtain a copy of the License in the LICENSE file, or at:
# |
# | http://www.apache.org/licenses/LICENSE-2.0
# |
# | Unless required by applicable law or agreed to in writing, software
# | distributed under the License is distributed on an "AS IS" BASIS,
# | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# | See the License for the specific language governing permissions and
# | limitations under the License.
# +-------------------------------------------------------------------------
# -*- coding: utf-8 -*-
import sys
import json
import base64
import hashlib
import logging
import platform
import mimetypes
from urllib.parse import urlparse, quote, urlunparse
from requests import Request as Req
from requests.structures import CaseInsensitiveDict
from . import __version__
from .constant import BINARY_MIME_TYPE, JSON_MIME_TYPE
from .utils.helper import current_time, url_quote, should_quote, should_url_quote
class Builder:
def __init__(self, config, operation):
self.config = config
self.operation = operation
self.logger = logging.getLogger("qingstor-sdk")
self.properties = self.parse_request_properties()
def __repr__(self):
return "<Builder>"
def parse(self):
parsed_operation = dict()
parsed_operation["Method"] = self.operation["Method"]
parsed_operation["URI"] = self.parse_request_uri()
self.logger.debug(f'parsed_uri: {parsed_operation["URI"]}')
parsed_body, _ = self.parse_request_body()
if parsed_body:
parsed_operation["Body"] = parsed_body
parsed_headers = self.parse_request_headers()
if parsed_headers:
parsed_operation["Headers"] = parsed_headers
req = Req(
parsed_operation["Method"],
parsed_operation["URI"],
data=parsed_body,
headers=parsed_headers
)
return req
def parse_request_params(self):
parsed_params = dict()
if "Params" in self.operation:
for (k, v) in self.operation["Params"].items():
if v != "" and v is not None:
parsed_params[k] = quote(str(v))
return parsed_params
def parse_request_headers(self):
parsed_headers = CaseInsensitiveDict()
# Parse headers from operation.
if "Headers" in self.operation:
for (k, v) in self.operation["Headers"].items():
k = k.lower()
if should_quote(k):
v = quote(v)
elif should_url_quote(k):
v = url_quote(v)
parsed_headers[k] = v
# Handle header Host
parsed_headers["Host"] = self.parse_request_host()
# Handle header Date
parsed_headers["Date"] = self.operation["Headers"].get(
"Date", current_time()
)
# Handle header User-Agent
parsed_headers["User-Agent"] = (
"qingstor-sdk-python/{sdk_version} "
"(Python v{python_version}; {system})"
).format(
sdk_version=__version__,
python_version=platform.python_version(),
system=sys.platform
)
# Handle header X-QS-MetaData dict, for example:
# {'X-QS-MetaData': {'x': 'vx', 'y': 'vy'}} => {'X-QS-Meta-x': 'vx', 'X-QS-Meta-y': 'vy'}
# https://docs.qingcloud.com/qingstor/api/common/metadata#%E5%A6%82%E4%BD%95%E5%88%9B%E5%BB%BA%E5%AF%B9%E8%B1%A1%E5%85%83%E6%95%B0%E6%8D%AE
if 'X-QS-MetaData' in parsed_headers:
metadata = parsed_headers.get('X-QS-MetaData')
if isinstance(metadata, dict) and len(metadata) != 0:
for k, v in parsed_headers['X-QS-MetaData'].items():
parsed_headers["X-QS-Meta-{}".format(k)] = v
del parsed_headers['X-QS-MetaData']
# Handle header Content-Type
parsed_body, is_json = self.parse_request_body()
filename = urlparse(self.parse_request_uri()).path
parsed_headers["Content-Type"] = self.operation[
"Headers"].get("Content-Type") or mimetypes.guess_type(filename)[0]
if is_json:
parsed_headers["Content-Type"] = JSON_MIME_TYPE
if parsed_headers["Content-Type"] is None:
parsed_headers["Content-Type"] = BINARY_MIME_TYPE
# Handle specific API
if "API" in self.operation:
if self.operation["API"] == "DeleteMultipleObjects":
md5obj = hashlib.md5()
md5obj.update(parsed_body.encode())
parsed_headers["Content-MD5"] = base64.b64encode(
md5obj.digest()
).decode()
return parsed_headers
def parse_request_body(self):
parsed_body = None
is_json = False
if "Body" in self.operation and self.operation["Body"]:
parsed_body = self.operation["Body"]
elif "Elements" in self.operation and self.operation["Elements"]:
parsed_body = json.dumps(self.operation["Elements"], sort_keys=True)
is_json = True
return parsed_body, is_json
def parse_request_properties(self):
parsed_properties = dict()
for (k, v) in self.operation["Properties"].items():
if v != "" and v is not None:
parsed_properties[k] = quote(v)
return parsed_properties
def parse_request_host(self):
zone = self.properties.get("zone", "")
bucket_name = self.properties.get("bucket-name", "")
(protocol, endpoint,
port) = (self.config.protocol, self.config.host, self.config.port)
# Omit port if https:443 or http:80
if not ((protocol == "https" and port == 443) or
(protocol == "http" and port == 80)):
endpoint = f"{endpoint}:{port}"
if zone != "":
endpoint = f"{zone}.{endpoint}"
if bucket_name != "" and self.config.enable_virtual_host_style:
endpoint = f"{bucket_name}.{endpoint}"
return endpoint
def parse_request_uri(self):
request_uri = self.operation["URI"]
if self.config.enable_virtual_host_style and request_uri.startswith(
"/<bucket-name>"):
request_uri = request_uri.replace("/<bucket-name>", "")
if len(self.properties):
for (k, v) in self.properties.items():
request_uri = request_uri.replace("<%s>" % k, v)
parsed_uri = f"{self.config.protocol}://{self.parse_request_host()}{request_uri}"
parsed_params = self.parse_request_params()
if len(parsed_params):
scheme, netloc, path, params, req_query, fragment = urlparse(
parsed_uri, allow_fragments=False
)
query = [req_query]
for (k, v) in parsed_params.items():
query.append("%s=%s" % (k, v))
if not req_query:
query.pop(0)
parsed_uri = urlunparse(
(scheme, netloc, path, params, "", fragment)
) + "?" + "&".join(sorted(query))
return parsed_uri
| apache-2.0 | 5,161,267,017,462,213,000 | 37.142132 | 147 | 0.560687 | false |
uclouvain/osis | base/migrations/0577_auto_20210201_1741.py | 1 | 4928 |
# Generated by Django 2.2.14 on 2021-02-01 17:41
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('base', '0576_populate_not_null_fields'),
# ('continuing_education', '0084_auto_20210127_1119'),
('dissertation', '0051_auto_20191211_1458'),
]
operations = [
migrations.AlterField(
model_name='offerenrollment',
name='education_group_year',
field=models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='base.EducationGroupYear'),
),
migrations.AlterField(
model_name='offeryearcalendar',
name='education_group_year',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='base.EducationGroupYear'),
),
migrations.AlterField(
model_name='sessionexam',
name='education_group_year',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='base.EducationGroupYear'),
),
migrations.RemoveField(
model_name='offeryear',
name='academic_year',
),
migrations.RemoveField(
model_name='offeryear',
name='campus',
),
migrations.RemoveField(
model_name='offeryear',
name='country',
),
migrations.RemoveField(
model_name='offeryear',
name='entity_administration',
),
migrations.RemoveField(
model_name='offeryear',
name='entity_administration_fac',
),
migrations.RemoveField(
model_name='offeryear',
name='entity_management',
),
migrations.RemoveField(
model_name='offeryear',
name='entity_management_fac',
),
migrations.RemoveField(
model_name='offeryear',
name='grade_type',
),
migrations.RemoveField(
model_name='offeryear',
name='offer',
),
migrations.RemoveField(
model_name='offeryear',
name='offer_type',
),
migrations.RemoveField(
model_name='offeryear',
name='parent',
),
migrations.RemoveField(
model_name='offeryeardomain',
name='domain',
),
migrations.RemoveField(
model_name='offeryeardomain',
name='offer_year',
),
migrations.AlterUniqueTogether(
name='offeryearentity',
unique_together=None,
),
migrations.RemoveField(
model_name='offeryearentity',
name='education_group_year',
),
migrations.RemoveField(
model_name='offeryearentity',
name='entity',
),
migrations.RemoveField(
model_name='offeryearentity',
name='offer_year',
),
migrations.RemoveField(
model_name='structure',
name='organization',
),
migrations.RemoveField(
model_name='structure',
name='part_of',
),
migrations.RemoveField(
model_name='structureaddress',
name='country',
),
migrations.RemoveField(
model_name='structureaddress',
name='structure',
),
migrations.RemoveField(
model_name='entitymanager',
name='structure',
),
migrations.RemoveField(
model_name='learningunityear',
name='structure',
),
migrations.RemoveField(
model_name='offerenrollment',
name='offer_year',
),
migrations.RemoveField(
model_name='offeryearcalendar',
name='offer_year',
),
migrations.RemoveField(
model_name='sessionexam',
name='offer_year',
),
migrations.AlterUniqueTogether(
name='programmanager',
unique_together={('person', 'education_group')},
),
migrations.DeleteModel(
name='ExternalOffer',
),
migrations.DeleteModel(
name='Offer',
),
migrations.DeleteModel(
name='OfferType',
),
migrations.DeleteModel(
name='OfferYearDomain',
),
migrations.DeleteModel(
name='OfferYearEntity',
),
migrations.DeleteModel(
name='Structure',
),
migrations.DeleteModel(
name='StructureAddress',
),
migrations.RemoveField(
model_name='programmanager',
name='offer_year',
),
migrations.DeleteModel(
name='OfferYear',
),
]
| agpl-3.0 | -2,530,800,407,347,505,000 | 28.508982 | 111 | 0.52638 | false |
xcme/briseis | devices/SW-Common-26.py | 1 | 5590 |
# coding=UTF8
# The line above is needed in case non-ASCII characters are used, for example Cyrillic.
ms_RxTx = {
# RX .1.3.6.1.2.1.31.1.1.1.6 ifHCInOctets
'~RX.1' : '.1.3.6.1.2.1.31.1.1.1.6.1',
'~RX.2' : '.1.3.6.1.2.1.31.1.1.1.6.2',
'~RX.3' : '.1.3.6.1.2.1.31.1.1.1.6.3',
'~RX.4' : '.1.3.6.1.2.1.31.1.1.1.6.4',
'~RX.5' : '.1.3.6.1.2.1.31.1.1.1.6.5',
'~RX.6' : '.1.3.6.1.2.1.31.1.1.1.6.6',
'~RX.7' : '.1.3.6.1.2.1.31.1.1.1.6.7',
'~RX.8' : '.1.3.6.1.2.1.31.1.1.1.6.8',
'~RX.9' : '.1.3.6.1.2.1.31.1.1.1.6.9',
'~RX.10' : '.1.3.6.1.2.1.31.1.1.1.6.10',
'~RX.11' : '.1.3.6.1.2.1.31.1.1.1.6.11',
'~RX.12' : '.1.3.6.1.2.1.31.1.1.1.6.12',
'~RX.13' : '.1.3.6.1.2.1.31.1.1.1.6.13',
'~RX.14' : '.1.3.6.1.2.1.31.1.1.1.6.14',
'~RX.15' : '.1.3.6.1.2.1.31.1.1.1.6.15',
'~RX.16' : '.1.3.6.1.2.1.31.1.1.1.6.16',
'~RX.17' : '.1.3.6.1.2.1.31.1.1.1.6.17',
'~RX.18' : '.1.3.6.1.2.1.31.1.1.1.6.18',
'~RX.19' : '.1.3.6.1.2.1.31.1.1.1.6.19',
'~RX.20' : '.1.3.6.1.2.1.31.1.1.1.6.20',
'~RX.21' : '.1.3.6.1.2.1.31.1.1.1.6.21',
'~RX.22' : '.1.3.6.1.2.1.31.1.1.1.6.22',
'~RX.23' : '.1.3.6.1.2.1.31.1.1.1.6.23',
'~RX.24' : '.1.3.6.1.2.1.31.1.1.1.6.24',
'~RX.25' : '.1.3.6.1.2.1.31.1.1.1.6.25',
'~RX.26' : '.1.3.6.1.2.1.31.1.1.1.6.26',
# TX .1.3.6.1.2.1.31.1.1.1.10 ifHCOutOctets
'~TX.1' : '.1.3.6.1.2.1.31.1.1.1.10.1',
'~TX.2' : '.1.3.6.1.2.1.31.1.1.1.10.2',
'~TX.3' : '.1.3.6.1.2.1.31.1.1.1.10.3',
'~TX.4' : '.1.3.6.1.2.1.31.1.1.1.10.4',
'~TX.5' : '.1.3.6.1.2.1.31.1.1.1.10.5',
'~TX.6' : '.1.3.6.1.2.1.31.1.1.1.10.6',
'~TX.7' : '.1.3.6.1.2.1.31.1.1.1.10.7',
'~TX.8' : '.1.3.6.1.2.1.31.1.1.1.10.8',
'~TX.9' : '.1.3.6.1.2.1.31.1.1.1.10.9',
'~TX.10' : '.1.3.6.1.2.1.31.1.1.1.10.10',
'~TX.11' : '.1.3.6.1.2.1.31.1.1.1.10.11',
'~TX.12' : '.1.3.6.1.2.1.31.1.1.1.10.12',
'~TX.13' : '.1.3.6.1.2.1.31.1.1.1.10.13',
'~TX.14' : '.1.3.6.1.2.1.31.1.1.1.10.14',
'~TX.15' : '.1.3.6.1.2.1.31.1.1.1.10.15',
'~TX.16' : '.1.3.6.1.2.1.31.1.1.1.10.16',
'~TX.17' : '.1.3.6.1.2.1.31.1.1.1.10.17',
'~TX.18' : '.1.3.6.1.2.1.31.1.1.1.10.18',
'~TX.19' : '.1.3.6.1.2.1.31.1.1.1.10.19',
'~TX.20' : '.1.3.6.1.2.1.31.1.1.1.10.20',
'~TX.21' : '.1.3.6.1.2.1.31.1.1.1.10.21',
'~TX.22' : '.1.3.6.1.2.1.31.1.1.1.10.22',
'~TX.23' : '.1.3.6.1.2.1.31.1.1.1.10.23',
'~TX.24' : '.1.3.6.1.2.1.31.1.1.1.10.24',
'~TX.25' : '.1.3.6.1.2.1.31.1.1.1.10.25',
'~TX.26' : '.1.3.6.1.2.1.31.1.1.1.10.26',
}
ms_RX_CRC = {
# RX_CRC .1.3.6.1.2.1.16.1.1.1.8 etherStatsCRCAlignErrors
'RX_CRC.1' : '.1.3.6.1.2.1.16.1.1.1.8.1',
'RX_CRC.2' : '.1.3.6.1.2.1.16.1.1.1.8.2',
'RX_CRC.3' : '.1.3.6.1.2.1.16.1.1.1.8.3',
'RX_CRC.4' : '.1.3.6.1.2.1.16.1.1.1.8.4',
'RX_CRC.5' : '.1.3.6.1.2.1.16.1.1.1.8.5',
'RX_CRC.6' : '.1.3.6.1.2.1.16.1.1.1.8.6',
'RX_CRC.7' : '.1.3.6.1.2.1.16.1.1.1.8.7',
'RX_CRC.8' : '.1.3.6.1.2.1.16.1.1.1.8.8',
'RX_CRC.9' : '.1.3.6.1.2.1.16.1.1.1.8.9',
'RX_CRC.10' : '.1.3.6.1.2.1.16.1.1.1.8.10',
'RX_CRC.11' : '.1.3.6.1.2.1.16.1.1.1.8.11',
'RX_CRC.12' : '.1.3.6.1.2.1.16.1.1.1.8.12',
'RX_CRC.13' : '.1.3.6.1.2.1.16.1.1.1.8.13',
'RX_CRC.14' : '.1.3.6.1.2.1.16.1.1.1.8.14',
'RX_CRC.15' : '.1.3.6.1.2.1.16.1.1.1.8.15',
'RX_CRC.16' : '.1.3.6.1.2.1.16.1.1.1.8.16',
'RX_CRC.17' : '.1.3.6.1.2.1.16.1.1.1.8.17',
'RX_CRC.18' : '.1.3.6.1.2.1.16.1.1.1.8.18',
'RX_CRC.19' : '.1.3.6.1.2.1.16.1.1.1.8.19',
'RX_CRC.20' : '.1.3.6.1.2.1.16.1.1.1.8.20',
'RX_CRC.21' : '.1.3.6.1.2.1.16.1.1.1.8.21',
'RX_CRC.22' : '.1.3.6.1.2.1.16.1.1.1.8.22',
'RX_CRC.23' : '.1.3.6.1.2.1.16.1.1.1.8.23',
'RX_CRC.24' : '.1.3.6.1.2.1.16.1.1.1.8.24',
'RX_CRC.25' : '.1.3.6.1.2.1.16.1.1.1.8.25',
'RX_CRC.26' : '.1.3.6.1.2.1.16.1.1.1.8.26',
}
ms_DS = {
# DS .1.3.6.1.2.1.10.7.2.1.19 dot3StatsDuplexStatus
'DS.1' : '.1.3.6.1.2.1.10.7.2.1.19.1',
'DS.2' : '.1.3.6.1.2.1.10.7.2.1.19.2',
'DS.3' : '.1.3.6.1.2.1.10.7.2.1.19.3',
'DS.4' : '.1.3.6.1.2.1.10.7.2.1.19.4',
'DS.5' : '.1.3.6.1.2.1.10.7.2.1.19.5',
'DS.6' : '.1.3.6.1.2.1.10.7.2.1.19.6',
'DS.7' : '.1.3.6.1.2.1.10.7.2.1.19.7',
'DS.8' : '.1.3.6.1.2.1.10.7.2.1.19.8',
'DS.9' : '.1.3.6.1.2.1.10.7.2.1.19.9',
'DS.10' : '.1.3.6.1.2.1.10.7.2.1.19.10',
'DS.11' : '.1.3.6.1.2.1.10.7.2.1.19.11',
'DS.12' : '.1.3.6.1.2.1.10.7.2.1.19.12',
'DS.13' : '.1.3.6.1.2.1.10.7.2.1.19.13',
'DS.14' : '.1.3.6.1.2.1.10.7.2.1.19.14',
'DS.15' : '.1.3.6.1.2.1.10.7.2.1.19.15',
'DS.16' : '.1.3.6.1.2.1.10.7.2.1.19.16',
'DS.17' : '.1.3.6.1.2.1.10.7.2.1.19.17',
'DS.18' : '.1.3.6.1.2.1.10.7.2.1.19.18',
'DS.19' : '.1.3.6.1.2.1.10.7.2.1.19.19',
'DS.20' : '.1.3.6.1.2.1.10.7.2.1.19.20',
'DS.21' : '.1.3.6.1.2.1.10.7.2.1.19.21',
'DS.22' : '.1.3.6.1.2.1.10.7.2.1.19.22',
'DS.23' : '.1.3.6.1.2.1.10.7.2.1.19.23',
'DS.24' : '.1.3.6.1.2.1.10.7.2.1.19.24',
'DS.25' : '.1.3.6.1.2.1.10.7.2.1.19.25',
'DS.26' : '.1.3.6.1.2.1.10.7.2.1.19.26',
}
ms_UpTime = {
# UP .1.3.6.1.2.1.1.3.0 sysUpTimeInstance
'UP.' : '.1.3.6.1.2.1.1.3.0'
}
| gpl-2.0 | -1,915,295,778,705,781,200 | 43.580645 | 84 | 0.398878 | false |
USGSDenverPychron/pychron | pychron/image/rpi_camera.py | 1 | 5433 |
# ===============================================================================
# Copyright 2016 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
import os
from threading import Thread
import picamera
import picamera.array
# ============= standard library imports ========================
# ============= local library imports ==========================
import time
from pychron.core.helpers.filetools import unique_path2
from pychron.headless_config_loadable import HeadlessConfigLoadable
from pychron.paths import paths
class RPiCamera(HeadlessConfigLoadable):
sharpness = 0
contrast = 0
brightness = 50
saturation = 0
ISO = 0
video_stabilization = False
exposure_compensation = 0
# exposure modes
# off, auto, night, nightpreview, backlight, spotlight, sports, snow, beach,
# verylong, fixedfps, antishake, fireworks,
exposure_mode = 'auto'
meter_mode = 'average' # stop, average, backlit, matrix
# awb_modes
# off, auto, sunlight, cloudy, shade, tungsten, fluorescent, incandescent, flash, horizon
awb_mode = 'auto'
# image effects
# none, negative, solarize, sketch, denoise, emboss, oilpaint, hatch,
# gpen, pastel, watercolor,film, blur, saturation, colorswap, washedout,
# posterise, colorpoint, colorbalance, cartoon, deinterlace1, deinterlace2
image_effect = 'none'
color_effects = None # (u,v)
rotation = 0 # 0,90,180,270
hflip = False
vflip = False
crop = (0.0, 0.0, 1.0, 1.0)
frame_rate = 10
def load_additional_args(self, *args, **kw):
config = self.get_configuration()
self.set_attribute(config, 'sharpness', 'Settings', 'sharpness', cast='int')
self.set_attribute(config, 'contrast', 'Settings', 'contrast', cast='int')
self.set_attribute(config, 'brightness', 'Settings', 'brightness', cast='int')
self.set_attribute(config, 'saturation', 'Settings', 'saturation', cast='int')
self.set_attribute(config, 'ISO', 'Settings', 'ISO', cast='int')
self.set_attribute(config, 'video_stabilization', 'Settings', 'video_stabilization', cast='boolean')
self.set_attribute(config, 'exposure_compensation', 'Settings', 'exposure_compensation', cast='int')
self.set_attribute(config, 'exposure_mode', 'Settings', 'exposure_mode')
self.set_attribute(config, 'meter_mode', 'Settings', 'meter_mode')
self.set_attribute(config, 'awb_mode', 'Settings', 'awb_mode')
self.set_attribute(config, 'image_effect', 'Settings', 'image_effect')
self.set_attribute(config, 'color_effects', 'Settings', 'color_effects')
self.set_attribute(config, 'rotation', 'Settings', 'rotation', cast='int')
self.set_attribute(config, 'hflip', 'Settings', 'hflip', cast='boolean')
self.set_attribute(config, 'vflip', 'Settings', 'vflip', cast='boolean')
crop = self.config_get(config, 'Settings', 'crop')
if crop:
self.crop = tuple(map(float, crop.split(',')))
return True
def start_video_service(self):
def func():
root = '/var/www/firm_cam'
if not os.path.isdir(root):
os.mkdir(root)
path = os.path.join(root, 'image.jpg')
with picamera.PiCamera() as camera:
self._setup_camera(camera)
camera.capture(path)
while 1:
camera.capture(path)
time.sleep(1/float(self.frame_rate))
t = Thread(target=func)
t.setDaemon(True)
t.start()
def get_image_array(self):
with picamera.PiCamera() as camera:
self._setup_camera(camera)
with picamera.array.PiRGBArray(camera) as output:
camera.capture(output, 'rgb')
return output.array
def capture(self, path=None, name=None, **options):
with picamera.PiCamera() as camera:
self._setup_camera(camera)
if path is None:
if name is None:
path, _ = unique_path2(paths.snapshot_dir, name, extension='.jpg')
else:
path, _ = unique_path2(paths.snapshot_dir, 'rpi', extension='.jpg')
camera.capture(path, **options)
# private
def _setup_camera(self, camera):
attrs = ('sharpness', 'contrast', 'brightness', 'saturation', 'ISO',
'video_stabilization', 'exposure_compensation', 'exposure_mode',
'meter_mode', 'awb_mode', 'image_effect', 'color_effects',
'rotation', 'hflip', 'vflip', 'crop')
for attr in attrs:
setattr(camera, attr, getattr(self, attr))
# ============= EOF =============================================
| apache-2.0 | -6,190,239,584,110,987,000 | 38.369565 | 108 | 0.589361 | false |
iancze/Starfish | Starfish/emulator/kernels.py | 1 | 1437 |
import numpy as np
import scipy as sp
import scipy.linalg    # needed so sp.linalg.block_diag below resolves
import scipy.spatial   # needed so sp.spatial.distance.cdist below resolves
def rbf_kernel(X, Z, variance, lengthscale):
"""
A classic radial-basis function (Gaussian; exponential squared) covariance kernel
.. math::
\\kappa(X, Z | \\sigma^2, \\Lambda) = \\sigma^2 \\exp\\left[-\\frac12 (X-Z)^T \\Lambda^{-1} (X-Z) \\right]
Parameters
----------
X : np.ndarray
The first set of points
Z : np.ndarray
The second set of points. Must have same second dimension as `X`
variance : double
The amplitude for the RBF kernel
lengthscale : np.ndarray or double
The lengthscale for the RBF kernel. Must have same second dimension as `X`
"""
sq_dist = sp.spatial.distance.cdist(X / lengthscale, Z / lengthscale, "sqeuclidean")
return variance * np.exp(-0.5 * sq_dist)
def batch_kernel(X, Z, variances, lengthscales):
"""
Batched RBF kernel
Parameters
----------
X : np.ndarray
The first set of points
Z : np.ndarray
The second set of points. Must have same second dimension as `X`
variances : np.ndarray
The amplitude for the RBF kernel
lengthscales : np.ndarray
The lengthscale for the RBF kernel. Must have same second dimension as `X`
See Also
--------
    :func:`rbf_kernel`
"""
blocks = [rbf_kernel(X, Z, var, ls) for var, ls in zip(variances, lengthscales)]
return sp.linalg.block_diag(*blocks)
| bsd-3-clause | -4,077,361,603,139,102,700 | 28.326531 | 114 | 0.622129 | false |
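A minimal usage sketch for the two kernel helpers above (array sizes and hyperparameter values are arbitrary examples):

import numpy as np
from Starfish.emulator.kernels import rbf_kernel, batch_kernel

X = np.random.rand(5, 3)                 # 5 points in 3 dimensions
Z = np.random.rand(4, 3)                 # 4 points in the same space
K = rbf_kernel(X, Z, variance=1.0, lengthscale=np.array([0.5, 0.5, 0.5]))
print(K.shape)                           # (5, 4)

variances = np.array([1.0, 2.0])
lengthscales = np.array([[0.5, 0.5, 0.5], [1.0, 1.0, 1.0]])
K_blocks = batch_kernel(X, Z, variances, lengthscales)   # block-diagonal
print(K_blocks.shape)                    # (10, 8)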
Arish813/Assignment-Solution | data_helpers.py | 1 | 6074 |
import numpy as np
import re
import itertools
from collections import Counter
import cPickle as pickle
import os
def clean_str(string):
"""
Tokenization/string cleaning for all datasets except for SST.
"""
string = re.sub(r"[^A-Za-z0-9:(),!?\'\`]", " ", string)
string = re.sub(r" : ", ":", string)
string = re.sub(r"\'s", " \'s", string)
string = re.sub(r"\'ve", " \'ve", string)
string = re.sub(r"n\'t", " n\'t", string)
string = re.sub(r"\'re", " \'re", string)
string = re.sub(r"\'d", " \'d", string)
string = re.sub(r"\'ll", " \'ll", string)
string = re.sub(r",", " , ", string)
string = re.sub(r"!", " ! ", string)
string = re.sub(r"\(", " \( ", string)
string = re.sub(r"\)", " \) ", string)
string = re.sub(r"\?", " \? ", string)
string = re.sub(r"\s{2,}", " ", string)
return string.strip().lower()
def load_data_and_labels():
"""
Loads data from files, splits the data into words and generates labels.
Returns split sentences and labels.
"""
# Load data from files
folder_prefix = 'data' + '/'
x_train = list(open(folder_prefix+"train_csv").readlines())
x_test = list(open(folder_prefix+"test_csv").readlines())
test_size = len(x_test)
x_text = x_train + x_test
# Split by words
clean_func = clean_str
x_text = [clean_func(sent) for sent in x_text]
y = [s.split(' ')[0].split(',')[0] for s in x_text]
x_text = [s.split(" ")[2:] for s in x_text]
# Generate labels
all_label = dict()
for label in y:
if not label in all_label:
all_label[label] = len(all_label) + 1
one_hot = np.identity(len(all_label))
y = [one_hot[ all_label[label]-1 ] for label in y]
return [x_text, y, test_size,all_label]
def load_trained_vecs(en_file, vocabulary):
folder_prefix = 'data' + '/'
if not os.path.exists(folder_prefix + 'trained_vecs.PICKLE'):
trained_vecs = load_bin_vec(folder_prefix + en_file, vocabulary)
with open(folder_prefix + 'trained_vecs.PICKLE', 'wb') as f:
pickle.dump([trained_vecs],f,protocol=-1)
else:
with open(folder_prefix + 'trained_vecs.PICKLE', 'rb') as f:
trained_vecs = pickle.load(f)[0]
return trained_vecs
def pad_sentences(sentences, padding_word="<PAD/>"):
"""
Pads all sentences to the same length. The length is defined by the longest sentence.
Returns padded sentences.
"""
sequence_length = max(len(x) for x in sentences)
padded_sentences = []
for i in range(len(sentences)):
sentence = sentences[i]
num_padding = sequence_length - len(sentence)
new_sentence = sentence + [padding_word] * num_padding
padded_sentences.append(new_sentence)
return padded_sentences
def build_vocab(sentences):
"""
Builds a vocabulary mapping from word to index based on the sentences.
Returns vocabulary mapping and inverse vocabulary mapping.
"""
# Build vocabulary
word_counts = Counter(itertools.chain(*sentences))
# Mapping from index to word
# vocabulary_inv=['<PAD/>', 'the', ....]
vocabulary_inv = [x[0] for x in word_counts.most_common()]
# Mapping from word to index
# vocabulary = {'<PAD/>': 0, 'the': 1, ',': 2, 'a': 3, 'and': 4, ..}
vocabulary = {x: i for i, x in enumerate(vocabulary_inv)}
return [vocabulary, vocabulary_inv]
def build_input_data(sentences, labels, vocabulary):
"""
Maps sentences and labels to vectors based on a vocabulary.
"""
x = np.array([[vocabulary[word] for word in sentence] for sentence in sentences])
y = np.array(labels)
return [x, y]
def load_data():
"""
Loads and preprocessed data
Returns input vectors, labels, vocabulary, and inverse vocabulary.
"""
# Load and preprocess data
sentences, labels, test_size,all_label = load_data_and_labels()
sentences_padded = pad_sentences(sentences)
vocabulary, vocabulary_inv = build_vocab(sentences_padded)
x, y = build_input_data(sentences_padded, labels, vocabulary)
return [x, y, vocabulary, vocabulary_inv, test_size,all_label]
def load_bin_vec(fname, vocab):
"""
Loads 300x1 word vecs from Google (Mikolov) word2vec
"""
word_vecs = {}
with open(fname, "rb") as f:
header = f.readline()
vocab_size, layer1_size = map(int, header.split())
binary_len = np.dtype('float32').itemsize * layer1_size
for line in xrange(vocab_size):
word = []
while True:
ch = f.read(1)
if ch == ' ':
word = ''.join(word)
break
if ch != '\n':
word.append(ch)
if word in vocab:
word_vecs[word] = np.fromstring(f.read(binary_len), dtype='float32')
else:
f.read(binary_len)
return word_vecs
def add_unknown_words(word_vecs, vocab, min_df=0, k=300):
"""
0.25 is chosen so the unknown vectors have (approximately) same variance as pre-trained ones
"""
count = 0
for word in vocab:
if word not in word_vecs and vocab[word] >= min_df:
word_vecs[word] = np.random.uniform(-0.25,0.25,k)
else:
count += 1
return count
def batch_iter(data, batch_size, num_epochs):
"""
Generates a batch iterator for a dataset.
"""
data = np.array(data)
data_size = len(data)
num_batches_per_epoch = int(len(data)/batch_size) + 1
for epoch in range(num_epochs):
# Shuffle the data at each epoch
shuffle_indices = np.random.permutation(np.arange(data_size))
shuffled_data = data[shuffle_indices]
for batch_num in range(num_batches_per_epoch):
start_index = batch_num * batch_size
end_index = (batch_num + 1) * batch_size
if end_index > data_size:
end_index = data_size
start_index = end_index - batch_size
yield shuffled_data[start_index:end_index]
| gpl-3.0 | -3,709,110,022,735,993,300 | 35.154762 | 96 | 0.590715 | false |
shailr/vms | django_http_api/wsgi.py | 1 | 1482 |
"""
WSGI config for django_http_api project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
from django.core.wsgi import get_wsgi_application
from dj_static import Cling
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "django_http_api.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "django_http_api.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
application = Cling(get_wsgi_application())
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| gpl-2.0 | -7,422,042,309,239,594,000 | 42.588235 | 79 | 0.792173 | false |
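The `application` object above is what a WSGI server imports; typical invocations look like the following (illustrative commands, not taken from this repository):

    gunicorn django_http_api.wsgi:application
    uwsgi --http :8000 --module django_http_api.wsgi:application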
mleger45/turnex | turnex/urls.py | 1 | 1209 |
"""turnex URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from django.views.static import serve
from django.conf import settings
urlpatterns = [
url(r'^admin/', admin.site.urls, name="admin"),
url(r'^api-auth/', include('rest_framework.urls',
namespace='rest_framework'), name="api_auth"),
url(r'api/v1/', include('api.v1.urls'), name="api"),
url(r'turnex/', include('msn.urls', namespace='msn')),
url(r'^media/(?P<path>.*)$', serve, {
'document_root': settings.MEDIA_ROOT,
}),
]
| mit | -6,026,112,504,968,619,000 | 39.3 | 79 | 0.656741 | false |
ehliang/myo-unlock | Myo_unlock.py | 1 | 5681 |
# This program takes simple input from the Myo to unlock a complex password string and log in to email
# Further development would allow it to log into social media accounts/computers
#A program by Ethan Liang
from __future__ import print_function
import myo as libmyo; libmyo.init()
import time
import sys
import smtplib
import getpass
import email.header
import re
import datetime
import json
import email
import requests
gesturesGiven = []
userInput = []
userPassword = []
originalUsernameString = ""
originalPasswordString = ""
activated = False
useringesture = ""
userpasscheck = ""
counter = 1
confirm2 = 0  # menu choice; initialised so on_pose() can test it before any input has been read
class Listener(libmyo.DeviceListener):
"""
Listener implementation. Return False from any function to
stop the Hub.
"""
interval = 0.05 # Output only 0.05 seconds
pose_run = False
def on_connect(self, myo, timestamp):
print("Hello, Myo!")
def on_disconnect(self, myo, timestamp):
print("Goodbye, Myo!")
# def on_orientation_data(self, myo, timestamp, quat):
# print("Orientation:", quat.x, quat.y, quat.z, quat.w)
def passcheck(self):
print("Enter the password. Please make a gesture. When finished with your combination, press 0, otherwise enter any character after each gesture.")
userPassword.append(userpasscheck)
print("Detected: " + str(userpasscheck))
confirm3 = raw_input()
if confirm3 == "3":
activated = True
if userpasscheck == userInput:
print(userInput)
fromaddr = originalUsernameString
toaddrs = "[email protected]"
msg = "Test"
username = originalUsernameString
password = originalPasswordString
server = smtplib.SMTP('smtp.gmail.com:587')
server.starttls()
server.login(username,password)
server.sendmail(fromaddr, toaddrs, msg)
server.quit()
activated=True
else:
print("Error")
activated=True
else:
activated = False
def record(self):
global count, activated
count = 1
print("Please make a gesture, this is the "+ str(count) + " character of the password. When finished with your combination, press 0, otherwise enter any character")
activated = True
userInput.append(useringesture)
print("Detected: " + str(useringesture))
confirm = raw_input()
if confirm == "0":
# activated = True
print("Was your password string " + str(userInput) + "? If yes, enter 3. Otherwise, enter 4. ")
confirm2 = int(raw_input())
if confirm2 == 3:
print("abc")
activated = False
self.passcheck()
confirm = "p"
elif confirm2 == 4:
del userInput[:]
activated = False
else:
activated = False
# print("Was your gesture" + str(useringesture) + "? Please enter yes or no")
# print("Was your gesture" + str(useringesture) + "? Please enter yes or no")
# confirm = raw_input()
# while confirm != "yes" and confirm != "no":
# print("Was your gesture" + str(useringesture) + "? Please enter yes or no")
# confirm = raw_input()
# if confirm == "yes":
#def keyPressHandler(event):
# if event.keysym == "0":
# activated = True
def on_pose(self, myo, timestamp, pose):
global useringesture, userpasscheck, confirm2
if activated == False:
if pose!= libmyo.Pose.rest and confirm2==3:
userpasscheck = pose
self.passcheck()
elif pose!= libmyo.Pose.rest:
useringesture = pose
self.record()
#count+=1
# if pose == libmyo.Pose.fist:
# print("Don't show me 'ya fist!")
# gesturesGiven.append(pose)
# print(gesturesGiven[0])
# #Stops the Hub
# if pose == libmyo.Pose.wave_out:
# print("abcd")
# gesturesGiven.append(pose)
# print(gesturesGiven)
# return False
# if self.pose_run:
# return
# self.pose_run = True
if userPassword == userInput:
originalUsernameString = ""
originalPasswordString = ""
fromaddr = originalUsernameString
toaddrs = ""
msg = "Test"
username = originalUsernameString
password = originalPasswordString
server = smtplib.SMTP('smtp.gmail.com:587')
server.starttls()
server.login(username,password)
server.sendmail(fromaddr, toaddrs, msg)
server.quit()
def main():
print("Connecting to Myo ... Use CTRL^C to exit.")
print("If nothing happens, make sure the Bluetooth adapter is plugged in,")
print("Myo Connect is running and your Myo is put on.")
hub = libmyo.Hub()
hub.set_locking_policy(libmyo.LockingPolicy.none)
hub.run(1, Listener())
# Listen to keyboard interrupts and stop the hub in that case.
try:
while hub.running:
time.sleep(0.25)
except KeyboardInterrupt:
print("\nQuitting ...")
finally:
print("Shutting down hub...")
hub.shutdown()
if __name__ == '__main__':
main()
| mit | 8,310,767,131,224,216,000 | 28.588542 | 172 | 0.556064 | false |
arhote/exchange | exchange/tests/thumbnail_test.py | 1 | 4572 |
#
# Test Creation of Thumbnails.
#
from . import ExchangeTest
from base64 import b64encode
class ThumbnailTest(ExchangeTest):
def setUp(self):
super(ThumbnailTest, self).setUp()
self.login()
def get_thumbnail(self, path):
r = self.client.get(path)
self.assertEqual(r.status_code, 200, "Failed to get thumbnail")
return r
def test_blank(self):
r = self.client.get('/thumbnails/maps/no-id')
# TODO: should this really return a 404
# *and* a blank image?
self.assertEqual(r.status_code, 200)
# the 'no image' gif has 713 characters in it.
self.assertEqual(len(r.content), 713,
"This image does not appear to be the no image gif")
def test_basic_upload(self, img='test_thumbnail0.png'):
test_thumb = open(self.get_file_path(img), 'r').read()
# post up a legend
r = self.client.post('/thumbnails/maps/0',
test_thumb,
content_type='application/octet-stream')
# success!
self.assertEqual(r.status_code, 201)
def test_overwrite(self):
# The legend should overwrite with the new image
# without throwing an error.
self.test_basic_upload()
# yes, just do it again and see if the is an error
self.test_basic_upload(img='test_thumbnail1.png')
        # and check that we have something more like test_thumbnail1.png
r = self.get_thumbnail('/thumbnails/maps/0')
self.assertEqual(len(r.content), 4911,
'This does not look like thumbnail 1')
def test_bad_image(self):
# first a test without any thumbnail
r = self.client.post('/thumbnails/maps/0')
self.assertEqual(r.status_code, 400,
'Tried to process a missing thumbnail.')
# now a post with a *bad* thumbnail string.
shp_file = open(self.get_file_path('test_point.shp'), 'r')
r = self.client.post('/thumbnails/maps/0',
data=shp_file,
content_type='application/octet-stream')
self.assertEqual(r.status_code, 400,
'Tried to process a poorly formatted thumbnail.')
def test_bad_object_type(self):
r = self.client.post('/thumbnails/chicken/feed')
self.assertEqual(r.status_code, 404)
def test_huge_thumbnail(self):
# thumbnails are limited in size, luckily we
# can use a big random file since the size check happens
# before the mimetype check.
big_string = '*' * 400001
r = self.client.post('/thumbnails/maps/0', big_string,
content_type='text/plain')
self.assertEqual(r.status_code, 400)
# The client needs to be able to upload the image as a
# base 64 encoded string. This tests that capability.
#
def test_base64_pngs(self):
thumbpng = open(
self.get_file_path('test_thumbnail0.png'), 'rb').read()
header = 'data:image/png;base64,'
base64_png = header + b64encode(thumbpng)
r = self.client.post('/thumbnails/maps/0',
base64_png,
content_type='image/png')
self.assertEqual(r.status_code, 201, 'Error: ' + r.content)
# then test the correct image came back.
r = self.client.get('/thumbnails/maps/0')
test_b64 = b64encode(r.content)
self.assertEqual(test_b64, b64encode(thumbpng),
'Images appear to differ.')
# Ensure that layer legends are preserved when set.
#
def test_two_layers(self):
png1 = open(self.get_file_path('test_thumbnail0.png'), 'rb').read()
png2 = open(self.get_file_path('test_thumbnail1.png'), 'rb').read()
self.client.post('/thumbnails/layers/layer1', png1,
content_type='image/png')
self.client.post('/thumbnails/layers/layer2', png2,
content_type='image/png')
r = self.client.get('/thumbnails/layers/layer1')
self.assertEqual(r.status_code, 200, 'failed to retrieve thumbnail')
data1 = r.content
r = self.client.get('/thumbnails/layers/layer2')
self.assertEqual(r.status_code, 200, 'failed to retrieve thumbnail')
data2 = r.content
self.assertEqual(data1, png1, 'Mismatch in thumbnail 1')
self.assertEqual(data2, png2, 'Mismatch in thumbnail 2')
| gpl-3.0 | -1,070,382,663,439,937,300 | 32.866667 | 77 | 0.58399 | false |
techinc/techinc_badge | pathtokicad/pathtokicad.py | 1 | 6404 |
#!/usr/bin/python
import sys, math
fill_paths = [
("23", "soldermask.path"), # soldermask front
# ("21", "silkscreen.path"),
# ("15", "copper_top.path"),
("15", "copper_top_x.path"),
("0", "copper_bottom.path"),
("0", "battery_holder.path"),
("22", "battery_holder_mask.path"),
("21", "ispmark.path"),
("0", "safetypin.path"),
("22", "safetypin.path"),
]
segment_paths = [
("21", "silkscreen.segments", .9),
("28", "edges_round.segments", .9),
# ("28", "edges.segments", .9),
("20", "techincnl.segments", .9),
]
pads = [
( (-129.50091,49.85), 2, 3 )
]
vias = [
( 10, 10),
( 20, 10),
( 10, 20),
( 20, 20),
]
name = "techinc_badge"
start = cur = None
cubic_sections = 32
in_dpi, out_dpi = 90., 10000.
scale = out_dpi/in_dpi
def dist(a, b):
ax, ay = a
bx, by = b
return math.sqrt((ax-bx)**2 + (ay-by)**2)
def set_cur(newcur):
global cur
x, y = cur = newcur
def interpolate(pos1, pos2, d):
x1, y1 = pos1
x2, y2 = pos2
return ( x1*(1-d) + x2*d, y1*(1-d) + y2*d )
def get_abs(coords):
x, y = cur
dx, dy = coords
return (x+dx, y+dy)
def coord_fmt( coords ):
x, y = coords
return "%d %d" % ( round(x*scale), round(y*scale) )
def output_line( coords ):
set_cur(coords)
return [ "Dl " + coord_fmt(coords) ]
def output_rel_line( coords ):
return output_line(get_abs(coords))
def output_move( coords ):
global start
if start == None:
start = coords
set_cur(coords)
return [ "Dl " + coord_fmt(coords) ]
def output_rel_move( coords ):
return output_move(get_abs(coords))
def output_cubic( guide1, guide2, end ):
start = cur
n = min(int(dist(start, end)*scale/40.)+1, cubic_sections)
v = []
for i in xrange(1, n+1):
d = i/float(n)
a = interpolate(start, guide1, d)
b = interpolate(guide1, guide2, d)
c = interpolate(guide2, end, d)
ab = interpolate(a, b, d)
bc = interpolate(b, c, d)
abc = interpolate(ab, bc, d)
v += output_line(abc)
return v
def output_line_segment( coords, layer ):
print "DS %s %s %d %s" % (coord_fmt(cur), coord_fmt(coords),width*scale,layer)
set_cur(coords)
def output_cubic_segment( guide1, guide2, end, layer ):
start = cur
n = min(int(dist(start, end)*scale/40.)+1, cubic_sections)
for i in xrange(1, n+1):
d = i/float(n)
a = interpolate(start, guide1, d)
b = interpolate(guide1, guide2, d)
c = interpolate(guide2, end, d)
ab = interpolate(a, b, d)
bc = interpolate(b, c, d)
abc = interpolate(ab, bc, d)
output_line_segment(abc, layer)
def output_rel_cubic( guide1, guide2, end ):
return output_cubic( get_abs(guide1), get_abs(guide2), get_abs(end) )
def output_rel_move( coords ):
return output_move(get_abs(coords))
def output_close():
global start
set_cur(start)
start = None
return [ "Dl " + coord_fmt(cur) ]
def get_coords(s):
return map(float, s)
def pad_at(coords):
return """$PAD
Sh "1" C 600 600 0 0 0
Dr 400 0 0
At STD N 00E0FFFF
Ne 0 ""
Po """+coord_fmt(coords)+"""
$EndPAD"""
def via_at(coords):
return """$TRACK
Po 3 """+coord_fmt(coords)+" "+coord_fmt(coords)+""" 350 -1
De 15 1 0 0 0
$EndTRACK"""
def pad_grid(coords, w, h, pitch=.1):
x, y = coords
v = []
for i in xrange(w):
for j in xrange(h):
v += [ pad_at( (x + pitch*in_dpi*i, y + pitch*in_dpi*j) ) ]
return '\n'.join(v)
def print_path(data, layer):
global start, cur
values = (x for x in data.replace(',', ' ').split(' ') if x != '' )
mode = 'z'
cur = (0.,0.)
start = None
v = []
for x in values:
if x[-1] == '\n':
x = x[:-1]
if x in 'mclMCL':
mode = x
continue
if x in 'zZ':
mode = x
if mode in 'zZ':
v += output_close()
print 'DP 0 0 0 0 %d 1 %s' % (len(v), layer)
print '\n'.join(v)
v = []
elif mode == 'm':
v += output_rel_move(get_coords((x, values.next())))
mode = 'l'
elif mode == 'M':
v += output_move(get_coords((x, values.next())))
mode = 'L'
elif mode == 'c':
guide1 = x, values.next()
guide2 = values.next(), values.next()
end = values.next(), values.next()
v += output_rel_cubic(get_coords(guide1), get_coords(guide2), get_coords(end))
elif mode == 'C':
guide1 = x, values.next()
guide2 = values.next(), values.next()
end = values.next(), values.next()
v += output_cubic(get_coords(guide1), get_coords(guide2), get_coords(end))
elif mode == 'l':
v += output_rel_line(get_coords((x, values.next())))
elif mode == 'L':
v += output_line(get_coords((x, values.next())))
else:
print "ERROR: " + x
sys.exit(1)
def print_segments(data, layer, width):
global start
values = (x for x in data.replace(',', ' ').split(' ') if x != '' )
set_cur( (0.,0.) )
start = cur
for x in values:
if x[-1] == '\n':
x = x[:-1]
if x in 'mclMCL':
mode = x
continue
if x in 'zZ':
mode = x
if mode in 'zZ':
print "DS %s %s %d %s" % (coord_fmt(cur), coord_fmt(start),width*scale,layer)
set_cur(start)
elif mode == 'm':
set_cur(get_abs(get_coords((x, values.next()))))
start = cur
mode = 'l'
elif mode == 'M':
set_cur(get_coords((x, values.next())))
start = cur
mode = 'L'
elif mode == 'l':
pos = get_abs(get_coords((x, values.next())))
print "DS %s %s %d %s" % (coord_fmt(cur), coord_fmt(pos),width*scale,layer)
set_cur(pos)
elif mode == 'L':
pos = get_coords((x, values.next()))
print "DS %s %s %d %s" % (coord_fmt(cur), coord_fmt(pos),width*scale,layer)
set_cur(pos)
elif mode == 'c':
guide1 = x, values.next()
guide2 = values.next(), values.next()
end = values.next(), values.next()
output_cubic_segment(get_abs(get_coords(guide1)), get_abs(get_coords(guide2)), get_abs(get_coords(end)),layer)
elif mode == 'C':
guide1 = x, values.next()
guide2 = values.next(), values.next()
end = values.next(), values.next()
output_cubic_segment(get_coords(guide1), get_coords(guide2), get_coords(end),layer)
else:
print "ERROR: " + x
sys.exit(1)
print """PCBNEW-LibModule-V1
$INDEX
"""
print name
print """$EndINDEX
$MODULE """ + name + """
Po 0 0 0 15 00000000 00000000 ~~
Li """ + name
for layer, filename in fill_paths:
f = open(filename)
print_path(f.read(1000000), layer)
f.close()
for layer, filename, width in segment_paths:
f = open(filename)
print_segments(f.read(1000000), layer, width)
f.close()
for topleft, w, h in pads:
print pad_grid(topleft, w, h)
#for coords in vias:
# print via_at( coords )
print """$EndMODULE """ + name + """
$EndLIBRARY"""
| mit | -4,837,021,785,273,147,000 | 20.275748 | 113 | 0.593379 | false |
tobetter/linaro-image-tools | linaro_image_tools/hwpack/package_unpacker.py | 1 | 2545 |
# Copyright (C) 2010, 2011, 2013 Linaro
#
# This file is part of Linaro Image Tools.
#
# Linaro Image Tools is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# Linaro Image Tools is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Linaro Image Tools; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
# USA.
import logging
import os
import tempfile
from subprocess import PIPE
from shutil import rmtree
from linaro_image_tools import cmd_runner
logger = logging.getLogger(__name__)
class PackageUnpacker(object):
def __enter__(self):
self.tempdir = tempfile.mkdtemp()
return self
def __exit__(self, type, value, traceback):
if self.tempdir is not None and os.path.exists(self.tempdir):
rmtree(self.tempdir)
def get_path(self, package_file_name, file_name=''):
"""Get package or file path in unpacker tmp dir."""
package_dir = os.path.basename(package_file_name)
return os.path.join(self.tempdir, package_dir, file_name)
def unpack_package(self, package_file_name):
# We could extract only a single file, but since dpkg will pipe
# the entire package through tar anyway we might as well extract all.
unpack_dir = self.get_path(package_file_name)
if not os.path.isdir(unpack_dir):
os.mkdir(unpack_dir)
p = cmd_runner.run(["tar", "-C", unpack_dir, "-xf", "-"], stdin=PIPE)
cmd_runner.run(["dpkg", "--fsys-tarfile", package_file_name],
stdout=p.stdin).communicate()
p.communicate()
def get_file(self, package, file):
# File path passed here must not be absolute, or file from
# real filesystem will be referenced.
assert file and file[0] != '/'
self.unpack_package(package)
logger.debug("Unpacked package %s." % package)
temp_file = self.get_path(package, file)
assert os.path.exists(temp_file), "The file '%s' was " \
"not found in the package '%s'." % (file, package)
return temp_file
| gpl-3.0 | 7,460,180,015,763,455,000 | 37.560606 | 77 | 0.669155 | false |
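A minimal usage sketch for the context manager above, with a hypothetical .deb path and member name (dpkg and tar must be available on the host):

from linaro_image_tools.hwpack.package_unpacker import PackageUnpacker

with PackageUnpacker() as unpacker:
    # Unpacks the package into a temporary directory (removed on exit) and
    # returns the extracted member's path; the member path must be relative.
    path = unpacker.get_file('u-boot-linaro_2013.04-1_armhf.deb',
                             'usr/lib/u-boot/config')
    print(path)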
SCUEvals/scuevals-api | scuevals_api/models/api_key.py | 1 | 1666 |
from sqlalchemy import func
from . import db
from .assoc import api_key_permission
from .permission import Permission
API_KEY_TYPE = 'api_key'
class APIKey(db.Model):
__tablename__ = 'api_keys'
id = db.Column(db.Integer, primary_key=True)
key = db.Column(db.Text, nullable=False, unique=True)
issued_at = db.Column(db.DateTime(timezone=True), server_default=func.now(), nullable=False)
university_id = db.Column('university_id', db.Integer, db.ForeignKey('universities.id'), nullable=False)
university = db.relationship('University', back_populates='api_keys')
permissions = db.relationship(
'Permission', secondary=api_key_permission, back_populates='api_keys', passive_deletes=True
)
def _get_permissions(self):
return [permission.id for permission in self.permissions]
def _set_permissions(self, value):
while self.permissions:
del self.permissions[0]
for permission_id in value:
permission = Permission.query.get(permission_id)
if permission is None:
raise ValueError('permission does not exist: {}'.format(permission_id))
self.permissions.append(permission)
permissions_list = property(_get_permissions,
_set_permissions,
None,
'Property permissions_list is a simple wrapper for permissions relation')
def identity(self):
return {
'id': self.id,
'university_id': self.university_id,
'type': API_KEY_TYPE,
'permissions': self.permissions_list
}
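# Illustrative sketch of the permissions_list wrapper (not part of the
# original module; assumes an application context and an existing Permission
# row with id 1):
#
#     key = APIKey(key='secret', university_id=1)
#     key.permissions_list = [1]      # replaces the permissions relation
#     db.session.add(key)
#     db.session.commit()
#     assert key.permissions_list == [1]
#
# Assigning to permissions_list raises ValueError if any id does not
# correspond to an existing Permission.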
| agpl-3.0 | -8,820,916,295,296,028,000 | 33 | 108 | 0.62425 | false |
AI-comp/Orientation2015Problems | rime/basic/consts.py | 1 | 3991 | #!/usr/bin/python
#
# Copyright (c) 2011 Rime Project.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
RIMEROOT_FILE = 'RIMEROOT'
PROBLEM_FILE = 'PROBLEM'
SOLUTION_FILE = 'SOLUTION'
TESTS_FILE = 'TESTS'
STAMP_FILE = '.stamp'
IN_EXT = '.in'
DIFF_EXT = '.diff'
OUT_EXT = '.out'
EXE_EXT = '.exe'
JUDGE_EXT = '.judge'
CACHE_EXT = '.cache'
LOG_EXT = '.log'
VALIDATION_EXT = '.validation'
RIME_OUT_DIR = 'rime-out'
### Limit the width of help messages to 75 characters!
GLOBAL_HELP = """\
Rime is a tool for programming contest organizers to automate the usual,
boring and error-prone process of problem set preparation. It supports
various programming contest styles (ACM-ICPC, TopCoder, etc.) via plugins.
To see a brief description and available options of a command, try:
rime.py help <command>
"""
BUILD_HELP = """\
If the target is a project, Rime builds all problems recursively.
If the target is a problem, Rime builds all solutions and a testset
recursively.
If the target is a solution, Rime compiles the solution program specified
in SOLUTION file.
If the target is a testset, Rime compiles all necessary programs including
input generators, input validators, output judges, and a reference solution
that is automatically selected or explicitly specified in PROBLEM file.
Then it copies static input/output files (*.in, *.diff) into rime-out
directory, runs input generators, runs input validators against all
static/generated input files, and finally runs a reference solution over
them to generate reference output files.
<target> can be omitted to imply the target in the current working
directory.
If -k (--keep_going) is set, build does not stop even if a compile error
happens.
-j (--jobs) can be used to make builds faster by allowing several
processes to run in parallel.
"""
TEST_HELP = """\
If the target is a project, Rime runs tests of all problems recursively.
If the target is a problem, Rime runs tests of all solutions recursively.
If the target is a solution, Rime builds it and the testset of the problem,
and then runs the solution against a series of tests.
If the target is a testset, Rime runs tests of all solutions recursively.
<target> can be omitted to imply the target in the current working
directory.
If -k (--keep_going) is set, build does not stop even if a compile error
or a test failure happens.
-j (--jobs) can be used to make build and test faster by allowing several
processes to run in parallel. If a test fails with time limit exceeded in
parallelized tests, the same test is re-run after all other concurrent
processes have finished to see if it really does not run within the
specified time limit. You can always force tests not to run concurrently
with -p (--precise).
If -C (--cache_tests) is set, Rime skips unchanged tests which passed
previously.
"""
CLEAN_HELP = """\
Deletes files under corresponding directory in rime-out.
<target> can be omitted to imply the target in the current working
directory.
"""
| mit | 1,220,425,375,560,909,600 | 34.008772 | 79 | 0.761964 | false |
wengzhilai/family | iSoft/entity/model.py | 1 | 28429 | # coding: utf-8
from sqlalchemy import Column, DateTime, ForeignKey, Integer, Numeric, String, Table, Text
from sqlalchemy.orm import relationship
from sqlalchemy.schema import FetchedValue
from flask_sqlalchemy import SQLAlchemy
from iSoft import db
class FaAppVersion(db.Model):
__tablename__ = 'fa_app_version'
ID = db.Column(db.Integer, primary_key=True)
IS_NEW = db.Column(db.Numeric(1, 0), nullable=False)
TYPE = db.Column(db.String(20), nullable=False)
REMARK = db.Column(db.String(1000))
UPDATE_TIME = db.Column(db.DateTime)
UPDATE_URL = db.Column(db.String(200))
class FaBulletin(db.Model):
__tablename__ = 'fa_bulletin'
ID = db.Column(db.Integer, primary_key=True)
TITLE = db.Column(db.String(255), nullable=False)
PIC = db.Column(db.String(255))
TYPE_CODE = db.Column(db.String(50))
CONTENT = db.Column(db.Text)
USER_ID = db.Column(db.Integer)
PUBLISHER = db.Column(db.String(255), nullable=False)
ISSUE_DATE = db.Column(db.DateTime, nullable=False)
IS_SHOW = db.Column(db.Numeric(1, 0), nullable=False)
IS_IMPORT = db.Column(db.Numeric(1, 0), nullable=False)
IS_URGENT = db.Column(db.Numeric(1, 0), nullable=False)
AUTO_PEN = db.Column(db.Numeric(1, 0), nullable=False)
CREATE_TIME = db.Column(db.DateTime, nullable=False)
UPDATE_TIME = db.Column(db.DateTime, nullable=False)
REGION = db.Column(db.String(10), nullable=False)
fa_files = db.relationship(u'FaFile', secondary=u'fa_bulletin_file', backref=u'fa_bulletins')
fa_role = db.relationship(u'FaRole', secondary=u'fa_bulletin_role', backref=u'fa_bulletins')
t_fa_bulletin_file = db.Table(
'fa_bulletin_file',
db.Column('BULLETIN_ID', db.ForeignKey(u'fa_bulletin.ID'), primary_key=True, nullable=False),
db.Column('FILE_ID', db.ForeignKey(u'fa_files.ID'), primary_key=True, nullable=False)
)
class FaBulletinLog(db.Model):
__tablename__ = 'fa_bulletin_log'
ID = db.Column(db.Integer, primary_key=True)
BULLETIN_ID = db.Column(db.ForeignKey(u'fa_bulletin.ID'), nullable=False)
USER_ID = db.Column(db.Integer, nullable=False)
LOOK_TIME = db.Column(db.DateTime, nullable=False)
fa_bulletin = db.relationship(u'FaBulletin', primaryjoin='FaBulletinLog.BULLETIN_ID == FaBulletin.ID', backref=u'fa_bulletin_logs')
class FaBulletinReview(db.Model):
__tablename__ = 'fa_bulletin_review'
ID = db.Column(db.Integer, primary_key=True)
PARENT_ID = db.Column(db.ForeignKey(u'fa_bulletin_review.ID'))
BULLETIN_ID = db.Column(db.ForeignKey(u'fa_bulletin.ID'), nullable=False)
NAME = db.Column(db.String(50))
CONTENT = db.Column(db.Text)
USER_ID = db.Column(db.Integer, nullable=False)
ADD_TIME = db.Column(db.DateTime, nullable=False)
STATUS = db.Column(db.String(10), nullable=False)
STATUS_TIME = db.Column(db.DateTime, nullable=False)
fa_bulletin = db.relationship(u'FaBulletin', primaryjoin='FaBulletinReview.BULLETIN_ID == FaBulletin.ID', backref=u'fa_bulletin_reviews')
parent = db.relationship(u'FaBulletinReview', remote_side=[ID], primaryjoin='FaBulletinReview.PARENT_ID == FaBulletinReview.ID', backref=u'fa_bulletin_reviews')
t_fa_bulletin_role = db.Table(
'fa_bulletin_role',
db.Column('BULLETIN_ID', db.ForeignKey(u'fa_bulletin.ID'), primary_key=True, nullable=False),
db.Column('ROLE_ID', db.ForeignKey(u'fa_role.ID'), primary_key=True, nullable=False)
)
class FaBulletinType(db.Model):
__tablename__ = 'fa_bulletin_type'
ID = db.Column(db.Integer, primary_key=True)
NAME = db.Column(db.String(80))
class FaConfig(db.Model):
__tablename__ = 'fa_config'
ID = db.Column(db.Integer, primary_key=True)
TYPE = db.Column(db.String(10))
CODE = db.Column(db.String(32), nullable=False)
NAME = db.Column(db.String(50))
VALUE = db.Column(db.String(300))
REMARK = db.Column(db.String(500))
REGION = db.Column(db.String(10), nullable=False)
ADD_USER_ID = db.Column(db.Integer)
ADD_TIEM = db.Column(db.DateTime)
class FaDbServer(db.Model):
__tablename__ = 'fa_db_server'
ID = db.Column(db.Integer, primary_key=True)
DB_TYPE_ID = db.Column(db.ForeignKey(u'fa_db_server_type.ID'), nullable=False)
TYPE = db.Column(db.String(10), nullable=False)
IP = db.Column(db.String(20), nullable=False)
PORT = db.Column(db.Integer, nullable=False)
DBNAME = db.Column(db.String(20))
DBUID = db.Column(db.String(20), nullable=False)
PASSWORD = db.Column(db.String(32), nullable=False)
REMARK = db.Column(db.String(500))
DB_LINK = db.Column(db.String(200))
NICKNAME = db.Column(db.String(32))
TO_PATH_M = db.Column(db.String(300))
TO_PATH_D = db.Column(db.String(300))
fa_db_server_type = db.relationship(u'FaDbServerType', primaryjoin='FaDbServer.DB_TYPE_ID == FaDbServerType.ID', backref=u'fa_db_servers')
class FaDbServerType(db.Model):
__tablename__ = 'fa_db_server_type'
ID = db.Column(db.Integer, primary_key=True)
NAME = db.Column(db.String(20))
REMARK = db.Column(db.String(500))
class FaDistrict(db.Model):
__tablename__ = 'fa_district'
ID = db.Column(db.Integer, primary_key=True)
PARENT_ID = db.Column(db.ForeignKey(u'fa_district.ID'))
NAME = db.Column(db.String(255), nullable=False)
CODE = db.Column(db.String(50))
IN_USE = db.Column(db.Numeric(1, 0), nullable=False)
LEVEL_ID = db.Column(db.Integer, nullable=False)
ID_PATH = db.Column(db.String(200))
REGION = db.Column(db.String(10), nullable=False)
parent = db.relationship(u'FaDistrict', remote_side=[ID], primaryjoin='FaDistrict.PARENT_ID == FaDistrict.ID', backref=u'fa_districts')
fa_user = db.relationship(u'FaUser', secondary=u'fa_user_district', backref=u'fa_districts')
class FaDynasty(db.Model):
__tablename__ = 'fa_dynasty'
ID = db.Column(db.Integer, primary_key=True)
NAME = db.Column(db.String(20), nullable=False)
class FaElder(db.Model):
__tablename__ = 'fa_elder'
ID = db.Column(db.Integer, primary_key=True)
FAMILY_ID = db.Column(db.ForeignKey(u'fa_family.ID'))
NAME = db.Column(db.String(2), nullable=False)
SORT = db.Column(db.Integer)
fa_family = db.relationship(u'FaFamily', primaryjoin='FaElder.FAMILY_ID == FaFamily.ID', backref=u'fa_elders')
t_fa_event_files = db.Table(
'fa_event_files',
db.Column('EVENT_ID', db.ForeignKey(u'fa_user_event.ID'), primary_key=True, nullable=False),
db.Column('FILES_ID', db.ForeignKey(u'fa_files.ID'), primary_key=True, nullable=False)
)
class FaExportLog(db.Model):
__tablename__ = 'fa_export_log'
ID = db.Column(db.Integer, primary_key=True)
USER_ID = db.Column(db.Integer)
LOGIN_NAME = db.Column(db.String(50))
NAME = db.Column(db.String(50))
SQL_CONTENT = db.Column(db.Text)
EXPORT_TIME = db.Column(db.DateTime)
REMARK = db.Column(db.String(100))
class FaFamily(db.Model):
__tablename__ = 'fa_family'
ID = db.Column(db.Integer, primary_key=True)
NAME = db.Column(db.String(20), nullable=False)
class FaFile(db.Model):
__tablename__ = 'fa_files'
ID = db.Column(db.Integer, primary_key=True)
NAME = db.Column(db.String(50), nullable=False)
PATH = db.Column(db.String(200), nullable=False)
USER_ID = db.Column(db.Integer)
LENGTH = db.Column(db.Integer, nullable=False)
UPLOAD_TIME = db.Column(db.DateTime)
REMARK = db.Column(db.String(2000))
URL = db.Column(db.String(254))
FILE_TYPE = db.Column(db.String(50))
fa_task_flow_handle = db.relationship(u'FaTaskFlowHandle', secondary=u'fa_task_flow_handle_files', backref=u'fa_files')
class FaFlow(db.Model):
__tablename__ = 'fa_flow'
ID = db.Column(db.Integer, primary_key=True)
NAME = db.Column(db.String(100), nullable=False)
FLOW_TYPE = db.Column(db.String(20), nullable=False)
REMARK = db.Column(db.String(100))
X_Y = db.Column(db.String(500))
REGION = db.Column(db.String(10))
class FaFlowFlownode(db.Model):
__tablename__ = 'fa_flow_flownode'
ID = db.Column(db.Integer, primary_key=True)
NAME = db.Column(db.String(100), nullable=False)
HANDLE_URL = db.Column(db.String(200))
SHOW_URL = db.Column(db.String(200))
class FaFlowFlownodeFlow(db.Model):
__tablename__ = 'fa_flow_flownode_flow'
ID = db.Column(db.Integer, primary_key=True)
FLOW_ID = db.Column(db.ForeignKey(u'fa_flow.ID'), nullable=False)
FROM_FLOWNODE_ID = db.Column(db.ForeignKey(u'fa_flow_flownode.ID'), nullable=False)
TO_FLOWNODE_ID = db.Column(db.Integer, nullable=False)
HANDLE = db.Column(db.Numeric(1, 0), nullable=False)
ASSIGNER = db.Column(db.Numeric(1, 0), nullable=False)
STATUS = db.Column(db.String(20))
REMARK = db.Column(db.String(20))
EXPIRE_HOUR = db.Column(db.Integer, nullable=False)
fa_flow = db.relationship(u'FaFlow', primaryjoin='FaFlowFlownodeFlow.FLOW_ID == FaFlow.ID', backref=u'fa_flow_flownode_flows')
fa_flow_flownode = db.relationship(u'FaFlowFlownode', primaryjoin='FaFlowFlownodeFlow.FROM_FLOWNODE_ID == FaFlowFlownode.ID', backref=u'fa_flow_flownode_flows')
fa_role = db.relationship(u'FaRole', secondary=u'fa_flow_flownode_role', backref=u'fa_flow_flownode_flows')
t_fa_flow_flownode_role = db.Table(
'fa_flow_flownode_role',
db.Column('FLOW_ID', db.ForeignKey(u'fa_flow_flownode_flow.ID'), primary_key=True, nullable=False),
db.Column('ROLE_ID', db.ForeignKey(u'fa_role.ID'), primary_key=True, nullable=False)
)
class FaFunction(db.Model):
__tablename__ = 'fa_function'
ID = db.Column(db.Integer, primary_key=True)
REMARK = db.Column(db.String(100))
FULL_NAME = db.Column(db.String(100))
NAMESPACE = db.Column(db.String(100))
CLASS_NAME = db.Column(db.String(100))
METHOD_NAME = db.Column(db.String(100))
DLL_NAME = db.Column(db.String(100))
XML_NOTE = db.Column(db.String(254))
fa_role = db.relationship(u'FaRole', secondary=u'fa_role_function', backref=u'fa_functions')
class FaLog(db.Model):
__tablename__ = 'fa_log'
ID = db.Column(db.Integer, primary_key=True)
ADD_TIME = db.Column(db.DateTime, nullable=False)
MODULE_NAME = db.Column(db.String(100), nullable=False)
USER_ID = db.Column(db.Integer, nullable=False)
class FaLogin(db.Model):
__tablename__ = 'fa_login'
ID = db.Column(db.Integer, primary_key=True)
LOGIN_NAME = db.Column(db.String(20))
PASSWORD = db.Column(db.String(255))
PHONE_NO = db.Column(db.String(20))
EMAIL_ADDR = db.Column(db.String(255))
VERIFY_CODE = db.Column(db.String(10))
VERIFY_TIME = db.Column(db.DateTime)
IS_LOCKED = db.Column(db.Integer)
PASS_UPDATE_DATE = db.Column(db.DateTime)
LOCKED_REASON = db.Column(db.String(255))
FAIL_COUNT = db.Column(db.Integer)
fa_oauth = db.relationship(u'FaOauth', secondary=u'fa_oauth_login', backref=u'fa_logins')
class FaLoginHistory(db.Model):
__tablename__ = 'fa_login_history'
ID = db.Column(db.Integer, primary_key=True)
USER_ID = db.Column(db.Integer)
LOGIN_TIME = db.Column(db.DateTime)
LOGIN_HOST = db.Column(db.String(255))
LOGOUT_TIME = db.Column(db.DateTime)
LOGIN_HISTORY_TYPE = db.Column(db.Integer)
MESSAGE = db.Column(db.String(255))
class FaMessage(db.Model):
__tablename__ = 'fa_message'
ID = db.Column(db.Integer, primary_key=True)
MESSAGE_TYPE_ID = db.Column(db.ForeignKey(u'fa_message_type.ID'))
KEY_ID = db.Column(db.Integer)
TITLE = db.Column(db.String(100))
CONTENT = db.Column(db.String(500))
CREATE_TIME = db.Column(db.DateTime)
CREATE_USERNAME = db.Column(db.String(50))
CREATE_USERID = db.Column(db.Integer)
STATUS = db.Column(db.String(10))
PUSH_TYPE = db.Column(db.String(10))
DISTRICT_ID = db.Column(db.Integer)
ALL_ROLE_ID = db.Column(db.String(500))
fa_message_type = db.relationship(u'FaMessageType', primaryjoin='FaMessage.MESSAGE_TYPE_ID == FaMessageType.ID', backref=u'fa_messages')
class FaMessageType(db.Model):
__tablename__ = 'fa_message_type'
ID = db.Column(db.Integer, primary_key=True)
NAME = db.Column(db.String(50))
TABLE_NAME = db.Column(db.String(50))
IS_USE = db.Column(db.Integer)
REMARK = db.Column(db.String(500))
class FaModule(db.Model):
__tablename__ = 'fa_module'
ID = db.Column(db.Integer, primary_key=True)
PARENT_ID = db.Column(db.ForeignKey(u'fa_module.ID'))
NAME = db.Column(db.String(60))
LOCATION = db.Column(db.String(2000))
CODE = db.Column(db.String(20))
IS_DEBUG = db.Column(db.Numeric(1, 0), nullable=False)
IS_HIDE = db.Column(db.Numeric(1, 0), nullable=False)
SHOW_ORDER = db.Column(db.Numeric(2, 0), nullable=False)
DESCRIPTION = db.Column(db.String(2000))
IMAGE_URL = db.Column(db.String(2000))
DESKTOP_ROLE = db.Column(db.String(200))
W = db.Column(db.Integer)
H = db.Column(db.Integer)
parent = db.relationship(u'FaModule', remote_side=[ID], primaryjoin='FaModule.PARENT_ID == FaModule.ID', backref=u'fa_modules')
fa_role = db.relationship(u'FaRole', secondary=u'fa_role_module', backref=u'fa_modules')
fa_user = db.relationship(u'FaUser', secondary=u'fa_user_module', backref=u'fa_modules')
class FaOauth(db.Model):
__tablename__ = 'fa_oauth'
ID = db.Column(db.Integer, primary_key=True)
NAME = db.Column(db.String(50))
REG_URL = db.Column(db.String(500))
LOGIN_URL = db.Column(db.String(500))
REMARK = db.Column(db.String(500))
t_fa_oauth_login = db.Table(
'fa_oauth_login',
db.Column('OAUTH_ID', db.ForeignKey(u'fa_oauth.ID'), primary_key=True, nullable=False),
db.Column('LOGIN_ID', db.ForeignKey(u'fa_login.ID'), primary_key=True, nullable=False)
)
class FaQuery(db.Model):
__tablename__ = 'fa_query'
ID = db.Column(db.Integer, primary_key=True)
NAME = db.Column(db.String(50), nullable=False)
CODE = db.Column(db.String(20), nullable=False)
AUTO_LOAD = db.Column(db.Numeric(1, 0), nullable=False)
PAGE_SIZE = db.Column(db.Integer, nullable=False)
SHOW_CHECKBOX = db.Column(db.Numeric(1, 0), nullable=False)
IS_DEBUG = db.Column(db.Numeric(1, 0), nullable=False)
FILTR_LEVEL = db.Column(db.Numeric(1, 0))
DB_SERVER_ID = db.Column(db.Integer)
QUERY_CONF = db.Column(db.Text)
QUERY_CFG_JSON = db.Column(db.Text)
IN_PARA_JSON = db.Column(db.Text)
JS_STR = db.Column(db.Text)
ROWS_BTN = db.Column(db.Text)
HEARD_BTN = db.Column(db.Text)
REPORT_SCRIPT = db.Column(db.Text)
CHARTS_CFG = db.Column(db.Text)
CHARTS_TYPE = db.Column(db.String(50))
FILTR_STR = db.Column(db.Text)
REMARK = db.Column(db.Text)
NEW_DATA = db.Column(db.String(50))
class FaRole(db.Model):
__tablename__ = 'fa_role'
ID = db.Column(db.Integer, primary_key=True)
NAME = db.Column(db.String(80))
REMARK = db.Column(db.String(255))
TYPE = db.Column(db.Integer)
fa_user = db.relationship(u'FaUser', secondary=u'fa_user_role', backref=u'fa_roles')
class FaRoleConfig(db.Model):
__tablename__ = 'fa_role_config'
ID = db.Column(db.Integer, primary_key=True)
ROLE_ID = db.Column(db.ForeignKey(u'fa_role.ID'), nullable=False)
TYPE = db.Column(db.String(10))
NAME = db.Column(db.String(50), nullable=False)
VALUE = db.Column(db.String(300))
REMARK = db.Column(db.String(500))
fa_role = db.relationship(u'FaRole', primaryjoin='FaRoleConfig.ROLE_ID == FaRole.ID', backref=u'fa_role_configs')
t_fa_role_function = db.Table(
'fa_role_function',
db.Column('FUNCTION_ID', db.ForeignKey(u'fa_function.ID'), primary_key=True, nullable=False),
db.Column('ROLE_ID', db.ForeignKey(u'fa_role.ID'), primary_key=True, nullable=False)
)
t_fa_role_module = db.Table(
'fa_role_module',
db.Column('ROLE_ID', db.ForeignKey(u'fa_role.ID'), primary_key=True, nullable=False),
db.Column('MODULE_ID', db.ForeignKey(u'fa_module.ID'), primary_key=True, nullable=False)
)
class FaRoleQueryAuthority(db.Model):
__tablename__ = 'fa_role_query_authority'
ROLE_ID = db.Column(db.ForeignKey(u'fa_role.ID'), primary_key=True, nullable=False)
QUERY_ID = db.Column(db.ForeignKey(u'fa_query.ID'), primary_key=True, nullable=False)
NO_AUTHORITY = db.Column(db.String(200))
fa_query = db.relationship(u'FaQuery', primaryjoin='FaRoleQueryAuthority.QUERY_ID == FaQuery.ID', backref=u'fa_role_query_authorities')
fa_role = db.relationship(u'FaRole', primaryjoin='FaRoleQueryAuthority.ROLE_ID == FaRole.ID', backref=u'fa_role_query_authorities')
class FaScript(db.Model):
__tablename__ = 'fa_script'
ID = db.Column(db.Integer, primary_key=True)
CODE = db.Column(db.String(20), nullable=False)
NAME = db.Column(db.String(255), nullable=False)
BODY_TEXT = db.Column(db.Text, nullable=False)
BODY_HASH = db.Column(db.String(255), nullable=False)
RUN_WHEN = db.Column(db.String(30))
RUN_ARGS = db.Column(db.String(255))
RUN_DATA = db.Column(db.String(20), nullable=False, server_default=db.FetchedValue())
STATUS = db.Column(db.String(10))
DISABLE_REASON = db.Column(db.String(50))
SERVICE_FLAG = db.Column(db.String(50))
REGION = db.Column(db.String(10))
IS_GROUP = db.Column(db.Numeric(1, 0), nullable=False)
class FaScriptGroupList(db.Model):
__tablename__ = 'fa_script_group_list'
SCRIPT_ID = db.Column(db.Integer, primary_key=True, nullable=False)
GROUP_ID = db.Column(db.ForeignKey(u'fa_script.ID'), primary_key=True, nullable=False)
ORDER_INDEX = db.Column(db.Integer, nullable=False)
fa_script = db.relationship(u'FaScript', primaryjoin='FaScriptGroupList.GROUP_ID == FaScript.ID', backref=u'fa_script_group_lists')
class FaScriptTask(db.Model):
__tablename__ = 'fa_script_task'
ID = db.Column(db.Integer, primary_key=True)
SCRIPT_ID = db.Column(db.ForeignKey(u'fa_script.ID'), nullable=False)
BODY_TEXT = db.Column(db.Text, nullable=False)
BODY_HASH = db.Column(db.String(255), nullable=False)
RUN_STATE = db.Column(db.String(10), nullable=False, server_default=db.FetchedValue())
RUN_WHEN = db.Column(db.String(30))
RUN_ARGS = db.Column(db.String(255))
RUN_DATA = db.Column(db.String(20), nullable=False, server_default=db.FetchedValue())
LOG_TYPE = db.Column(db.Numeric(1, 0), server_default=db.FetchedValue())
DSL_TYPE = db.Column(db.String(255))
RETURN_CODE = db.Column(db.String(10), server_default=db.FetchedValue())
START_TIME = db.Column(db.DateTime)
END_TIME = db.Column(db.DateTime)
DISABLE_DATE = db.Column(db.DateTime)
DISABLE_REASON = db.Column(db.String(50))
SERVICE_FLAG = db.Column(db.String(50))
REGION = db.Column(db.String(10))
GROUP_ID = db.Column(db.Integer)
fa_script = db.relationship(u'FaScript', primaryjoin='FaScriptTask.SCRIPT_ID == FaScript.ID', backref=u'fa_script_tasks')
class FaScriptTaskLog(db.Model):
__tablename__ = 'fa_script_task_log'
ID = db.Column(db.Integer, primary_key=True)
SCRIPT_TASK_ID = db.Column(db.ForeignKey(u'fa_script_task.ID'), nullable=False)
LOG_TIME = db.Column(db.DateTime, nullable=False)
LOG_TYPE = db.Column(db.Numeric(1, 0), nullable=False, server_default=db.FetchedValue())
MESSAGE = db.Column(db.Text)
SQL_TEXT = db.Column(db.Text)
fa_script_task = db.relationship(u'FaScriptTask', primaryjoin='FaScriptTaskLog.SCRIPT_TASK_ID == FaScriptTask.ID', backref=u'fa_script_task_logs')
class FaSmsSend(db.Model):
__tablename__ = 'fa_sms_send'
GUID = db.Column(db.String(32), primary_key=True)
MESSAGE_ID = db.Column(db.Integer)
PHONE_NO = db.Column(db.String(50), nullable=False)
ADD_TIME = db.Column(db.DateTime)
SEND_TIME = db.Column(db.DateTime)
CONTENT = db.Column(db.String(500), nullable=False)
STAUTS = db.Column(db.String(15))
TRY_NUM = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue())
class FaTask(db.Model):
__tablename__ = 'fa_task'
ID = db.Column(db.Integer, primary_key=True)
FLOW_ID = db.Column(db.ForeignKey(u'fa_flow.ID'))
TASK_NAME = db.Column(db.String(50))
CREATE_TIME = db.Column(db.DateTime)
CREATE_USER = db.Column(db.Integer)
CREATE_USER_NAME = db.Column(db.String(50))
STATUS = db.Column(db.String(50))
STATUS_TIME = db.Column(db.DateTime)
REMARK = db.Column(db.Text)
REGION = db.Column(db.String(10))
KEY_ID = db.Column(db.String(32))
START_TIME = db.Column(db.DateTime)
END_TIME = db.Column(db.DateTime)
DEAL_TIME = db.Column(db.DateTime)
ROLE_ID_STR = db.Column(db.String(200))
fa_flow = db.relationship(u'FaFlow', primaryjoin='FaTask.FLOW_ID == FaFlow.ID', backref=u'fa_tasks')
class FaTaskFlow(db.Model):
__tablename__ = 'fa_task_flow'
ID = db.Column(db.Integer, primary_key=True)
PARENT_ID = db.Column(db.ForeignKey(u'fa_task_flow.ID'))
TASK_ID = db.Column(db.ForeignKey(u'fa_task.ID'), nullable=False)
LEVEL_ID = db.Column(db.Integer)
FLOWNODE_ID = db.Column(db.Integer)
EQUAL_ID = db.Column(db.Integer)
IS_HANDLE = db.Column(db.Integer, nullable=False)
NAME = db.Column(db.String(100))
HANDLE_URL = db.Column(db.String(200))
SHOW_URL = db.Column(db.String(200))
EXPIRE_TIME = db.Column(db.DateTime)
START_TIME = db.Column(db.DateTime, nullable=False)
DEAL_STATUS = db.Column(db.String(50))
ROLE_ID_STR = db.Column(db.String(200))
HANDLE_USER_ID = db.Column(db.Integer)
DEAL_TIME = db.Column(db.DateTime)
ACCEPT_TIME = db.Column(db.DateTime)
parent = db.relationship(u'FaTaskFlow', remote_side=[ID], primaryjoin='FaTaskFlow.PARENT_ID == FaTaskFlow.ID', backref=u'fa_task_flows')
fa_task = db.relationship(u'FaTask', primaryjoin='FaTaskFlow.TASK_ID == FaTask.ID', backref=u'fa_task_flows')
class FaTaskFlowHandle(db.Model):
__tablename__ = 'fa_task_flow_handle'
ID = db.Column(db.Integer, primary_key=True)
TASK_FLOW_ID = db.Column(db.ForeignKey(u'fa_task_flow.ID'), nullable=False)
DEAL_USER_ID = db.Column(db.Integer, nullable=False)
DEAL_USER_NAME = db.Column(db.String(50), nullable=False)
DEAL_TIME = db.Column(db.DateTime, nullable=False)
CONTENT = db.Column(db.String(2000), nullable=False)
fa_task_flow = db.relationship(u'FaTaskFlow', primaryjoin='FaTaskFlowHandle.TASK_FLOW_ID == FaTaskFlow.ID', backref=u'fa_task_flow_handles')
t_fa_task_flow_handle_files = db.Table(
'fa_task_flow_handle_files',
db.Column('FLOW_HANDLE_ID', db.ForeignKey(u'fa_task_flow_handle.ID'), primary_key=True, nullable=False),
db.Column('FILES_ID', db.ForeignKey(u'fa_files.ID'), primary_key=True, nullable=False)
)
class FaTaskFlowHandleUser(db.Model):
__tablename__ = 'fa_task_flow_handle_user'
TASK_FLOW_ID = db.Column(db.ForeignKey(u'fa_task_flow.ID'), primary_key=True, nullable=False)
HANDLE_USER_ID = db.Column(db.Integer, primary_key=True, nullable=False)
fa_task_flow = db.relationship(u'FaTaskFlow', primaryjoin='FaTaskFlowHandleUser.TASK_FLOW_ID == FaTaskFlow.ID', backref=u'fa_task_flow_handle_users')
class FaUpdataLog(db.Model):
__tablename__ = 'fa_updata_log'
ID = db.Column(db.Integer, primary_key=True)
CREATE_TIME = db.Column(db.DateTime)
CREATE_USER_NAME = db.Column(db.String(50))
CREATE_USER_ID = db.Column(db.Integer)
OLD_CONTENT = db.Column(db.Text)
NEW_CONTENT = db.Column(db.Text)
TABLE_NAME = db.Column(db.String(50))
class FaUser(db.Model):
__tablename__ = 'fa_user'
ID = db.Column(db.Integer, primary_key=True)
NAME = db.Column(db.String(80))
LOGIN_NAME = db.Column(db.String(20))
ICON_FILES_ID = db.Column(db.Integer)
DISTRICT_ID = db.Column(db.ForeignKey(u'fa_district.ID'), nullable=False)
IS_LOCKED = db.Column(db.Numeric(1, 0))
CREATE_TIME = db.Column(db.DateTime)
LOGIN_COUNT = db.Column(db.Integer)
LAST_LOGIN_TIME = db.Column(db.DateTime)
LAST_LOGOUT_TIME = db.Column(db.DateTime)
LAST_ACTIVE_TIME = db.Column(db.DateTime)
REMARK = db.Column(db.String(2000))
fa_district = db.relationship(u'FaDistrict', primaryjoin='FaUser.DISTRICT_ID == FaDistrict.ID', backref=u'fa_users')
fa_user_info = db.relationship(u'FaUserInfo', secondary=u'fa_user_friend', backref=u'fa_users', lazy="select")
class FaUserInfo(FaUser):
__tablename__ = 'fa_user_info'
ID = db.Column(db.ForeignKey(u'fa_user.ID'), primary_key=True)
LEVEL_ID = db.Column(db.Integer)
FAMILY_ID = db.Column(db.ForeignKey(u'fa_family.ID'))
ELDER_ID = db.Column(db.ForeignKey(u'fa_elder.ID'))
LEVEL_NAME = db.Column(db.String(2))
FATHER_ID = db.Column(db.ForeignKey(u'fa_user_info.ID'))
MOTHER_ID = db.Column(db.Integer)
BIRTHDAY_TIME = db.Column(db.DateTime)
BIRTHDAY_PLACE = db.Column(db.String(500))
IS_LIVE = db.Column(db.Numeric(1, 0))
DIED_TIME = db.Column(db.DateTime)
DIED_PLACE = db.Column(db.String(500))
SEX = db.Column(db.String(2))
YEARS_TYPE = db.Column(db.String(10))
CONSORT_ID = db.Column(db.Integer)
STATUS = db.Column(db.String(10), nullable=False, server_default=db.FetchedValue())
CREATE_USER_NAME = db.Column(db.String(50), nullable=False, server_default=db.FetchedValue())
CREATE_USER_ID = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue())
UPDATE_TIME = db.Column(db.DateTime, nullable=False, server_default=db.FetchedValue())
UPDATE_USER_NAME = db.Column(db.String(50), nullable=False, server_default=db.FetchedValue())
UPDATE_USER_ID = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue())
fa_elder = db.relationship(u'FaElder', primaryjoin='FaUserInfo.ELDER_ID == FaElder.ID', backref=u'fa_user_infos')
fa_family = db.relationship(u'FaFamily', primaryjoin='FaUserInfo.FAMILY_ID == FaFamily.ID', backref=u'fa_user_infos')
parent = db.relationship(u'FaUserInfo', remote_side=[ID], primaryjoin='FaUserInfo.FATHER_ID == FaUserInfo.ID', backref=u'fa_user_infos')
t_fa_user_district = db.Table(
'fa_user_district',
db.Column('USER_ID', db.ForeignKey(u'fa_user.ID'), primary_key=True, nullable=False),
db.Column('DISTRICT_ID', db.ForeignKey(u'fa_district.ID'), primary_key=True, nullable=False)
)
class FaUserEvent(db.Model):
__tablename__ = 'fa_user_event'
ID = db.Column(db.Integer, primary_key=True)
USER_ID = db.Column(db.ForeignKey(u'fa_user_info.ID'))
NAME = db.Column(db.String(50))
HAPPEN_TIME = db.Column(db.DateTime)
CONTENT = db.Column(db.String(500))
ADDRESS = db.Column(db.String(500))
fa_user_info = db.relationship(u'FaUserInfo', primaryjoin='FaUserEvent.USER_ID == FaUserInfo.ID', backref=u'fa_user_events')
fa_files = db.relationship(u'FaFile', secondary=u'fa_event_files', backref=u'fa_user_events')
t_fa_user_friend = db.Table(
'fa_user_friend',
db.Column('USER_ID', db.ForeignKey(u'fa_user_info.ID'), primary_key=True, nullable=False),
db.Column('FRIEND_ID', db.ForeignKey(u'fa_user.ID'), primary_key=True, nullable=False)
)
class FaUserMessage(db.Model):
__tablename__ = 'fa_user_message'
MESSAGE_ID = db.Column(db.ForeignKey(u'fa_message.ID'), primary_key=True, nullable=False)
USER_ID = db.Column(db.Integer, primary_key=True, nullable=False)
PHONE_NO = db.Column(db.String(20))
STATUS = db.Column(db.String(10))
STATUS_TIME = db.Column(db.DateTime, nullable=False)
REPLY = db.Column(db.String(500))
PUSH_TYPE = db.Column(db.String(10))
fa_message = db.relationship(u'FaMessage', primaryjoin='FaUserMessage.MESSAGE_ID == FaMessage.ID', backref=u'fa_user_messages')
t_fa_user_module = db.Table(
'fa_user_module',
db.Column('USER_ID', db.ForeignKey(u'fa_user.ID'), primary_key=True, nullable=False),
db.Column('MODULE_ID', db.ForeignKey(u'fa_module.ID'), primary_key=True, nullable=False)
)
t_fa_user_role = db.Table(
'fa_user_role',
db.Column('ROLE_ID', db.ForeignKey(u'fa_role.ID'), primary_key=True, nullable=False),
db.Column('USER_ID', db.ForeignKey(u'fa_user.ID'), primary_key=True, nullable=False)
)
class Sequence(db.Model):
__tablename__ = 'sequence'
seq_name = db.Column(db.String(50), primary_key=True)
current_val = db.Column(db.Integer, nullable=False)
increment_val = db.Column(db.Integer, nullable=False, server_default=db.FetchedValue())
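# Illustrative query sketch for these generated models (not part of the
# original module; assumes a Flask application context and populated tables):
#
#     from iSoft.entity.model import FaUserInfo, FaDistrict
#
#     # Children of a family member via the self-referential FATHER_ID link.
#     father = FaUserInfo.query.get(1)
#     children = father.fa_user_infos
#
#     # Users attached to a district through the fa_user_district table.
#     district = FaDistrict.query.get(1)
#     users = district.fa_user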
| bsd-3-clause | 1,656,765,812,792,908,000 | 35.307791 | 164 | 0.679236 | false |
Just-D/chromium-1 | tools/telemetry/telemetry/page/shared_page_state.py | 1 | 19615 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import logging
import os
import shutil
import sys
import tempfile
import zipfile
from catapult_base import cloud_storage
from telemetry.core import exceptions
from telemetry.core import util
from telemetry import decorators
from telemetry.internal.browser import browser_finder
from telemetry.internal.browser import browser_finder_exceptions
from telemetry.internal.browser import browser_info as browser_info_module
from telemetry.internal.platform.profiler import profiler_finder
from telemetry.internal.util import exception_formatter
from telemetry.internal.util import file_handle
from telemetry.page import action_runner as action_runner_module
from telemetry.page import page_test
from telemetry import story
from telemetry.util import wpr_modes
from telemetry.web_perf import timeline_based_measurement
def _PrepareFinderOptions(finder_options, test, device_type):
browser_options = finder_options.browser_options
# Set up user agent.
browser_options.browser_user_agent_type = device_type
test.CustomizeBrowserOptions(finder_options.browser_options)
if finder_options.profiler:
profiler_class = profiler_finder.FindProfiler(finder_options.profiler)
profiler_class.CustomizeBrowserOptions(browser_options.browser_type,
finder_options)
class SharedPageState(story.SharedState):
"""
This class contains all specific logic necessary to run a Chrome browser
benchmark.
"""
_device_type = None
def __init__(self, test, finder_options, story_set):
super(SharedPageState, self).__init__(test, finder_options, story_set)
if isinstance(test, timeline_based_measurement.TimelineBasedMeasurement):
# This is to avoid the cyclic-import caused by timeline_based_page_test.
from telemetry.web_perf import timeline_based_page_test
self._test = timeline_based_page_test.TimelineBasedPageTest(test)
else:
self._test = test
device_type = self._device_type
# TODO(aiolos, nednguyen): Remove this logic of pulling out user_agent_type
# from story_set once all page_set are converted to story_set
# (crbug.com/439512).
def _IsPageSetInstance(s):
# This is needed to avoid importing telemetry.page.page_set which will
# cause cyclic import.
return 'PageSet' == s.__class__.__name__ or 'PageSet' in (
list(c.__name__ for c in s.__class__.__bases__))
if not device_type and _IsPageSetInstance(story_set):
device_type = story_set.user_agent_type
_PrepareFinderOptions(finder_options, self._test, device_type)
self._browser = None
self._finder_options = finder_options
self._possible_browser = self._GetPossibleBrowser(
self._test, finder_options)
# TODO(slamm): Remove _append_to_existing_wpr when replay lifetime changes.
self._append_to_existing_wpr = False
self._first_browser = True
self._did_login_for_current_page = False
self._current_page = None
self._current_tab = None
self._migrated_profile = None
self._pregenerated_profile_archive = None
self._test.SetOptions(self._finder_options)
@property
def browser(self):
return self._browser
def _FindBrowser(self, finder_options):
possible_browser = browser_finder.FindBrowser(finder_options)
if not possible_browser:
raise browser_finder_exceptions.BrowserFinderException(
'No browser found.\n\nAvailable browsers:\n%s\n' %
'\n'.join(browser_finder.GetAllAvailableBrowserTypes(finder_options)))
return possible_browser
def _GetPossibleBrowser(self, test, finder_options):
"""Return a possible_browser with the given options for |test|. """
possible_browser = self._FindBrowser(finder_options)
finder_options.browser_options.browser_type = (
possible_browser.browser_type)
(enabled, msg) = decorators.IsEnabled(test, possible_browser)
if (not enabled and
not finder_options.run_disabled_tests):
logging.warning(msg)
logging.warning('You are trying to run a disabled test.')
logging.warning('Pass --also-run-disabled-tests to squelch this message.')
sys.exit(0)
if possible_browser.IsRemote():
possible_browser.RunRemote()
sys.exit(0)
return possible_browser
def DidRunStory(self, results):
if self._finder_options.profiler:
self._StopProfiling(results)
# We might hang while trying to close the connection, and need to guarantee
# the page will get cleaned up to avoid future tests failing in weird ways.
try:
if self._current_tab and self._current_tab.IsAlive():
self._current_tab.CloseConnections()
except Exception:
if self._current_tab:
self._current_tab.Close()
finally:
if self._current_page.credentials and self._did_login_for_current_page:
self.browser.credentials.LoginNoLongerNeeded(
self._current_tab, self._current_page.credentials)
if self._test.StopBrowserAfterPage(self.browser, self._current_page):
self._StopBrowser()
self._current_page = None
self._current_tab = None
@property
def platform(self):
return self._possible_browser.platform
def _PrepareWpr(self, network_controller, archive_path,
make_javascript_deterministic):
browser_options = self._finder_options.browser_options
if self._finder_options.use_live_sites:
browser_options.wpr_mode = wpr_modes.WPR_OFF
elif browser_options.wpr_mode != wpr_modes.WPR_RECORD:
browser_options.wpr_mode = (
wpr_modes.WPR_REPLAY
if archive_path and os.path.isfile(archive_path)
else wpr_modes.WPR_OFF)
# Replay's life-cycle is tied to the browser. Start and Stop are handled by
# platform_backend.DidCreateBrowser and platform_backend.WillCloseBrowser,
# respectively.
# TODO(slamm): Update life-cycle comment with https://crbug.com/424777 fix.
wpr_mode = browser_options.wpr_mode
if self._append_to_existing_wpr and wpr_mode == wpr_modes.WPR_RECORD:
wpr_mode = wpr_modes.WPR_APPEND
network_controller.SetReplayArgs(
archive_path, wpr_mode, browser_options.netsim,
browser_options.extra_wpr_args, make_javascript_deterministic)
def _StartBrowser(self, page):
assert self._browser is None
self._possible_browser.SetCredentialsPath(page.credentials_path)
self._test.WillStartBrowser(self.platform)
self._browser = self._possible_browser.Create(self._finder_options)
self._test.DidStartBrowser(self.browser)
if self._first_browser:
self._first_browser = False
self.browser.credentials.WarnIfMissingCredentials(page)
logging.info('OS: %s %s',
self.platform.GetOSName(),
self.platform.GetOSVersionName())
if self.browser.supports_system_info:
system_info = self.browser.GetSystemInfo()
if system_info.model_name:
logging.info('Model: %s', system_info.model_name)
if system_info.gpu:
for i, device in enumerate(system_info.gpu.devices):
logging.info('GPU device %d: %s', i, device)
if system_info.gpu.aux_attributes:
logging.info('GPU Attributes:')
for k, v in sorted(system_info.gpu.aux_attributes.iteritems()):
logging.info(' %-20s: %s', k, v)
if system_info.gpu.feature_status:
logging.info('Feature Status:')
for k, v in sorted(system_info.gpu.feature_status.iteritems()):
logging.info(' %-20s: %s', k, v)
if system_info.gpu.driver_bug_workarounds:
logging.info('Driver Bug Workarounds:')
for workaround in system_info.gpu.driver_bug_workarounds:
logging.info(' %s', workaround)
else:
logging.info('No GPU devices')
else:
logging.warning('System info not supported')
def WillRunStory(self, page):
if self._ShouldDownloadPregeneratedProfileArchive():
self._DownloadPregeneratedProfileArchive()
if self._ShouldMigrateProfile():
self._MigratePregeneratedProfile()
page_set = page.page_set
self._current_page = page
if self._test.RestartBrowserBeforeEachPage() or page.startup_url:
self._StopBrowser()
started_browser = not self.browser
self._PrepareWpr(self.platform.network_controller,
page_set.WprFilePathForStory(page),
page.make_javascript_deterministic)
if self.browser:
# Set new credential path for browser.
self.browser.credentials.credentials_path = page.credentials_path
self.platform.network_controller.UpdateReplayForExistingBrowser()
else:
self._StartBrowser(page)
if self.browser.supports_tab_control and self._test.close_tabs_before_run:
# Create a tab if there's none.
if len(self.browser.tabs) == 0:
self.browser.tabs.New()
# Ensure only one tab is open, unless the test is a multi-tab test.
if not self._test.is_multi_tab_test:
while len(self.browser.tabs) > 1:
self.browser.tabs[-1].Close()
# Must wait for tab to commit otherwise it can commit after the next
# navigation has begun and RenderFrameHostManager::DidNavigateMainFrame()
# will cancel the next navigation because it's pending. This manifests as
# the first navigation in a PageSet freezing indefinitely because the
# navigation was silently cancelled when |self.browser.tabs[0]| was
# committed. Only do this when we just started the browser, otherwise
# there are cases where previous pages in a PageSet never complete
# loading so we'll wait forever.
if started_browser:
self.browser.tabs[0].WaitForDocumentReadyStateToBeComplete()
# Start profiling if needed.
if self._finder_options.profiler:
self._StartProfiling(self._current_page)
def CanRunStory(self, page):
return self.CanRunOnBrowser(browser_info_module.BrowserInfo(self.browser),
page)
def CanRunOnBrowser(self, browser_info,
page): # pylint: disable=unused-argument
"""Override this to return whether the browser brought up by this state
instance is suitable for running the given page.
Args:
browser_info: an instance of telemetry.core.browser_info.BrowserInfo
page: an instance of telemetry.page.Page
"""
return True
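# A minimal sketch of overriding CanRunOnBrowser in a subclass (the
# HasWebGLSupport check is an assumption; substitute whatever browser_info
# query your pages actually require):
#
#   class SharedWebGLPageState(SharedPageState):
#     def CanRunOnBrowser(self, browser_info, page):
#       return browser_info.HasWebGLSupport()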
def _PreparePage(self):
self._current_tab = self._test.TabForPage(self._current_page, self.browser)
if self._current_page.is_file:
self.browser.SetHTTPServerDirectories(
self._current_page.page_set.serving_dirs |
set([self._current_page.serving_dir]))
if self._current_page.credentials:
if not self.browser.credentials.LoginNeeded(
self._current_tab, self._current_page.credentials):
raise page_test.Failure(
'Login as ' + self._current_page.credentials + ' failed')
self._did_login_for_current_page = True
if self._test.clear_cache_before_each_run:
self._current_tab.ClearCache(force=True)
def _ImplicitPageNavigation(self):
"""Executes the implicit navigation that occurs for every page iteration.
This function will be called once per page before any actions are executed.
"""
self._test.WillNavigateToPage(self._current_page, self._current_tab)
self._test.RunNavigateSteps(self._current_page, self._current_tab)
self._test.DidNavigateToPage(self._current_page, self._current_tab)
def RunStory(self, results):
try:
self._PreparePage()
self._ImplicitPageNavigation()
action_runner = action_runner_module.ActionRunner(
self._current_tab, skip_waits=self._current_page.skip_waits)
self._current_page.RunPageInteractions(action_runner)
self._test.ValidateAndMeasurePage(
self._current_page, self._current_tab, results)
except exceptions.Error:
if self._test.is_multi_tab_test:
# Avoid trying to recover from an unknown multi-tab state.
exception_formatter.PrintFormattedException(
msg='Telemetry Error during multi tab test:')
raise page_test.MultiTabTestAppCrashError
raise
def TearDownState(self):
if self._migrated_profile:
shutil.rmtree(self._migrated_profile)
self._migrated_profile = None
self._StopBrowser()
def _StopBrowser(self):
if self._browser:
self._browser.Close()
self._browser = None
# Restarting the state will also restart the wpr server. If we're
# recording, we need to continue adding into the same wpr archive,
# not overwrite it.
self._append_to_existing_wpr = True
def _StartProfiling(self, page):
output_file = os.path.join(self._finder_options.output_dir,
page.file_safe_name)
is_repeating = (self._finder_options.page_repeat != 1 or
self._finder_options.pageset_repeat != 1)
if is_repeating:
output_file = util.GetSequentialFileName(output_file)
self.browser.profiling_controller.Start(
self._finder_options.profiler, output_file)
def _StopProfiling(self, results):
if self.browser:
profiler_files = self.browser.profiling_controller.Stop()
for f in profiler_files:
if os.path.isfile(f):
results.AddProfilingFile(self._current_page,
file_handle.FromFilePath(f))
def _ShouldMigrateProfile(self):
return not self._migrated_profile
def _MigrateProfile(self, finder_options, found_browser,
initial_profile, final_profile):
"""Migrates a profile to be compatible with a newer version of Chrome.
Launching Chrome with the old profile will perform the migration.
"""
# Save the current input and output profiles.
saved_input_profile = finder_options.browser_options.profile_dir
saved_output_profile = finder_options.output_profile_path
# Set the input and output profiles.
finder_options.browser_options.profile_dir = initial_profile
finder_options.output_profile_path = final_profile
# Launch the browser, then close it.
browser = found_browser.Create(finder_options)
browser.Close()
# Load the saved input and output profiles.
finder_options.browser_options.profile_dir = saved_input_profile
finder_options.output_profile_path = saved_output_profile
def _MigratePregeneratedProfile(self):
"""Migrates the pregenerated profile by launching Chrome with it.
On success, updates self._migrated_profile and
self._finder_options.browser_options.profile_dir with the directory of the
migrated profile.
"""
self._migrated_profile = tempfile.mkdtemp()
logging.info("Starting migration of pregenerated profile to %s",
self._migrated_profile)
pregenerated_profile = self._finder_options.browser_options.profile_dir
possible_browser = self._FindBrowser(self._finder_options)
self._MigrateProfile(self._finder_options, possible_browser,
pregenerated_profile, self._migrated_profile)
self._finder_options.browser_options.profile_dir = self._migrated_profile
logging.info("Finished migration of pregenerated profile to %s",
self._migrated_profile)
def GetPregeneratedProfileArchive(self):
return self._pregenerated_profile_archive
def SetPregeneratedProfileArchive(self, archive):
"""
Benchmarks can set a pre-generated profile archive to indicate that when
Chrome is launched, it should have a --user-data-dir set to the
pregenerated profile, rather than to an empty profile.
If the benchmark is invoked with the option --profile-dir=<dir>, that
option overrides this value.
"""
self._pregenerated_profile_archive = archive
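# Illustrative benchmark-side usage (the archive name is hypothetical; it is
# resolved relative to tools/perf/generated_profiles/<target_os> and fetched
# from cloud storage by _DownloadPregeneratedProfileArchive below):
#
#   shared_state.SetPregeneratedProfileArchive('small_profile.zip')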
def _ShouldDownloadPregeneratedProfileArchive(self):
"""Whether to download a pre-generated profile archive."""
# There is no pre-generated profile archive.
if not self.GetPregeneratedProfileArchive():
return False
# If profile dir is specified on command line, use that instead.
if self._finder_options.browser_options.profile_dir:
logging.warning("Profile directory specified on command line: %s, this"
"overrides the benchmark's default profile directory.",
self._finder_options.browser_options.profile_dir)
return False
# If the browser is remote, a local download has no effect.
if self._possible_browser.IsRemote():
return False
return True
def _DownloadPregeneratedProfileArchive(self):
"""Download and extract the profile directory archive if one exists.
On success, updates self._finder_options.browser_options.profile_dir with
the directory of the extracted profile.
"""
# Download profile directory from cloud storage.
test_data_dir = os.path.join(util.GetChromiumSrcDir(), 'tools', 'perf',
'generated_profiles',
self._possible_browser.target_os)
archive_name = self.GetPregeneratedProfileArchive()
generated_profile_archive_path = os.path.normpath(
os.path.join(test_data_dir, archive_name))
try:
cloud_storage.GetIfChanged(generated_profile_archive_path,
cloud_storage.PUBLIC_BUCKET)
except (cloud_storage.CredentialsError,
cloud_storage.PermissionError) as e:
if os.path.exists(generated_profile_archive_path):
# If the profile directory archive exists, assume the user has their
# own local copy and simply warn.
logging.warning('Could not download Profile archive: %s',
generated_profile_archive_path)
else:
# If the archive profile directory doesn't exist, this is fatal.
logging.error('Can not run without required profile archive: %s. '
'If you believe you have credentials, follow the '
'instructions below.',
generated_profile_archive_path)
logging.error(str(e))
sys.exit(-1)
# Check to make sure the zip file exists.
if not os.path.isfile(generated_profile_archive_path):
raise Exception("Profile directory archive not downloaded: ",
generated_profile_archive_path)
# The location to extract the profile into.
extracted_profile_dir_path = (
os.path.splitext(generated_profile_archive_path)[0])
# Unzip profile directory.
with zipfile.ZipFile(generated_profile_archive_path) as f:
try:
f.extractall(os.path.dirname(generated_profile_archive_path))
except Exception as e:
# Cleanup any leftovers from unzipping.
if os.path.exists(extracted_profile_dir_path):
shutil.rmtree(extracted_profile_dir_path)
logging.error("Error extracting profile directory zip file: %s", e)
sys.exit(-1)
# Run with freshly extracted profile directory.
logging.info("Using profile archive directory: %s",
extracted_profile_dir_path)
self._finder_options.browser_options.profile_dir = (
extracted_profile_dir_path)
class SharedMobilePageState(SharedPageState):
_device_type = 'mobile'
class SharedDesktopPageState(SharedPageState):
_device_type = 'desktop'
class SharedTabletPageState(SharedPageState):
_device_type = 'tablet'
class Shared10InchTabletPageState(SharedPageState):
_device_type = 'tablet_10_inch'
| bsd-3-clause | 8,667,519,244,234,333,000 | 39.277207 | 80 | 0.69248 | false |
jreades/starspy | stars/gui/control.py | 1 | 16188 | import wx
import wx.aui
from wx.py.shell import Shell
import mapview_xrc
import stars
from stars.visualization.wxStars import wxCanvas
from stars.visualization.wxStars import wxCanvasTools
from stars.visualization.mapModels import MapModel, CanvasModel
from stars.visualization import layers
from tableViewer import TableViewer
from layerControl import LayersControl
import pysal
import numpy
import os
import json
DEBUG = True
COLOR_SAMPLE_WIDTH = 20
COLOR_SAMPLE_HEIGHT = 20
class StatusTool(wxCanvasTools.wxCanvasControl):
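# Canvas control that echoes the cursor position: it converts the mouse
# event's pixel coordinates to world coordinates and writes them into the
# given field of the wx status bar.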
def __init__(self,wx_status_bar,status_field,enabled=True):
self.status = wx_status_bar
self.field = status_field
wxCanvasTools.wxCanvasControl.__init__(self,enabled)
def _onEvent(self,evt):
x,y = self.canvas.model.pixel_to_world(*evt.Position)
self.status.SetStatusText("%f, %f"%(x,y),self.field)
class layerPropFrame(mapview_xrc.xrcLayerPropFrame):
def __init__(self,parent,layer):
stars.remapEvtsToDispatcher(self,self.evtDispatch)
mapview_xrc.xrcLayerPropFrame.__init__(self,parent)
self.Bind(wx.EVT_CLOSE,self.close)
self.layer = layer
#layer.addListener(self.update)
self.update(layer)
self.dispatch = d = {}
d['classificationApply'] = self.run
d['eventsApplyButton'] = self.events_set
d['eventEventsTable'] = self.events_table_set
d['animateButton'] = self.animate
d['animateSlider'] = self.animateMan
def evtDispatch(self,evtName,evt):
evtName,widgetName = evtName.rsplit('_',1)
if widgetName in self.dispatch:
self.dispatch[widgetName](evtName,evt)
else:
if DEBUG: print "not implemented:", evtName,widgetName
def update(self,mdl):
self.classificationAttribute.SetItems(mdl.data_table.header)
self.classificationAttribute.Select(0)
self.classificationMethod.SetItems(pysal.esda.mapclassify.kmethods.keys())
self.classificationMethod.Select(0)
self.classificationClasses.SetItems(map(str,range(3,11)))
self.classificationClasses.Select(2)
if type(self.layer) == layers.RegionLayer:
evtTables = [t.meta['title'] for t in self.layer.table._db.event_tables]
self.eventEventsTable.SetItems(evtTables)
self.eventRegionsJoinField.SetItems(self.layer.table.meta['header'])
if hasattr(mdl.table,'_evtTable'):
print "has evts!"
def events_table_set(self,evtName=None,evt=None,value=None):
tbl_id = self.eventEventsTable.GetSelection()
if tbl_id >= 0:
evtTable = self.layer.table._db.event_tables[tbl_id]
self.eventEventsJoinField.SetItems(evtTable.meta['header'])
def animateMan(self,evtName=None,evt=None,value=None):
n = self.layer.num_periods
if self.animateSlider.GetMax() != n:
self.animateSlider.SetMax(n-1)
n = self.animateSlider.GetValue()
self.layer.set_step(n)
a,b = self.layer.periods[n]
self.animateLabel.SetLabel("%s -- %s"%(a.isoformat(),b.isoformat()))
def animate(self,evtName=None,evt=None,value=None):
n = self.layer.num_periods
self.animateSlider.SetMax(n-1)
for t in range(n):
self.animateSlider.SetValue(t)
self.layer.set_step(t)
a,b = self.layer.periods[t]
self.animateLabel.SetLabel("%s -- %s"%(a.isoformat(),b.isoformat()))
wx.Yield()
def events_set(self,evtName=None,evt=None,value=None):
tbl_id = self.eventEventsTable.GetSelection()
evtJoinField_ID = self.eventEventsJoinField.GetSelection()
rgnJoinField_ID = self.eventRegionsJoinField.GetSelection()
if tbl_id >= 0 and evtJoinField_ID >= 0 and rgnJoinField_ID >=0:
r = self.layer.table
rjf = r.meta['header'][rgnJoinField_ID]
e = r._db.event_tables[tbl_id]
ejf = e.meta['header'][evtJoinField_ID]
try:
r.set_events(e,rjf,ejf)
print "events set"
self.update(self.layer)
except:
print "failed"
return False
print "not ready."
return False
def run(self,evtName=None,evt=None,value=None):
y = self.layer.data_table.by_col(self.classificationAttribute.GetStringSelection())
y = numpy.array(y)
k = int(self.classificationClasses.GetStringSelection())
meth = pysal.esda.mapclassify.kmethods[self.classificationMethod.GetStringSelection()]
self.layer.classification = meth(y,k)
def close(self,evt):
self.Hide()
class mapFrame(mapview_xrc.xrcMapFrame):
def __init__(self,parent=None):
stars.remapEvtsToDispatcher(self,self.evtDispatch)
mapview_xrc.xrcMapFrame.__init__(self,parent)
# localize layerMenu.
self.layerMenu = self.mapMenuBar.Menus[self.mapMenuBar.FindMenu('Layer')][0]
#defaults
defaults = {'pos':wx.DefaultPosition, 'size':wx.DefaultSize, 'shell':{'pos':wx.DefaultPosition,'size':wx.DefaultSize}}
#read prefs...
paths = wx.StandardPaths.Get()
pth = os.path.join(paths.GetUserDataDir(),'stars.config')
if os.path.exists(pth):
config = open(pth,'r')
try:
d = json.load(config)
defaults.update(d)
#print "Config loaded:",defaults
except ValueError:
print "bad config file, consider removing"
# restore defaults
self.SetPosition(defaults['pos'])
self.SetSize(defaults['size'])
# setup shell
shell = wx.Frame(self,pos=defaults['shell']['pos'], size=defaults['shell']['size'])
shell.Bind(wx.EVT_CLOSE,self.shell)
shell.SetTitle("STARS -- Console")
sh = Shell(shell)
self.__shell = shell
#Setup Map Panel and Layers Control
self.model = MapModel()
#self.model.addPath('/Users/charlie/Documents/data/stl_hom/stl_hom.shp')
self.model.addListener(self.able)
self.mapCanvas = wxCanvas(self,self.model)
self.layers = LayersControl(self,self.mapCanvas.model,size=(150,400))
#Add a plot Canvas
#x = self.model.layers[0].data_table.by_col('HR8893')
#y = self.model.layers[0].data_table.by_col('PE87')
#somePoints = map(pysal.cg.Point,zip(x,y))
#for i,pt in enumerate(somePoints):
# pt.id = i+1
#plotLayer = layers.ScatterLayer(somePoints)
#self.plot = CanvasModel([plotLayer])
#self.plotCanvas= wxCanvas(self,self.plot)
#self.plotCanvas.addControl(wxCanvasTools.selectTool())
#def custom_linker(src, tag):
# layers = [plotLayer, self.model.layers[0]]
# targets = [x for x in layers if x != src]
# for target in targets:
# target.selection = src.selection
#plotLayer.addListener(custom_linker)
#self.model.layers[0].addListener(custom_linker)
# initialize the Advanced User Interface (AUI) manager.
self._mgr = wx.aui.AuiManager(self)
# Setup AUI Panes
self._mgr.AddPane(self.mapCanvas, wx.CENTER)
#self._mgr.AddPane(self.plotCanvas, wx.LEFT)
#self._mgr.AddPane(self.mapCanvas, wx.aui.AuiPaneInfo().Name('mapView').Caption('Map View 1').Left().MaximizeButton().Show() )
self._mgr.AddPane(self.layers, wx.aui.AuiPaneInfo().Name('layers').Caption('Layers').Left().MaximizeButton().Hide() )
#self._mgr.AddPane(self.ToolBar, wx.aui.AuiPaneInfo().Name('toolbar1').Caption('ToolBar').ToolbarPane().Top() )
self._mgr.Update()
self.toggleLayers()
# Setup Tools
self.tools = {}
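# self.tools maps a tool name to a (canvas control, toolbar item id,
# menu item id) tuple so that setTool() can keep the toolbar toggle, the
# menu check mark and the control's enabled state in sync.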
#setup status tool
statusTool = StatusTool(self.status,3)
self.mapCanvas.addControl(statusTool)
#setup pan tool
panTool = wxCanvasTools.panTool()
self.mapCanvas.addControl(panTool)
self.tools['panTool'] = panTool,self.panTool.GetId(),self.menuToolPan.GetId()
#setup zoom tool
zoomTool = wxCanvasTools.zoomTool()
self.mapCanvas.addControl(zoomTool)
self.tools['zoomTool'] = zoomTool,self.zoomTool.GetId(),self.menuToolZoom.GetId()
#setup select tool
selectTool = wxCanvasTools.selectTool()
selectTool.disableBrushing()
self.mapCanvas.addControl(selectTool)
self.tools['selectTool'] = selectTool,self.selectTool.GetId(),self.menuToolSelect.GetId()
self.setTool('panTool',False)
self.dispatch = d = {}
d['FileOpen'] = self.open
d['openTool'] = self.open
d['menuToolPan'] = self.toggle_pan
d['panTool'] = self.toggle_pan
d['menuToolZoom'] = self.toggle_zoom
d['zoomTool'] = self.toggle_zoom
d['selectTool'] = self.toggle_select
d['menuToolSelect'] = self.toggle_select
d['menuToolBrush'] = self.brushing
d['brushTool'] = self.brushing
d['extentTool'] = self.zoomExtent
d['MenuToolExtent'] = self.zoomExtent
d['consoleTool'] = self.shell
d['menuViewConsole'] = self.shell
d['menuEditCopy'] = self.onCopy
d['menuViewIcons'] = self.toolbarIcons
d['menuViewText'] = self.toolbarText
d['tableTool'] = self.table
d['menuViewTable'] = self.table
d['menuViewLayers'] = self.toggleLayers
d['layersTool'] = self.toggleLayers
d['menuLayerRemove'] = self.removeLayer
d['menuLayerZoom'] = self.zoomLayer
d['menuLayerSelectable'] = self.layerSelectable
d['menuLayerProps'] = self.layerProps
def evtDispatch(self,evtName,evt):
evtName,widgetName = evtName.rsplit('_',1)
if widgetName in self.dispatch:
self.dispatch[widgetName](evtName,evt)
else:
if DEBUG: print "not implemented:", evtName,widgetName
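    # Dispatch convention assumed by evtDispatch() above: callback names arrive as
    # "<evtName>_<widgetName>" (e.g. "OnMenu_menuLayerRemove"); the widget name
    # selects the handler in self.dispatch and the remaining prefix is passed on
    # as evtName, which handlers such as removeLayer() check explicitly.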
def able(self,mdl=None,tag=None):
"""
Enables/Disables GUI Widgets based on the model's state.
"""
if self.model.selected_layer:
self.mapMenuBar.EnableTop(self.mapMenuBar.FindMenu('Layer'),True)
self.ToolBar.EnableTool(self.tableTool.GetId(),True)
self.menuViewTable.Enable(True)
self.MenuBar.Check(self.menuLayerSelectable.GetId(),self.model.selected_layer.is_selectable)
else:
self.mapMenuBar.EnableTop(self.mapMenuBar.FindMenu('Layer'),False)
self.ToolBar.EnableTool(self.tableTool.GetId(),False)
self.menuViewTable.Enable(False)
self.MenuBar.Check(self.menuLayerSelectable.GetId(),False)
def onCopy(self,evtName=None,evt=None,value=None):
""" Copies the current display buffer to the Clipboard """
if wx.TheClipboard.Open():
wx.TheClipboard.SetData(wx.BitmapDataObject(self.mapCanvas.buffer))
wx.TheClipboard.Close()
else:
wx.Bell()
print "Could not open the clipboard?"
def open(self,evtName=None,evt=None,value=None):
dlg = wx.FileDialog(self,"Open Shapefile", wildcard="ESRI ShapeFile (*.shp)|*.shp", style=wx.FD_MULTIPLE|wx.FD_OPEN)
if dlg.ShowModal() == wx.ID_OK:
for pth in dlg.GetPaths():
#pth = dlg.GetPath()
if not pth.endswith('.shp'):
pth = pth+'.shp'
print "Adding Layer:",pth
layer = self.model.addPath(pth)
def setTool(self,toolname,state=None):
tool,tid,mid = self.tools[toolname]
if state == None:
state = tool.enabled^True #Toggle state.
self.mapToolBar.ToggleTool(tid,state)
self.MenuBar.Check(mid,state)
if state:
tool.enable()
else:
tool.disable()
for key in self.tools:
if key!=toolname:
tool,tid,mid = self.tools[key]
tool.disable()
self.mapToolBar.ToggleTool(tid,False)
self.MenuBar.Check(mid,False)
def brushing(self,evtName=None,evt=None,value=None):
state = self.tools['selectTool'][0].isBrushing()^True
self.mapToolBar.ToggleTool(self.brushTool.GetId(),state)
self.MenuBar.Check(self.menuToolBrush.GetId(),state)
if state:
self.tools['selectTool'][0].enableBrushing()
self.setTool('selectTool',True)
else:
self.tools['selectTool'][0].disableBrushing()
def shell(self,evtName=None,evt=None,value=None):
state = self.__shell.IsShown()^True
self.mapToolBar.ToggleTool(self.consoleTool.GetId(),state)
self.MenuBar.Check(self.menuViewConsole.GetId(),state)
if state:
self.__shell.Show()
else:
self.__shell.Hide()
def table(self,evtName=None,evt=None,value=None):
if self.model.selected_layer:
layer = self.model.selected_layer
if not hasattr(layer,'tableView'):
layer.tableView = TableViewer(self,layer)
layer.tableView.SetTitle("STARS -- Attribute Table for %s"%layer.name)
layer.tableView.Show()
layer.tableView.Raise()
def toggleLayers(self,evtName=None,evt=None,value=None):
pane = self._mgr.GetPane(self.layers)
state = pane.IsShown()^True
self.mapToolBar.ToggleTool(self.layersTool.GetId(),state)
self.MenuBar.Check(self.menuViewLayers.GetId(),state)
if state:
pane.Show()
else:
pane.Hide()
self._mgr.Update()
def removeLayer(self,evtName=None,evt=None,value=None):
if evtName == 'OnMenu' and self.model.selected_layer:
self.model.removeLayer(self.model.selected_layer)
def toolbarIcons(self,evtName=None,evt=None,value=None):
self.mapToolBar.ToggleWindowStyle(wx.TB_NOICONS)
self.MenuBar.Check(self.menuViewIcons.GetId(), self.mapToolBar.HasFlag(wx.TB_NOICONS)^True)
def toolbarText(self,evtName=None,evt=None,value=None):
self.mapToolBar.ToggleWindowStyle(wx.TB_TEXT)
self.MenuBar.Check(self.menuViewText.GetId(), self.mapToolBar.HasFlag(wx.TB_TEXT))
def toggle_pan(self,evtName=None,evt=None,value=None):
self.setTool('panTool')
def toggle_zoom(self,evtName=None,evt=None,value=None):
self.setTool('zoomTool')
def toggle_select(self,evtName=None,evt=None,value=None):
self.setTool('selectTool')
def zoomExtent(self,evtName=None,evt=None,value=None):
self.mapCanvas.model.zoom_to_world()
def zoomLayer(self,evtName=None,evt=None,value=None):
self.model.extent = self.model.selected_layer.extent
def layerSelectable(self,evtName=None,evt=None,value=None):
if evtName == 'OnMenu':
state = self.model.selected_layer.is_selectable^True
self.model.selected_layer.is_selectable = state
def layerProps(self,evtName=None,evt=None,value=None):
if evtName == 'OnMenu' and self.model.selected_layer:
layer = self.model.selected_layer
if not hasattr(layer,'propsView'):
print "Create Props View"
layer.propsView = layerPropFrame(self,layer)
if type(layer) != layers.RegionLayer:
layer.propsView.layerBook.RemovePage(1)
layer.propsView.Show()
layer.propsView.Raise()
def table_update(self,mdl,tag=None):
mdl.layer.selection = mdl.selection
def OnClose(self,evt):
paths = wx.StandardPaths.Get()
pth = paths.GetUserDataDir()
if not os.path.exists(pth):
os.mkdir(pth)
config = open(os.path.join(pth,'stars.config'),'w')
json.dump({
"pos":self.GetPosition().Get(),
"size":self.GetSize().Get(),
"shell": {
"pos": self.__shell.GetPosition().Get(),
"size": self.__shell.GetSize().Get()
}
},config)
config.close()
self.Destroy()
| gpl-2.0 | 647,580,272,144,639,400 | 42.516129 | 134 | 0.619904 | false |
AusTac/parma | b3/lib/corestats.py | 1 | 5732 | #!/usr/bin/env python
# corestats.py (COREy STATS)
# Copyright (c) 2006-2007, Corey Goldberg ([email protected])
# updated on 2010-09 by GrosBedo
#
# statistical calculation class
# for processing numeric sequences
#
# license: GNU LGPL
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# CHANGELOG:
# 2010-09-14 - GrosBedo:
# * enhanced variance(), no more memory leak
# 2010-09-13 - GrosBedo:
# * added variance()
# * added mode()
# * added unique()
# * fixed median() algo
# 2010-09-09 - GrosBedo:
# * added percentileforvalue() (inverse of valueforpercentile() )
# * CRITICAL: removed the init function and the self.sequence float conversion (which was a BIG memory hog !)
import sys, math
class Stats:
def sum(self, sequence):
if len(sequence) < 1:
return None
else:
return sum(sequence)
def count(self, sequence):
return len(sequence)
def min(self, sequence):
if len(sequence) < 1:
return None
else:
return min(sequence)
def max(self, sequence):
if len(sequence) < 1:
return None
else:
return max(sequence)
def mean(self, sequence):
if len(sequence) < 1:
return None
else:
return float(sum(sequence)) / len(sequence)
def median(self, sequence):
if len(sequence) < 1:
return None
else:
            sequence.sort()
            mid_idx = len(sequence) // 2
            if len(sequence) % 2 == 0:
                # even number of values: average the two middle elements
                return float(sequence[mid_idx - 1] + sequence[mid_idx]) / 2
            else:
                return sequence[mid_idx]
def modeold(self, sequence):
results = {}
for item in sequence:
results.setdefault(item, 0) # if index does not already exists, create it and set a value of 0
results[item] += 1
results = sorted(results.iteritems(), key=lambda (k,v):(v,k), reverse=True) # Sort by value (count), then if 2 keys have the same count, it will sort them by their keys
return results
def mode(self, sequence):
"""
Enhanced version of mode(), inspired by statlib/stats.py
The advantage is that this function (as well as mode) can return several modes at once (so you can see the next most frequent values)
"""
scores = self.unique(sequence)
scores.sort()
freq = {}
for item in scores:
freq.setdefault(item, 0) # if index does not already exists, create it and set a value of 0
freq[item] = sequence.count(item)
results = sorted(freq.iteritems(), key=lambda (k,v):(v,k), reverse=True) # Sort by value (count), then if 2 keys have the same count, it will sort them by their keys
return results
def variance(self, sequence):
if len(sequence) < 1:
return None
else:
avg = self.mean(sequence)
sdsq = 0
for i in sequence:
sdsq += (i - avg) ** 2
#sdsq = sum([(i - avg) ** 2 for i in sequence]) # this one-liner hogs a lot of memory, avoid
variance = (float(sdsq) / (len(sequence) - 1))
return variance
def stdev(self, sequence):
if len(sequence) < 1:
return None
else:
variance = self.variance(sequence)
stdev = float(variance) ** 0.5
return stdev
def valueforpercentile(self, sequence, percentile):
if len(sequence) < 1:
value = None
elif (percentile > 100):
sys.stderr.write('ERROR: percentile must be <= 100. you supplied: %s\n'% percentile)
value = None
elif (percentile == 100):
value = max(sequence)
else:
element_idx = int(len(sequence) * (float(percentile) / 100.0))
sequence.sort()
value = sequence[element_idx]
return value
def percentileforvalue(self, sequence, value):
maxnb = max(sequence)
minnb = min(sequence)
if len(sequence) < 1:
percentile = None
elif (value > maxnb or value < minnb ):
#sys.stderr.write('ERROR: value must be between %s < value < %s. you supplied: %s\n'% (minnb, maxnb, value))
#percentile = None
if (value > maxnb):
percentile = 100
else:
percentile = 0
else:
sequence.sort()
sequence.reverse()
element_idx = sequence.index(value) # list.index() returns the first occurence, but we want to enclose all equal values, so we must reverse the sequence and do some calculations in order to get the right value
element_idx = (len(sequence) - element_idx)
percentile = float(element_idx) * 100.0 / len(sequence)
return percentile
def unique(self, sequence):
return list(set(sequence))
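# Worked example for the percentile helpers above (values follow directly from
# the code): with sequence = [1, 2, 3, 4, 5],
#   Stats().percentileforvalue(sequence, 4) returns 80.0
#   Stats().valueforpercentile(sequence, 80) returns 5
# The pair is not perfectly symmetric: valueforpercentile() indexes with
# int(len * percentile / 100), while percentileforvalue() counts from the top.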
# Sample script using this class:
# -------------------------------------------
# #!/usr/bin/env python
# import corestats
#
# sequence = [1, 2.5, 7, 13.4, 8.0]
# stats = corestats.Stats()
# print stats.mean(sequence)
# print stats.valueforpercentile(sequence, 90)
# ------------------------------------------- | gpl-2.0 | -1,045,647,948,741,987,700 | 31.948276 | 221 | 0.565422 | false |
riannucci/rietveldv2 | tests/utils.py | 1 | 1738 | # Copyright 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test utils."""
import os
from google.appengine.ext import testbed
from django.test import TestCase as _TestCase
FILES_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'files')
class TestCase(_TestCase):
"""Customized Django TestCase.
This class disables the setup of Django features that are not
  available on App Engine (e.g. fixture loading). It also initializes
  the Testbed class provided by the App Engine SDK.
"""
def _fixture_setup(self): # defined in django.test.TestCase
self.testbed = testbed.Testbed()
self.testbed.activate()
self.testbed.init_memcache_stub()
self.testbed.init_datastore_v3_stub()
self.testbed.init_user_stub()
def _fixture_teardown(self): # defined in django.test.TestCase
self.testbed.deactivate()
def tearDown(self):
    # Deliberate NameError sentinel: this hook is not expected to run under the
    # testbed harness, so reaching it makes the test fail loudly.
    THIS_IS_NOT_CALLED_GO_FIGURE
def login(self, email):
"""Logs in a user identified by email."""
os.environ['USER_EMAIL'] = email
def logout(self):
"""Logs the user out."""
os.environ['USER_EMAIL'] = ''
def load_file(fname):
  """Read file and return its content."""
return open(os.path.join(FILES_DIR, fname)).read()
| apache-2.0 | -4,566,225,672,786,474,500 | 28.457627 | 77 | 0.713464 | false |
kyuridenamida/atcoder-tools | tests/utils/gzip_controller.py | 2 | 1295 | import os
import shutil
import tarfile
class GZipController:
def __init__(self, target_dir, gzip_file_path, main_dirname):
self.target_dir = target_dir
self.gzip_file_path = gzip_file_path
self.main_dirname = main_dirname
def create_dir(self):
tf = tarfile.open(self.gzip_file_path, 'r')
tf.extractall(self.target_dir)
main_dir_path = os.path.join(self.target_dir, self.main_dirname)
if os.path.exists(main_dir_path):
return main_dir_path
raise FileNotFoundError("{} is not found".format(main_dir_path))
def remove_dir(self):
shutil.rmtree(self.target_dir)
def _make_data_full_path(filename: str):
return os.path.join(
os.path.dirname(os.path.abspath(__file__)),
filename)
def make_tst_data_controller(target_dir: str):
return GZipController(target_dir,
_make_data_full_path(
'../resources/common/test_data.tar.gz'),
"test_data")
def make_html_data_controller(target_dir: str):
return GZipController(target_dir,
_make_data_full_path(
'../resources/common/problem_htmls.tar.gz'),
"problem_htmls")
| mit | -8,670,779,840,069,245,000 | 29.833333 | 74 | 0.579151 | false |
davidhdz/crits | crits/domains/handlers.py | 2 | 33476 | import json
import re
import datetime
from django.conf import settings
from django.core.urlresolvers import reverse
from django.http import HttpResponse
from django.shortcuts import render_to_response
from django.template import RequestContext
from mongoengine.base import ValidationError
from crits.core import form_consts
from crits.core.crits_mongoengine import EmbeddedSource, EmbeddedCampaign
from crits.core.crits_mongoengine import json_handler, create_embedded_source
from crits.core.handsontable_tools import convert_handsontable_to_rows, parse_bulk_upload
from crits.core.handlers import build_jtable, jtable_ajax_list, jtable_ajax_delete
from crits.core.data_tools import convert_string_to_bool
from crits.core.handlers import csv_export
from crits.core.user_tools import user_sources, is_user_favorite
from crits.core.user_tools import is_user_subscribed
from crits.domains.domain import Domain, TLD
from crits.domains.forms import AddDomainForm
from crits.ips.ip import IP
from crits.ips.handlers import validate_and_normalize_ip
from crits.notifications.handlers import remove_user_from_notification
from crits.objects.handlers import object_array_to_dict, validate_and_add_new_handler_object
from crits.relationships.handlers import forge_relationship
from crits.services.handlers import run_triage, get_supported_services
def get_valid_root_domain(domain):
"""
Validate the given domain and TLD, and if valid, parse out the root domain
:param domain: the domain to validate and parse
:type domain: str
:returns: tuple: (Valid root domain, Valid FQDN, Error message)
"""
root = fqdn = error = ""
black_list = "/:@\ "
domain = domain.strip()
if any(c in black_list for c in domain):
error = 'Domain cannot contain space or characters %s' % (black_list)
else:
root = tld_parser.parse(domain)
if root == "no_tld_found_error":
error = 'No valid TLD found'
root = ""
else:
fqdn = domain.lower()
return (root, fqdn, error)
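# Illustrative behaviour of get_valid_root_domain(), assuming the TLD collection
# behind tld_parser contains the suffix "co.uk":
#   get_valid_root_domain("WWW.Example.co.uk") -> ("example.co.uk", "www.example.co.uk", "")
#   get_valid_root_domain("bad domain.com")    -> ("", "", "Domain cannot contain space or characters ...")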
def get_domain_details(domain, analyst):
"""
Generate the data to render the Domain details template.
:param domain: The name of the Domain to get details for.
:type domain: str
:param analyst: The user requesting this information.
:type analyst: str
:returns: template (str), arguments (dict)
"""
template = None
allowed_sources = user_sources(analyst)
dmain = Domain.objects(domain=domain,
source__name__in=allowed_sources).first()
if not dmain:
error = ("Either no data exists for this domain"
" or you do not have permission to view it.")
template = "error.html"
args = {'error': error}
return template, args
dmain.sanitize_sources(username="%s" % analyst,
sources=allowed_sources)
# remove pending notifications for user
remove_user_from_notification("%s" % analyst, dmain.id, 'Domain')
# subscription
subscription = {
'type': 'Domain',
'id': dmain.id,
'subscribed': is_user_subscribed("%s" % analyst,
'Domain',
dmain.id),
}
#objects
objects = dmain.sort_objects()
#relationships
relationships = dmain.sort_relationships("%s" % analyst, meta=True)
# relationship
relationship = {
'type': 'Domain',
'value': dmain.id
}
#comments
comments = {'comments': dmain.get_comments(),
'url_key':dmain.domain}
#screenshots
screenshots = dmain.get_screenshots(analyst)
# favorites
favorite = is_user_favorite("%s" % analyst, 'Domain', dmain.id)
# services
service_list = get_supported_services('Domain')
# analysis results
service_results = dmain.get_analysis_results()
args = {'objects': objects,
'relationships': relationships,
'comments': comments,
'favorite': favorite,
'relationship': relationship,
'subscription': subscription,
'screenshots': screenshots,
'domain': dmain,
'service_list': service_list,
'service_results': service_results}
return template, args
def generate_domain_csv(request):
"""
Generate a CSV file of the Domain information
:param request: The request for this CSV.
:type request: :class:`django.http.HttpRequest`
:returns: :class:`django.http.HttpResponse`
"""
response = csv_export(request,Domain)
return response
def generate_domain_jtable(request, option):
"""
Generate the jtable data for rendering in the list template.
:param request: The request for this jtable.
:type request: :class:`django.http.HttpRequest`
:param option: Action to take.
:type option: str of either 'jtlist', 'jtdelete', or 'inline'.
:returns: :class:`django.http.HttpResponse`
"""
obj_type = Domain
type_ = "domain"
mapper = obj_type._meta['jtable_opts']
if option == "jtlist":
# Sets display url
details_url = mapper['details_url']
details_url_key = mapper['details_url_key']
fields = mapper['fields']
response = jtable_ajax_list(obj_type,
details_url,
details_url_key,
request,
includes=fields)
return HttpResponse(json.dumps(response,
default=json_handler),
content_type="application/json")
if option == "jtdelete":
response = {"Result": "ERROR"}
if jtable_ajax_delete(obj_type,request):
response = {"Result": "OK"}
return HttpResponse(json.dumps(response,
default=json_handler),
content_type="application/json")
jtopts = {
'title': "Domains",
'default_sort': mapper['default_sort'],
'listurl': reverse('crits.%ss.views.%ss_listing' % (type_, type_),
args=('jtlist',)),
'deleteurl': reverse('crits.%ss.views.%ss_listing' % (type_, type_),
args=('jtdelete',)),
'searchurl': reverse(mapper['searchurl']),
'fields': mapper['jtopts_fields'],
'hidden_fields': mapper['hidden_fields'],
'linked_fields': mapper['linked_fields'],
'details_link': mapper['details_link']
}
jtable = build_jtable(jtopts,request)
jtable['toolbar'] = [
{
'tooltip': "'All Domains'",
'text': "'All'",
'click': "function () {$('#domain_listing').jtable('load', {'refresh': 'yes'});}",
'cssClass': "'jtable-toolbar-center'",
},
{
'tooltip': "'New Domains'",
'text': "'New'",
'click': "function () {$('#domain_listing').jtable('load', {'refresh': 'yes', 'status': 'New'});}",
'cssClass': "'jtable-toolbar-center'",
},
{
'tooltip': "'In Progress Domains'",
'text': "'In Progress'",
'click': "function () {$('#domain_listing').jtable('load', {'refresh': 'yes', 'status': 'In Progress'});}",
'cssClass': "'jtable-toolbar-center'",
},
{
'tooltip': "'Analyzed Domains'",
'text': "'Analyzed'",
'click': "function () {$('#domain_listing').jtable('load', {'refresh': 'yes', 'status': 'Analyzed'});}",
'cssClass': "'jtable-toolbar-center'",
},
{
'tooltip': "'Deprecated Domains'",
'text': "'Deprecated'",
'click': "function () {$('#domain_listing').jtable('load', {'refresh': 'yes', 'status': 'Deprecated'});}",
'cssClass': "'jtable-toolbar-center'",
},
{
'tooltip': "'Add Domain'",
'text': "'Add Domain'",
'click': "function () {$('#new-domain').click()}",
},
]
if option == "inline":
return render_to_response("jtable.html",
{'jtable': jtable,
'jtid': '%s_listing' % type_,
'button' : '%ss_tab' % type_},
RequestContext(request))
else:
return render_to_response("%s_listing.html" % type_,
{'jtable': jtable,
'jtid': '%s_listing' % type_},
RequestContext(request))
def add_new_domain_via_bulk(data, rowData, request, errors,
is_validate_only=False, cache={}):
"""
Wrapper for add_new_domain to pass in rowData.
:param data: The data about the domain.
:type data: dict
:param rowData: Any objects that need to be added to the domain.
:type rowData: dict
:param request: The Django request.
:type request: :class:`django.http.HttpRequest`
:param errors: A list of current errors to append to.
:type errors: list
:param is_validate_only: Only validate the data and return any errors.
:type is_validate_only: boolean
:param cache: Cached data, typically for performance enhancements
        during bulk operations.
:type cache: dict
:returns: tuple
"""
return add_new_domain(data, request, errors, rowData=rowData,
is_validate_only=is_validate_only, cache=cache)
def retrieve_domain(domain, cache):
"""
Retrieves a domain by checking cache first. If not in cache
then queries mongo for the domain.
:param domain: The domain name.
:type domain: str
:param cache: Cached data, typically for performance enhancements
        during bulk operations.
:type cache: dict
:returns: :class:`crits.domains.domain.Domain`
"""
domain_obj = None
cached_results = cache.get(form_consts.Domain.CACHED_RESULTS)
if cached_results:
domain_obj = cached_results.get(domain.lower())
if not domain_obj:
domain_obj = Domain.objects(domain__iexact=domain).first()
return domain_obj
def add_new_domain(data, request, errors, rowData=None, is_validate_only=False, cache={}):
"""
Add a new domain to CRITs.
:param data: The data about the domain.
:type data: dict
:param request: The Django request.
:type request: :class:`django.http.HttpRequest`
:param errors: A list of current errors to append to.
:type errors: list
:param rowData: Any objects that need to be added to the domain.
:type rowData: dict
:param is_validate_only: Only validate the data and return any errors.
:type is_validate_only: boolean
:param cache: Cached data, typically for performance enhancements
during bulk operations.
:type cache: dict
:returns: tuple (<result>, <errors>, <retVal>)
"""
result = False
retVal = {}
domain = data['domain']
add_ip = data.get('add_ip')
ip = data.get('ip')
ip_type = data.get('ip_type')
if add_ip:
error = validate_and_normalize_ip(ip, ip_type)[1]
if error:
errors.append(error)
if is_validate_only:
error = get_valid_root_domain(domain)[2]
if error:
errors.append(error)
# check for duplicate domains
fqdn_domain = retrieve_domain(domain, cache)
if fqdn_domain:
if isinstance(fqdn_domain, Domain):
resp_url = reverse('crits.domains.views.domain_detail', args=[domain])
message = ('Warning: Domain already exists: '
'<a href="%s">%s</a>' % (resp_url, domain))
retVal['message'] = message
retVal['status'] = form_consts.Status.DUPLICATE
retVal['warning'] = message
else:
result_cache = cache.get(form_consts.Domain.CACHED_RESULTS);
result_cache[domain.lower()] = True
elif not errors:
username = request.user.username
reference = data.get('domain_reference')
source_name = data.get('domain_source')
method = data.get('domain_method')
source = [create_embedded_source(source_name, reference=reference,
method=method, analyst=username)]
bucket_list = data.get(form_consts.Common.BUCKET_LIST_VARIABLE_NAME)
ticket = data.get(form_consts.Common.TICKET_VARIABLE_NAME)
if data.get('campaign') and data.get('confidence'):
campaign = [EmbeddedCampaign(name=data.get('campaign'),
confidence=data.get('confidence'),
analyst=username)]
else:
campaign = []
retVal = upsert_domain(domain, source, username, campaign,
bucket_list=bucket_list, ticket=ticket, cache=cache)
if not retVal['success']:
errors.append(retVal.get('message'))
retVal['message'] = ""
else:
new_domain = retVal['object']
ip_result = {}
if add_ip:
if data.get('same_source'):
ip_source = source_name
ip_method = method
ip_reference = reference
else:
ip_source = data.get('ip_source')
ip_method = data.get('ip_method')
ip_reference = data.get('ip_reference')
from crits.ips.handlers import ip_add_update
ip_result = ip_add_update(ip,
ip_type,
ip_source,
ip_method,
ip_reference,
campaign=campaign,
analyst=username,
bucket_list=bucket_list,
ticket=ticket,
cache=cache)
if not ip_result['success']:
errors.append(ip_result['message'])
else:
#add a relationship with the new IP address
new_ip = ip_result['object']
if new_domain and new_ip:
new_domain.add_relationship(rel_item=new_ip,
rel_type='Resolved_To',
analyst=username,
get_rels=False)
new_domain.save(username=username)
new_ip.save(username=username)
#set the URL for viewing the new data
resp_url = reverse('crits.domains.views.domain_detail', args=[domain])
if retVal['is_domain_new'] == True:
retVal['message'] = ('Success! Click here to view the new domain: '
'<a href="%s">%s</a>' % (resp_url, domain))
else:
message = ('Updated existing domain: <a href="%s">%s</a>' % (resp_url, domain))
retVal['message'] = message
retVal[form_consts.Status.STATUS_FIELD] = form_consts.Status.DUPLICATE
retVal['warning'] = message
#add indicators
if data.get('add_indicators'):
from crits.indicators.handlers import create_indicator_from_tlo
# If we have an IP object, add an indicator for that.
if ip_result.get('success'):
ip = ip_result['object']
result = create_indicator_from_tlo('IP',
ip,
username,
ip_source,
add_domain=False)
ip_ind = result.get('indicator')
if not result['success']:
errors.append(result['message'])
# Add an indicator for the domain.
result = create_indicator_from_tlo('Domain',
new_domain,
username,
source_name,
add_domain=False)
if not result['success']:
errors.append(result['message'])
elif ip_result.get('success') and ip_ind:
forge_relationship(left_class=result['indicator'],
right_class=ip_ind,
rel_type='Resolved_To',
analyst=username)
result = True
# This block validates, and may also add, objects to the Domain
if retVal.get('success') or is_validate_only == True:
if rowData:
objectsData = rowData.get(form_consts.Common.OBJECTS_DATA)
# add new objects if they exist
if objectsData:
objectsData = json.loads(objectsData)
current_domain = retrieve_domain(domain, cache)
for object_row_counter, objectData in enumerate(objectsData, 1):
if current_domain != None:
# if the domain exists then try to add objects to it
if isinstance(current_domain, Domain) == True:
objectDict = object_array_to_dict(objectData,
"Domain",
current_domain.id)
else:
objectDict = object_array_to_dict(objectData,
"Domain",
"")
current_domain = None;
else:
objectDict = object_array_to_dict(objectData,
"Domain",
"")
(obj_result,
errors,
obj_retVal) = validate_and_add_new_handler_object(
None, objectDict, request, errors, object_row_counter,
is_validate_only=is_validate_only,
cache=cache, obj=current_domain)
if not obj_result:
retVal['success'] = False
return result, errors, retVal
def edit_domain_name(domain, new_domain, analyst):
"""
Edit domain name for an entry.
:param domain: The domain name to edit.
:type domain: str
:param new_domain: The new domain name.
:type new_domain: str
:param analyst: The user editing the domain name.
:type analyst: str
:returns: boolean
"""
# validate new domain
(root, validated_domain, error) = get_valid_root_domain(new_domain)
if error:
return False
domain = Domain.objects(domain=domain).first()
if not domain:
return False
try:
domain.domain = validated_domain
domain.save(username=analyst)
return True
except ValidationError:
return False
def upsert_domain(domain, source, username=None, campaign=None,
confidence=None, bucket_list=None, ticket=None, cache={}):
"""
Add or update a domain/FQDN. Campaign is assumed to be a list of campaign
dictionary objects.
:param domain: The domain to add/update.
:type domain: str
:param source: The name of the source.
:type source: str
:param username: The user adding/updating the domain.
:type username: str
:param campaign: The campaign to attribute to this domain.
:type campaign: list, str
:param confidence: Confidence for the campaign attribution.
:type confidence: str
:param bucket_list: List of buckets to add to this domain.
:type bucket_list: list, str
:param ticket: The ticket for this domain.
:type ticket: str
:param cache: Cached data, typically for performance enhancements
        during bulk operations.
:type cache: dict
:returns: dict with keys:
"success" (boolean),
"object" the domain that was added,
"is_domain_new" (boolean)
"""
# validate domain and grab root domain
(root, domain, error) = get_valid_root_domain(domain)
if error:
return {'success': False, 'message': error}
is_fqdn_domain_new = False
is_root_domain_new = False
if not campaign:
campaign = []
# assume it's a list, but check if it's a string
elif isinstance(campaign, basestring):
c = EmbeddedCampaign(name=campaign, confidence=confidence, analyst=username)
campaign = [c]
# assume it's a list, but check if it's a string
if isinstance(source, basestring):
s = EmbeddedSource()
s.name = source
instance = EmbeddedSource.SourceInstance()
instance.reference = ''
instance.method = ''
instance.analyst = username
instance.date = datetime.datetime.now()
s.instances = [instance]
source = [s]
fqdn_domain = None
root_domain = None
cached_results = cache.get(form_consts.Domain.CACHED_RESULTS)
if cached_results != None:
if domain != root:
fqdn_domain = cached_results.get(domain)
root_domain = cached_results.get(root)
else:
root_domain = cached_results.get(root)
else:
#first find the domain(s) if it/they already exist
root_domain = Domain.objects(domain=root).first()
if domain != root:
fqdn_domain = Domain.objects(domain=domain).first()
#if they don't exist, create them
if not root_domain:
root_domain = Domain()
root_domain.domain = root
root_domain.source = []
root_domain.record_type = 'A'
is_root_domain_new = True
if cached_results != None:
cached_results[root] = root_domain
if domain != root and not fqdn_domain:
fqdn_domain = Domain()
fqdn_domain.domain = domain
fqdn_domain.source = []
fqdn_domain.record_type = 'A'
is_fqdn_domain_new = True
if cached_results != None:
cached_results[domain] = fqdn_domain
# if new or found, append the new source(s)
for s in source:
if root_domain:
root_domain.add_source(s)
if fqdn_domain:
fqdn_domain.add_source(s)
#campaigns
#both root and fqdn get campaigns updated
for c in campaign:
if root_domain:
root_domain.add_campaign(c)
if fqdn_domain:
fqdn_domain.add_campaign(c)
if username:
if root_domain:
root_domain.analyst = username
if fqdn_domain:
fqdn_domain.analyst = username
if bucket_list:
if root_domain:
root_domain.add_bucket_list(bucket_list, username)
if fqdn_domain:
fqdn_domain.add_bucket_list(bucket_list, username)
if ticket:
if root_domain:
root_domain.add_ticket(ticket, username)
if fqdn_domain:
fqdn_domain.add_ticket(ticket, username)
# save
try:
if root_domain:
root_domain.save(username=username)
if fqdn_domain:
fqdn_domain.save(username=username)
except Exception, e:
return {'success': False, 'message': e}
#Add relationships between fqdn, root
if fqdn_domain and root_domain:
root_domain.add_relationship(rel_item=fqdn_domain,
rel_type="Supra-domain_Of",
analyst=username,
get_rels=False)
root_domain.save(username=username)
fqdn_domain.save(username=username)
# run domain triage
if is_fqdn_domain_new:
fqdn_domain.reload()
run_triage(fqdn_domain, username)
if is_root_domain_new:
root_domain.reload()
run_triage(root_domain, username)
# return fqdn if they added an fqdn, or root if they added a root
if fqdn_domain:
return {'success': True, 'object': fqdn_domain, 'is_domain_new': is_fqdn_domain_new}
else:
return {'success': True, 'object': root_domain, 'is_domain_new': is_root_domain_new}
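# Minimal usage sketch for upsert_domain(); the source and analyst names here are
# purely illustrative:
#   result = upsert_domain("www.example.com", "OSINT", username="analyst1")
#   if result['success']:
#       domain_obj = result['object']          # the FQDN Domain document
#       newly_added = result['is_domain_new']
#   else:
#       error_text = result['message']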
def update_tlds(data=None):
"""
Update the TLD list in the database.
:param data: The TLD data.
:type data: file handle.
:returns: dict with key "success" (boolean)
"""
if not data:
return {'success': False}
line = data.readline()
while line:
line = line.rstrip()
if line and not line.startswith('//'):
TLD.objects(tld=line).update_one(set__tld=line, upsert=True)
line = data.readline()
    # Update the module-level tld_parser with the new domain info
    global tld_parser
    tld_parser = etld()
return {'success': True}
class etld(object):
"""
TLD class to assist with extracting root domains.
"""
def __init__(self):
self.rules = {}
etlds = TLD.objects()
for etld in etlds:
tld = etld.tld.split('.')[-1]
self.rules.setdefault(tld, [])
self.rules[tld].append(re.compile(self.regexpize(etld.tld)))
def regexpize(self, etld):
"""
Generate regex for this TLD.
:param etld: The TLD to generate regex for.
:returns: str
"""
etld = etld[::-1].replace('.',
'\\.').replace('*',
'[^\\.]*').replace('!',
'')
return '^(%s)\.(.*)$' % etld
def parse(self, hostname):
"""
Parse the domain.
:param hostname: The domain to parse.
:returns: str
"""
try:
hostname = hostname.lower()
tld = hostname.split('.')[-1]
hostname = hostname[::-1]
etld = ''
for rule in self.rules[tld]:
m = rule.match(hostname)
if m and m.group(1) > etld:
mytld = "%s.%s" % ( m.group(2)[::-1].split(".")[-1],
m.group(1)[::-1])
if not mytld:
return ("no_tld_found_error")
return (mytld)
except Exception:
return ("no_tld_found_error")
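# Example of how etld.parse() resolves a registrable domain, assuming "co.uk" is
# present in the TLD collection: the hostname is lowercased and reversed
# ("www.example.co.uk" -> "ku.oc.elpmaxe.www"), matched against the generated
# pattern '^(ku\.oc)\.(.*)$', and reassembled as "example.co.uk". Hostnames with
# no matching suffix yield the sentinel string "no_tld_found_error".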
def parse_row_to_bound_domain_form(request, rowData, cache):
"""
Parse a row in bulk upload into form data that can be used to add a Domain.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:param rowData: The objects to add for the Domain.
:type rowData: dict
:param cache: Cached data, typically for performance enhancements
        during bulk operations.
:type cache: dict
:returns: :class:`crits.domains.forms.AddDomainForm`
"""
bound_domain_form = None
# TODO Add common method to convert data to string
domain_name = rowData.get(form_consts.Domain.DOMAIN_NAME, "").strip();
campaign = rowData.get(form_consts.Domain.CAMPAIGN, "")
confidence = rowData.get(form_consts.Domain.CAMPAIGN_CONFIDENCE, "")
domain_source = rowData.get(form_consts.Domain.DOMAIN_SOURCE, "")
domain_method = rowData.get(form_consts.Domain.DOMAIN_METHOD, "")
domain_reference = rowData.get(form_consts.Domain.DOMAIN_REFERENCE, "")
#is_add_ip = convert_string_to_bool(rowData.get(form_consts.Domain.ADD_IP_ADDRESS, ""))
is_add_ip = False
ip = rowData.get(form_consts.Domain.IP_ADDRESS, "")
ip_type = rowData.get(form_consts.Domain.IP_TYPE, "")
created = rowData.get(form_consts.Domain.IP_DATE, "")
#is_same_source = convert_string_to_bool(rowData.get(form_consts.Domain.SAME_SOURCE, "False"))
is_same_source = False
ip_source = rowData.get(form_consts.Domain.IP_SOURCE, "")
ip_method = rowData.get(form_consts.Domain.IP_METHOD, "")
ip_reference = rowData.get(form_consts.Domain.IP_REFERENCE, "")
is_add_indicators = convert_string_to_bool(rowData.get(form_consts.Domain.ADD_INDICATORS, "False"))
bucket_list = rowData.get(form_consts.Common.BUCKET_LIST, "")
ticket = rowData.get(form_consts.Common.TICKET, "")
if(ip or created or ip_source or ip_method or ip_reference):
is_add_ip = True
if is_add_ip == True:
data = {'domain': domain_name,
'campaign': campaign,
'confidence': confidence,
'domain_source': domain_source,
'domain_method': domain_method,
'domain_reference': domain_reference,
'add_ip': is_add_ip,
'ip': ip,
'ip_type': ip_type,
'created': created,
'same_source': is_same_source,
'ip_source': ip_source,
'ip_method': ip_method,
'ip_reference': ip_reference,
'add_indicators': is_add_indicators,
'bucket_list': bucket_list,
'ticket': ticket}
bound_domain_form = cache.get("domain_ip_form")
if bound_domain_form == None:
bound_domain_form = AddDomainForm(request.user, data)
cache['domain_ip_form'] = bound_domain_form
else:
bound_domain_form.data = data
else:
data = {'domain': domain_name,
'campaign': campaign,
'confidence': confidence,
'domain_source': domain_source,
'domain_method': domain_method,
'domain_reference': domain_reference,
'add_ip': is_add_ip,
'bucket_list': bucket_list,
'ticket': ticket}
bound_domain_form = cache.get("domain_form")
if bound_domain_form == None:
bound_domain_form = AddDomainForm(request.user, data)
cache['domain_form'] = bound_domain_form
else:
bound_domain_form.data = data
if bound_domain_form != None:
bound_domain_form.full_clean()
return bound_domain_form
def process_bulk_add_domain(request, formdict):
"""
Performs the bulk add of domains by parsing the request data. Batches
some data into a cache object for performance by reducing large
amounts of single database queries.
:param request: Django request.
:type request: :class:`django.http.HttpRequest`
:param formdict: The form representing the bulk uploaded data.
:type formdict: dict
:returns: :class:`django.http.HttpResponse`
"""
domain_names = []
ip_addresses = []
cached_domain_results = {}
cached_ip_results = {}
cleanedRowsData = convert_handsontable_to_rows(request)
for rowData in cleanedRowsData:
if rowData != None:
if rowData.get(form_consts.Domain.DOMAIN_NAME) != None:
domain = rowData.get(form_consts.Domain.DOMAIN_NAME).strip().lower()
(root_domain, full_domain, error) = get_valid_root_domain(domain)
domain_names.append(full_domain)
if domain != root_domain:
domain_names.append(root_domain)
if rowData.get(form_consts.Domain.IP_ADDRESS) != None:
ip_addr = rowData.get(form_consts.Domain.IP_ADDRESS)
ip_type = rowData.get(form_consts.Domain.IP_TYPE)
(ip_addr, error) = validate_and_normalize_ip(ip_addr, ip_type)
ip_addresses.append(ip_addr)
domain_results = Domain.objects(domain__in=domain_names)
ip_results = IP.objects(ip__in=ip_addresses)
for domain_result in domain_results:
cached_domain_results[domain_result.domain] = domain_result
for ip_result in ip_results:
cached_ip_results[ip_result.ip] = ip_result
cache = {form_consts.Domain.CACHED_RESULTS: cached_domain_results,
form_consts.IP.CACHED_RESULTS: cached_ip_results,
'cleaned_rows_data': cleanedRowsData}
response = parse_bulk_upload(request, parse_row_to_bound_domain_form, add_new_domain_via_bulk, formdict, cache)
return response
# Global definition of the TLD parser -- etld.
# This is a workaround to use a global instance because the __init__ method takes ~0.5 seconds to
# initialize. Was causing performance problems (high CPU usage) with bulk uploading of domains since
# each domain needed to create the etld() class.
# TODO investigate if updating of TLDs causes this global instance to become stale.
tld_parser = etld()
| mit | 5,617,783,682,987,655,000 | 36.613483 | 119 | 0.549588 | false |
JonathanSchmalhofer/RecursiveStereoUAV | js_recursive_stereo/python_vs_matlab/airsim/generate_pcl_calib.py | 1 | 2784 | import numpy as np
import cv2
# Copied from https://github.com/utiasSTARS/pykitti/blob/master/pykitti/utils.py
def read_calib_file(filepath):
"""Read in a calibration file and parse into a dictionary."""
data = {}
with open(filepath, 'r') as f:
for line in f.readlines():
key, value = line.split(':', 1)
# The only non-float values in these files are dates, which
# we don't care about anyway
try:
data[key] = np.array([float(x) for x in value.split()])
except ValueError:
pass
return data
# Focal Length in [pix]
f = 573.2981
# u-Coordinate of Center Point in [pix]
c_u = 399.5661
# v-Coordinate of Center Point in [pix]
c_v = 295.6579
# baseline with respect to reference camera 0 in [m]
b = 0.1400
Q = np.matrix(
[[ 0, 0, 0, f],
[-1.0000, 0, 0, c_u],
[ 0, -1.0000, 0, c_v],
[ 0, 0, 1/b, 0]])
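# How Q is used further below: cv2.reprojectImageTo3D() computes, per pixel,
# [X', Y', Z', W]^T = Q . [u, v, disparity, 1]^T and then divides by W. With the
# Q above, W = disparity/b, so
#   X = f*b/disparity, Y = (c_u - u)*b/disparity, Z = (c_v - v)*b/disparity,
# i.e. depth lands on the first axis (camera-forward as X), which appears to be
# a deliberate axis convention here rather than OpenCV's default (depth on Z).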
ply_header = '''ply
format ascii 1.0
element vertex %(vert_num)d
property float x
property float y
property float z
property uchar red
property uchar green
property uchar blue
end_header
'''
def write_ply(fn, verts, colors):
verts = verts.reshape(-1, 3)
colors = colors.reshape(-1, 3)
verts = np.hstack([verts, colors])
with open(fn, 'wb') as f:
f.write((ply_header % dict(vert_num=len(verts))).encode('utf-8'))
np.savetxt(f, verts, fmt='%f %f %f %d %d %d ')
# Following part for converting a disparity image to a PointCloud was originally copied from
# https://stackoverflow.com/questions/45325795/point-cloud-from-kitti-stereo-images
imgC = cv2.imread('../../resources/AirSimCameraCalibration/left/left_00000.png')
imgL = cv2.imread('../../resources/AirSimCameraCalibration/left/left_00000.png')
imgR = cv2.imread('../../resources/AirSimCameraCalibration/right/right_00000.png')
imgL = cv2.cvtColor( imgL, cv2.COLOR_RGB2GRAY )
imgR = cv2.cvtColor( imgR, cv2.COLOR_RGB2GRAY )
window_size = 9
minDisparity = 1
stereo = cv2.StereoSGBM_create(
blockSize=10,
numDisparities=64,
preFilterCap=10,
minDisparity=minDisparity,
P1=4 * 3 * window_size ** 2,
P2=32 * 3 * window_size ** 2
)
print('computing disparity...')
disp = stereo.compute(imgL, imgR).astype(np.float32) / 16.0
points = cv2.reprojectImageTo3D(disp, Q)
colors = cv2.cvtColor(imgC, cv2.COLOR_BGR2RGB)
mask = disp > disp.min()
out_points = points[mask]
out_colors = colors[mask]
out_fn = 'checkerboard.ply'
write_ply('checkerboard.ply', out_points, out_colors)
print('%s saved' % 'checkerboard.ply') | mit | 6,907,530,882,995,174,000 | 31.383721 | 92 | 0.602011 | false |
Tanych/CodeTracking | 312-Burst-Balloons/solution.py | 1 | 2616 | class Solution(object):
def bursthelper(self,memo,nums,left,right):
if left+1==right: return 0
if memo[left][right]>0: return memo[left][right]
res=0
for i in xrange(left+1,right):
res=max(res,nums[left]*nums[i]*nums[right]+self.bursthelper(memo,nums,left,i)+\
self.bursthelper(memo,nums,i,right))
memo[left][right]=res
return res
def maxCoinsMemo(self,nums):
n=len(nums)
        # strip the 0 balloons out of nums before processing,
        # since bursting a 0 gains nothing
new_nums=[0]*(n+2)
cnt=1
for num in nums:
if num:
new_nums[cnt]=num
cnt+=1
        # building the new_nums
new_nums[0]=new_nums[cnt]=1
cnt+=1
memo=[[0 for _ in xrange(cnt)] for _ in xrange(cnt)]
return self.bursthelper(memo,new_nums,0,cnt-1)
def dpmethod(self,nums):
n=len(nums)
        # strip the 0 balloons out of nums before processing,
        # since bursting a 0 gains nothing
new_nums=[0]*(n+2)
cnt=1
for num in nums:
if num:
new_nums[cnt]=num
cnt+=1
        # building the new_nums
new_nums[0]=new_nums[cnt]=1
cnt+=1
dp=[[0 for _ in xrange(cnt)] for _ in xrange(cnt)]
# k is the diff between left and right
for k in xrange(2,cnt):
for left in xrange(0,cnt-k):
right=left+k
for i in xrange(left+1,right):
dp[left][right]=max(dp[left][right],new_nums[left]*new_nums[i]*new_nums[right]+dp[left][i]+dp[i][right])
return dp[0][cnt-1]
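    # Worked example for the DP above: nums = [3, 1, 5, 8] is padded to
    # [1, 3, 1, 5, 8, 1]; bursting in the order 1, 5, 3, 8 scores
    # 3*1*5 + 3*5*8 + 1*3*8 + 1*8*1 = 15 + 120 + 24 + 8 = 167,
    # which is what dp[0][cnt-1] evaluates to for this input.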
def maxCoins(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
"""
Extend the nums to 1+nums+1
"""
return self.dpmethod(nums)
n=len(nums)
new_num=[1]+nums+[1]
#stote the dpfun values
dp=[[0 for i in xrange(n+2)] for j in xrange(n+2)]
#get the dpfun
def dpfun(i,j):
#if done, return
if dp[i][j]>0:return dp[i][j]
            #find the best x in [i, j] to burst last; bursting x last scores
            #new_num[i-1]*new_num[x]*new_num[j+1], plus the best of the two
            #independent subproblems on either side of x
for x in xrange(i,j+1):
dp[i][j]=max(dp[i][j],dpfun(i,x-1)+new_num[i-1]*new_num[x]*new_num[j+1]+dpfun(x+1,j))
return dp[i][j]
#return 1-n max value
return dpfun(1,n)
| mit | 2,221,113,808,342,528,500 | 32.126582 | 124 | 0.495413 | false |
asterix24/GestionaleCaldaie | gestionale/default/wsgi.py | 1 | 1308 | """
WSGI config for gestionale project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import sys
import site
import os
os.environ["DJANGO_SETTINGS_MODULE"] = "gestionale.DEFAULT.settings"
from gestionale.besalba.local_settings import *
site.addsitedir(ENV_PYTHON)
site.addsitedir(ENV_SITE_PYTHON)
sys.path.append(LOCAL_ROOT_PATH)
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| gpl-2.0 | -4,604,149,561,931,418,600 | 35.333333 | 79 | 0.800459 | false |
PKRoma/s2n | tests/integration/s2n_client_endpoint_handshake_test.py | 1 | 5399 | #
# Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
# http://aws.amazon.com/apache2.0
#
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
#
import os
import sys
import time
import socket
import subprocess
import itertools
import argparse
from s2n_test_constants import *
# If a cipher_preference_version is specified, we will use it while attempting the handshake;
# otherwise, s2n will use the default. If an expected_cipher is specified, the test will pass
# if and only if the handshake is negotiated with that cipher; otherwise, the test will pass
# if the handshake is negotiated with any cipher.
well_known_endpoints = [
{"endpoint": "amazon.com"},
{"endpoint": "facebook.com"},
{"endpoint": "google.com"},
{"endpoint": "netflix.com"},
{"endpoint": "s3.amazonaws.com"},
{"endpoint": "twitter.com"},
{"endpoint": "wikipedia.org"},
{"endpoint": "yahoo.com"},
]
if os.getenv("S2N_NO_PQ") is None:
# If PQ was compiled into S2N, test the PQ preferences against KMS
pq_endpoints = [
{
"endpoint": "kms.us-east-1.amazonaws.com",
"cipher_preference_version": "KMS-PQ-TLS-1-0-2019-06",
"expected_cipher": "ECDHE-BIKE-RSA-AES256-GCM-SHA384"
},
{
"endpoint": "kms.us-east-1.amazonaws.com",
"cipher_preference_version": "PQ-SIKE-TEST-TLS-1-0-2019-11",
"expected_cipher": "ECDHE-SIKE-RSA-AES256-GCM-SHA384"
}
]
well_known_endpoints.extend(pq_endpoints)
# Make an exception to allow failure (if CI is having issues)
allowed_endpoints_failures = [
'wikipedia.org'
]
def print_result(result_prefix, return_code):
print(result_prefix, end="")
if return_code == 0:
if sys.stdout.isatty():
print("\033[32;1mPASSED\033[0m")
else:
print("PASSED")
else:
if sys.stdout.isatty():
print("\033[31;1mFAILED\033[0m")
else:
print("FAILED")
def try_client_handshake(endpoint, arguments, expected_cipher):
s2nc_cmd = ["../../bin/s2nc", "-f", "./trust-store/ca-bundle.crt", "-a", "http/1.1"] + arguments + [str(endpoint)]
currentDir = os.path.dirname(os.path.realpath(__file__))
s2nc = subprocess.Popen(s2nc_cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, cwd=currentDir)
found = 0
expected_output = "Cipher negotiated: "
if expected_cipher:
expected_output += expected_cipher
for line in range(0, 10):
output = str(s2nc.stdout.readline().decode("utf-8"))
if expected_output in output:
found = 1
break
s2nc.kill()
s2nc.wait()
if found == 0:
return -1
return 0
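# try_client_handshake() returns 0 only if the expected "Cipher negotiated: ..."
# line shows up within the first ten lines of s2nc output, and -1 otherwise;
# well_known_endpoints_test() below retries on -1 before counting a failure.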
def well_known_endpoints_test(use_corked_io, tls13_enabled):
arguments = []
msg = "\n\tTesting s2n Client with Well Known Endpoints"
opt_list = []
if tls13_enabled:
arguments += ["--tls13", "--ciphers", "default_tls13"]
opt_list += ["TLS 1.3"]
if use_corked_io:
arguments += ["-C"]
opt_list += ["Corked IO"]
if len(opt_list) != 0:
msg += " using "
if len(opt_list) > 1:
msg += ", ".join(opt_list[:-2] + [opt_list[-2] + " and " + opt_list[-1]])
else:
msg += opt_list[0]
print(msg + ":")
maxRetries = 5
failed = 0
for endpoint_config in well_known_endpoints:
endpoint = endpoint_config["endpoint"]
expected_cipher = endpoint_config.get("expected_cipher")
if "cipher_preference_version" in endpoint_config:
arguments += ["-c", endpoint_config["cipher_preference_version"]]
# Retry handshake in case there are any problems going over the internet
for i in range(1, maxRetries):
ret = try_client_handshake(endpoint, arguments, expected_cipher)
            if ret == 0:
break
else:
if endpoint in allowed_endpoints_failures:
break
time.sleep(i)
print_result("Endpoint: %-35sExpected Cipher: %-40s... " % (endpoint, expected_cipher if expected_cipher else "Any"), ret)
if ret != 0 and endpoint not in allowed_endpoints_failures:
failed += 1
return failed
def main(argv):
parser = argparse.ArgumentParser(description="Run client endpoint handshake tests")
parser.add_argument("--no-tls13", action="store_true", help="Disable TLS 1.3 tests")
args = parser.parse_args()
failed = 0
# TLS 1.2 Tests
failed += well_known_endpoints_test(use_corked_io=False, tls13_enabled=False)
failed += well_known_endpoints_test(use_corked_io=True, tls13_enabled=False)
# TLS 1.3 Tests
if not args.no_tls13:
failed += well_known_endpoints_test(use_corked_io=False, tls13_enabled=True)
failed += well_known_endpoints_test(use_corked_io=True, tls13_enabled=True)
return failed
if __name__ == "__main__":
sys.exit(main(sys.argv[1:]))
| apache-2.0 | 9,201,132,819,238,231,000 | 30.758824 | 130 | 0.622152 | false |
JoeJasinski/evesch | evesch/org/views.py | 1 | 22743 | # Create your views here.
from datetime import datetime
from django.utils.translation import ugettext_lazy as _
from django.shortcuts import render_to_response
from django.core.exceptions import ObjectDoesNotExist
from django.http import HttpResponseRedirect
from django.template import RequestContext
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.core.paginator import Paginator
from django.db.models import Q
from django.contrib.auth import get_user_model
from evesch.core.lib import Message, ePage
from evesch.egroup.models import UserGroup
from evesch.euser.models import get_current_user
from evesch.org.models import Organization, OrgInvite
from evesch.event.models import EventType
from evesch.org.forms import OrganizationForm, OrganizationFormEdit, OrganizationJoinForm, OrganizationInviteMember
def org_browse(request, filter_abc=None, template_name=None):
public_orgs = Organization.objects.get_browsable_orgs()
if filter_abc:
        public_orgs = public_orgs.filter(org_name__istartswith=filter_abc)
context = {'orgs':public_orgs,}
return render_to_response(template_name,context, context_instance=RequestContext(request))
@login_required
def orgs_list(request, template_name=None):
current_user, message = get_current_user(request.user)
if not message:
all_orgs_page = ePage(1)
if request.GET.__contains__("all_orgs_page"):
try:
all_orgs_page.curr = int(request.GET['all_orgs_page'])
except ValueError:
all_orgs_page.curr = 1
orgs = Organization.objects.filter(org_active=True).order_by('org_name')
all_orgs_page.set_pages(Paginator(orgs, 3))
my_orgs_page = ePage(1)
if request.GET.__contains__("my_orgs_page"):
try:
my_orgs_page.curr = int(request.GET['my_orgs_page'])
#my_orgs_page.curr = int(request.GET.get('my_orgs_page',1))
except:
my_orgs_page.curr = 1
my_org_groups = UserGroup.objects.filter(pk__in=current_user.get_user_groups())
my_groups = orgs.filter(group_set__in=my_org_groups)
my_orgs = current_user.get_user_orgs().order_by('org_name')
jaz_orgs = []
for org in my_orgs:
org.user_perms = org.org_perms(current_user)
jaz_orgs.append(org)
my_orgs_page.set_pages(Paginator(jaz_orgs, 3))
#raise AssertionError(jaz_orgs[0].user_perms)
context = {'message':_("Index"),
'all_orgs_page':all_orgs_page,
'my_groups':my_groups,
'my_orgs_page':my_orgs_page,
'ajax_page_my':reverse('org_orgs_list_my_ajax',kwargs={}),
'ajax_page_all':reverse('org_orgs_list_all_ajax',kwargs={}),
}
else:
template_name = "core/message.html"
context = {'message':message }
return render_to_response(template_name,context, context_instance=RequestContext(request))
@login_required
def orgs_list_all(request, template_name=None):
message = None
if not request.is_ajax():
template_name = "core/message.html"
message = Message(title=_("Cannot Be Viewed"), text=_("Cannot view this page" ))
context = {'message':message,}
if not message:
current_user, message = get_current_user(request.user)
if not message:
all_orgs_page = ePage(1)
if request.GET.__contains__("all_orgs_page"):
try:
all_orgs_page.curr = int(request.GET['all_orgs_page'])
except:
all_orgs_page.curr = 1
orgs = Organization.objects.filter(org_active=True).order_by('org_name')
all_orgs_page.set_pages(Paginator(orgs, 3))
context = {'all_orgs_page':all_orgs_page, 'ajax_page_all':reverse('org_orgs_list_all_ajax',kwargs={}),}
else:
template_name = "core/message.html"
context = {'message':message }
return render_to_response(template_name,context, context_instance=RequestContext(request))
@login_required
def orgs_list_my(request, template_name=None):
message = None
if not request.is_ajax():
template_name = "core/message.html"
message = Message(title=_("Cannot Be Viewed"), text=_("Cannot view this page" ))
context = {'message':message,}
if not message:
current_user, message = get_current_user(request.user)
if not message:
orgs = Organization.objects.filter(org_active=True).order_by('org_name')
my_orgs_page = ePage(1)
if request.GET.__contains__("my_orgs_page"):
try:
my_orgs_page.curr = int(request.GET['my_orgs_page'])
except:
my_orgs_page.curr = 1
my_org_groups = UserGroup.objects.filter(pk__in=current_user.get_user_groups())
my_groups = orgs.filter(group_set__in=my_org_groups)
my_orgs = current_user.get_user_orgs().order_by('org_name')
jaz_orgs = []
for org in my_orgs:
org.user_perms = org.org_perms(current_user)
jaz_orgs.append(org)
my_orgs_page.set_pages(Paginator(jaz_orgs, 3))
#raise AssertionError(my_orgs_page.current_page().object_list)
context = {'my_orgs_page':my_orgs_page,'ajax_page_my':reverse('org_orgs_list_my_ajax',kwargs={}),}
else:
template_name = "core/message.html"
context = {'message':message }
return render_to_response(template_name,context, context_instance=RequestContext(request))
@login_required
def org_join(request, org_short_name, template_name=None):
current_user, message = get_current_user(request.user)
if not message:
current_org, message = Organization.objects.get_current_org(org_short_name, message)
if not message:
operms = current_org.org_perms(current_user)
if operms['is_memberof_org']:
template_name = "core/message.html"
message = Message(title=_("Already a Member"), text=_("You are already a member of this organization." ))
message.addlink(_("Continue"),current_org.get_absolute_url())
context = {'message':message,}
if not message:
if not operms['can_join_org']:
template_name = "core/message.html"
message = Message(title=_("Approval Needed"), text=_("In order to join this organization, you need approval from the organization admin."))
message.addlink(_("Back"),current_org.get_absolute_url())
context = {'message':message,}
if not message:
if request.method == 'POST':
current_user.user_organizations.add(current_org)
current_user.user_invites_set.filter(org=current_org).delete()
template_name = "core/message.html"
#message = Message(title="You have Joined the organization", text="Org Join Successful: " + org_user_group.group_name )
message = Message(title=_("You have Joined the organization"), text=_("Org Join Successful: %s" % (current_org.org_name,)) )
message.addlink(_("Continue"),current_org.get_absolute_url())
context = {'message':message,}
else:
form = OrganizationJoinForm()
context = {'form':form,'current_org':current_org}
else:
template_name = "core/message.html"
context = {'message':message }
return render_to_response(template_name,context,context_instance=RequestContext(request))
@login_required
def org_leave(request, org_short_name, template_name=None):
current_user, message = get_current_user(request.user)
if not message:
current_org, message = Organization.objects.get_current_org(org_short_name)
if not message:
operms = current_org.org_perms(current_user)
if operms['is_memberof_org']:
if request.method == 'POST':
current_user.user_organizations.remove(current_org)
template_name = "core/message.html"
message = Message(title=_("Left Organization"), text=_("You have left the Organization"))
message.addlink(_("Continue"),reverse('org_orgs_list',kwargs={}))
else:
template_name = "core/message.html"
message = Message(title=_("Not a Member"), text=_("You cannot leave this organization because you are not a member of the organization."))
message.addlink(_("Back"),reverse('org_orgs_list',kwargs={}))
context = {'message':message, 'current_org':current_org, }
else:
template_name = "core/message.html"
context = {'message':message }
return render_to_response(template_name,context,context_instance=RequestContext(request))
@login_required
def org_view(request,org_short_name,template_name=None):
""" Displays organization detail information """
current_org, message = Organization.objects.get_current_org(org_short_name)
if not message:
members_page = ePage(1)
if request.GET.__contains__("members_page"):
try:
members_page.curr = int(request.GET['members_page'])
except:
members_page.curr = 1
members = current_org.get_members()
members_page.set_pages(Paginator(members, 48))
#raise AssertionError(members_page.prev)
org_eventtypes = current_org.get_eventtypes()
context = {'message':_("Org View"),'current_org':current_org,'org_eventtypes':org_eventtypes, 'members':members_page, 'ajax_page_members': reverse('org_org_user_list_ajax', kwargs={'org_short_name':current_org.org_short_name,})}
else:
template_name = "core/message.html"
context = {'message':message }
return render_to_response(template_name,context,context_instance=RequestContext(request))
@login_required
def org_members(request,org_short_name,template_name=None):
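	""" Lists the members of an organization """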
current_org, message = Organization.objects.get_current_org(org_short_name)
if not message:
current_user, message = get_current_user(request.user)
if not message:
operms = current_org.org_perms(current_user)
if not operms['is_memberof_org']:
template_name = "core/message.html"
message = Message(title=_("Can Not Edit Org"), text=_("You cannot view members of an organization that you do not belong to."))
message.addlink(_("Back"),current_org.get_absolute_url())
context = {'message':message,}
if not message:
members_page = ePage(1)
if request.GET.__contains__("members_page"):
try:
members_page.curr = int(request.GET['members_page'])
except:
members_page.curr = 1
members = current_org.get_members()
members_page.set_pages(Paginator(members, 48))
context = {'current_org':current_org,'members':members_page,'ajax_page_members': reverse('org_org_user_list_ajax', kwargs={'org_short_name':current_org.org_short_name,})}
else:
template_name = "core/message.html"
context = {'message':message }
return render_to_response(template_name,context,context_instance=RequestContext(request))
@login_required
def org_edit(request,org_short_name=None,template_name=None):
""" Edits an organization """
current_org, message = Organization.objects.get_current_org(org_short_name)
if not message:
current_user, message = get_current_user(request.user)
if not message:
operms = current_org.org_perms(current_user)
if not operms['is_memberof_org']:
template_name = "core/message.html"
message = Message(title=_("Can Not Edit Org"), text=_("You cannot edit an organization that you do not belong to."))
message.addlink(_("Back"),current_org.get_absolute_url())
context = {'message':message,}
if not message:
if not operms['can_edit_org']:
template_name = "core/message.html"
message = Message(title=_("Can Not Edit Org"), text=_("You cannot edit this organization because you do not have permission to."))
message.addlink(_("Back"),current_org.get_absolute_url())
context = {'message':message,}
if not message:
show_dialog=False
if request.method == 'POST':
form = OrganizationFormEdit(request.POST, instance=current_org)
if form.is_valid():
form.save()
message = Message(title=_("Organization Changes Saved"), text=_("Organization Changes Saved"))
message.addlink(_("View"),current_org.get_absolute_url())
message.addlink(_("Edit"),reverse('org_org_edit',kwargs={'org_short_name':current_org.org_short_name,}))
if request.POST.get("dialog",'') == "False":
template_name = "core/message.html"
show_dialog=False
else:
show_dialog=True
context = {'org_short_name':org_short_name,'form':form,'current_org':current_org,'message':message,'show_dialog':show_dialog,}
else:
form = OrganizationFormEdit(auto_id=False,instance=current_org)
context = {'org_short_name':org_short_name,'form':form,'current_org':current_org}
else:
template_name = "core/message.html"
context = {'message':message }
return render_to_response(template_name, context,context_instance=RequestContext(request))
@login_required
def org_remove(request,org_short_name=None,template_name=None):
""" Removes an organization """
current_org, message = Organization.objects.get_current_org(org_short_name)
if not message:
current_user, message = get_current_user(request.user)
if not message:
operms = current_org.org_perms(current_user)
if not operms['is_memberof_org']:
template_name = "core/message.html"
message = Message(title=_("Can Not Remove Org"), text=_("You cannot remove an organization that you do not belong to."))
message.addlink(_("Continue"),current_org.get_absolute_url())
context = {'message':message,}
if not message:
if not operms['can_remove_org']:
template_name = "core/message.html"
message = Message(title=_("Can Not Remove Org"), text=_("You cannot remove this organization because you do not have permission to."))
message.addlink(_("Back"),current_org.get_absolute_url())
context = {'message':message,}
if not message:
context = {'current_org':current_org}
if request.method == 'POST':
current_org.org_active = False
current_org.save()
return HttpResponseRedirect(reverse('org_orgs_list',))
else:
pass
else:
template_name = "core/message.html"
context = {'message':message }
return render_to_response(template_name,context,context_instance=RequestContext(request))
@login_required
def org_add(request,template_name=None):
""" Adds an organization """
current_user, message = get_current_user(request.user)
if not message:
show_dialog=False
if request.method == 'POST':
form = OrganizationForm(request.POST)
if form.is_valid():
current_org = form.save()
current_org.save()
groups = UserGroup.objects.init_org_groups(current_org, current_user)
eventtypes = EventType.objects.init_event_types(current_org)
message = Message(title=_("Organization Added"), text=_("Organization Added"))
message.addlink(_("View"),current_org.get_absolute_url())
message.addlink(_("Edit"),reverse('org_org_edit',kwargs={'org_short_name':current_org.org_short_name,}))
if request.POST.get("dialog",'') == "False":
template_name = "core/message.html"
show_dialog=False
else:
show_dialog=True
context = {'message':message,'current_org':current_org,'form':form,'show_dialog':show_dialog,}
else:
context = { 'form':form,'show_dialog':show_dialog,}
else:
form = OrganizationForm()
context = { 'form':form }
else:
template_name = "core/message.html"
context = {'message':message }
return render_to_response(template_name,context,context_instance=RequestContext(request))
@login_required
def org_member_remove(request,org_short_name=None, username=None, template_name=None):
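	""" Removes a member from an organization """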
current_user, message = get_current_user(request.user)
if not message:
current_org, message = Organization.objects.get_current_org(org_short_name)
if not message:
current_member, message = get_current_user(username)
if not message:
operms = current_org.org_perms(current_user)
if not operms['is_memberof_org']:
template_name = "core/message.html"
message = Message(title=_("Can Not Remove User"), text=_("You cannot remove a user in an organization that you do not belong to."))
message.addlink(_("Continue"),current_org.get_absolute_url())
context = {'message':message,}
if not message:
if not operms['can_remove_users']:
template_name = "core/message.html"
message = Message(title=_("Can Not Remove Member"), text=_("You cannot remove this member because you do not have permission to."))
message.addlink(_("Back"),current_org.get_absolute_url())
context = {'message':message,}
if not message:
if request.method == 'POST':
current_member.user_organizations.remove(current_org)
return HttpResponseRedirect(current_org.get_absolute_url())
else:
pass
context = {'current_org':current_org, 'current_member':current_member, }
else:
template_name = "core/message.html"
context = {'message':message }
return render_to_response(template_name,context,context_instance=RequestContext(request))
@login_required
def org_member_invite(request,org_short_name=None, template_name=None):
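	""" Invites users to an organization """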
current_user, message = get_current_user(request.user)
invited_users = get_user_model().objects.none()
if not message:
current_org, message = Organization.objects.get_current_org(org_short_name)
if not message:
operms = current_org.org_perms(current_user)
if not operms['is_memberof_org']:
template_name = "core/message.html"
message = Message(title=_("Can Not Invite User"), text=_("You cannot invite a user to an organization that you do not belong to."))
message.addlink(_("Continue"),current_org.get_absolute_url())
context = {'message':message,}
if not message:
if not operms['can_invite_users']:
template_name = "core/message.html"
message = Message(title=_("Can Not Invite Member"), text=_("You cannot invite people to this organization because you do not have permission to."))
message.addlink(_("Back"),current_org.get_absolute_url())
context = {'message':message,}
if not message:
invited_users_page = ePage(1)
org_invites = current_org.invite_set.all()
invited_users = get_user_model().objects.filter(user_invites_set__in=org_invites)
if request.method == 'POST':
form = OrganizationInviteMember(request.POST)
if form.is_valid():
user_list = form.cleaned_data['invite_list'].strip().strip(',').split(',')
new_user_list = []
for user in user_list:
new_user_list.append(user.strip().strip(','))
new_invited_users = get_user_model().objects.filter(username__in=new_user_list).exclude(user_invites_set__in=org_invites)
for user in new_invited_users:
i = OrgInvite()
i.user = user
i.org = current_org
i.direction = True
i.save()
invited_users = invited_users | new_invited_users
else:
form = OrganizationInviteMember()
		if request.GET.__contains__("invited_users_page"):
			try:
				invited_users_page.curr = int(request.GET['invited_users_page'])
			except:
				invited_users_page.curr = 1
invited_users_page.set_pages(Paginator(invited_users, 5))
context = {'current_org':current_org,'form':form,'invited_users':invited_users_page,'ajax_page_members':reverse('org_org_invites_list_ajax', kwargs={'org_short_name':current_org.org_short_name,})}
else:
template_name = "core/message.html"
context = {'message':message }
return render_to_response(template_name,context,context_instance=RequestContext(request))
def org_list_invites(request,org_short_name,template_name=None):
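	""" Lists the pending invites of an organization """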
invited_users_page = ePage(1)
message = None
if not True: # request.is_ajax():
template_name = "core/message.html"
message = Message(title=_("Cannot Be Viewed"), text=_("Cannot view this page" ))
context = {'message':message,}
if not message:
current_user, message = get_current_user(request.user)
if not message:
current_org, message = Organization.objects.get_current_org(org_short_name)
if not message:
if request.GET.__contains__("invited_users_page"):
try:
invited_users_page.curr = int(request.GET['invited_users_page'])
except:
invited_users_page.curr = 1
org_invites = current_org.invite_set.all()
invited_users = get_user_model().objects.filter(user_invites_set__in=org_invites)
invited_users_page.set_pages(Paginator(invited_users, 5))
context = {'current_org':current_org,'invited_users':invited_users_page,'ajax_page_members':reverse('org_org_invites_list_ajax', kwargs={'org_short_name':current_org.org_short_name,})}
else:
template_name = "core/message.html"
context = {'message':message }
return render_to_response(template_name,context,context_instance=RequestContext(request))
| gpl-2.0 | -6,959,863,144,797,156,000 | 47.804721 | 236 | 0.617597 | false |
jamesp/jpy | jpy/maths/pde.py | 1 | 2463 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Numerical Methods of Partial Differential Equations.
Provides integration methods and other utility functions such as the RA & RAW
time filters for numerical integration of PDEs.
"""
import numpy as np
def RA_filter(phi, epsilon=0.1):
"""Robert-Asselin-Williams time filter.
phi: A tuple of phi at time levels (n-1), n, (n+1)
epsilon: The RA filter weighting
Takes variable phi at 3 timelevels (n-1), n, (n+1) and recouples the values
at (n) and (n+1).
φ_bar(n) = φ(n) + ϵ[ φ(n+1) - 2φ(n) + φ(n-1) ]
"""
_phi, phi, phi_ = phi
return (_phi, phi + epsilon*(_phi - 2.0 * phi + phi_), phi_)
def RAW_filter(phi, nu=0.2, alpha=0.53):
"""The RAW time filter, an improvement on RA filter.
phi: A tuple of phi at time levels (n-1), n, (n+1)
nu: Equivalent to 2*ϵ; the RA filter weighting
alpha: Scaling factor for n and (n+1) timesteps.
With α=1, RAW —> RA.
For more information, see [Williams 2009].
"""
_phi, phi, phi_ = phi
d = nu*0.5*(_phi - 2.0 * phi + phi_)
return (_phi, phi+alpha*d, phi_ + (alpha-1)*d)
if __name__ == '__main__':
import matplotlib.pyplot as plt
    # simple harmonic oscillator example from [Williams 2009]
xt = lambda x,y,t,omega: -omega*y
yt = lambda x,y,t,omega: omega*x
x0, y0 = 1.0, 0.0
dt = 0.2
omega = 1.0
alpha = 0.53 # RAW filter parameter
t=0.0
# initialise with a single euler step
_x = x = x0
_y = y = y0
x = _x + dt*xt(x,y,t,omega)
y = _y + dt*yt(x,y,t,omega)
xs = [x0,x]
ys = [y0,y]
ts = [0, dt]
# integrate forward using leapfrog method
for t in np.arange(0+dt,100,dt):
x_ = _x + 2*dt*xt(x,y,t,omega)
y_ = _y + 2*dt*yt(x,y,t,omega)
(_x,x,x_) = RAW_filter((_x,x,x_), alpha=alpha)
(_y,y,y_) = RAW_filter((_y,y,y_), alpha=alpha)
# step variables forward
ts.append(t+dt)
_x,x = x,x_
_y,y = y,y_
xs.append(x)
ys.append(y)
ts = np.array(ts)
xs = np.array(xs)
ys = np.array(ys)
print np.array([ts,xs,ys])
plt.subplot(211)
plt.plot(ts,xs)
plt.plot(ts, np.cos(ts), 'grey')
plt.xlabel('x')
plt.subplot(212)
plt.plot(ts,ys)
plt.plot(ts, np.sin(ts), 'grey')
plt.ylabel('y')
plt.show()
# [Williams 2009] - Paul Williams. A Proposed Modification to the Robert–Asselin Time Filter.
| mit | -3,893,353,240,602,028,000 | 27.172414 | 93 | 0.565891 | false |
Aipakazuma/b_scouter | src/preprocess.py | 1 | 4275 | # -*- coding: utf8 -*-
import cv2
import numpy as np
import glob
import os
ASSETS_PATH = os.path.join(os.path.dirname(__file__), 'assets', 'calibrate')
calibrate_files = []
def preprocess_calibrate():
"""preprocess calibrate."""
global calibrate_files
for file in glob.glob(os.path.join(ASSETS_PATH, 'left*.jpg')):
calibrate_files.append(file)
for file in glob.glob(os.path.join(ASSETS_PATH, 'right*.jpg')):
calibrate_files.append(file)
def calibrate():
"""exec calibrate."""
# termination criteria
criteria = (cv2.TERM_CRITERIA_EPS + cv2.TERM_CRITERIA_MAX_ITER, 30, 0.001)
# prepare object points, like (0,0,0), (1,0,0), (2,0,0) ....,(6,5,0)
objp = np.zeros((6 * 7, 3), np.float32)
objp[:,:2] = np.mgrid[0:7, 0:6].T.reshape(-1,2)
# Arrays to store object points and image points from all the images.
objpoints = [] # 3d point in real world space
imgpoints = [] # 2d points in image plane.
shape = None
global calibrate_files
for fname in calibrate_files:
img = cv2.imread(fname)
gray_image = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
# Find the chess board corners
ret, corners = cv2.findChessboardCorners(gray_image, (7, 6), None)
# If found, add object points, image points (after refining them)
if ret == True:
objpoints.append(objp)
corners2 = cv2.cornerSubPix(gray_image, corners, (11, 11), (-1, -1), criteria)
imgpoints.append(corners2)
if shape is None:
shape = gray_image.shape[::-1]
# キャリブレーション
ret, mtx, dist, rvecs, tvecs = cv2.calibrateCamera(objpoints, imgpoints, shape, None, None)
return mtx, dist
def distortion_correction(original_image, gray_image):
"""distortion correction."""
mtx, dist = calibrate()
# 歪み補正
h, w = gray_image.shape[:2]
newcameramtx, roi = cv2.getOptimalNewCameraMatrix(mtx, dist, (w, h), 1, (w, h))
# 歪補正
dist2 = cv2.undistort(gray_image, mtx, dist, None, newcameramtx)
# 画像の切り落とし
x, y, w, h = roi
return dist2[y:y+h, x:x+w]
def line_processing(gray_image, output_threshold_min=200):
"""dilate and substract."""
gaussian_blur_image = cv2.GaussianBlur(gray_image.copy(), (7, 7), 1)
_, threshold = cv2.threshold(gaussian_blur_image.copy(), 125, 255, cv2.THRESH_BINARY)
kernel = np.ones((5, 5), np.uint8)
dilation = cv2.dilate(gaussian_blur_image.copy(), kernel, iterations=1)
diff = cv2.subtract(dilation, gaussian_blur_image.copy())
inverted_white = 255 - diff
_, line_threshold = cv2.threshold(inverted_white, output_threshold_min, 255, cv2.THRESH_BINARY)
return line_threshold
def rect_processing(original_image, line_threshold):
"""rect processing."""
find_contours_image, contours, hierarchy = cv2.findContours(line_threshold.copy(), cv2.RETR_TREE,cv2.CHAIN_APPROX_SIMPLE)
draw_image = cv2.drawContours(line_threshold.copy(), contours, -1, (255, 255, 255), 3)
th_area = line_threshold.shape[0] * line_threshold.shape[1] / 100
contours_large = list(filter(lambda c:cv2.contourArea(c) > th_area, contours))
outputs = []
rects = []
approxes = []
for (i,cnt) in enumerate(contours_large):
# 面積の計算
arclen = cv2.arcLength(cnt, True)
# 周囲長を計算(要は多角形の辺の総和)
approx = cv2.approxPolyDP(cnt, 0.02 * arclen, True)
# 小さいやつは除外
if len(approx) < 4:
continue
x, y, w, h = cv2.boundingRect(cnt)
if is_video_frame_size(x, y, w, h):
approxes.append(approx)
rects.append([x, y, w, h])
rect = cv2.rectangle(original_image.copy(), (x, y), (x+w, y+h), (255, 255, 255), 2)
outputs.append(rect)
return rects, outputs, approxes
def is_video_frame_size(x, y, w, h, threshold=200):
"""check video frame size.
DVD 68:95
"""
width = w - x
height = h - y
# 68:95 = width:height -> height = (95 * width) / 68
_height = (95 * width) / 68
loss = height - _height
if threshold > abs(loss):
return True
return False
preprocess_calibrate()
| mit | -8,244,897,975,416,989,000 | 29.40146 | 125 | 0.617527 | false |
gmt/overlay-upstream-tracking | OUT/__init__.py | 1 | 5112 | from __future__ import print_function
from FixPython import is_string
__all__ = [
"Ephemeral",
"FixPython",
"OOParsing",
"OUTDate",
"OUTDbgPrint",
"OUTDebug",
"OUTDebugModes",
"OUTUpdated",
"OUTVerboseModes",
"OUTVersion",
"Repo",
"Rules",
"RulesParser"
]
__version__ = 0.1
__date__ = '2014-07-18'
__updated__ = '2014-07-18'
OUTVersion = __version__
OUTDate = __date__
OUTUpdated = __updated__
OUTDebugModes = frozenset([
'misc', # General debug output not fitting any other category
'cmd', # Command-line processing
])
OUTVerboseModes = frozenset([
'misc'
])
class _OUTDebug(object):
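	"""Set of enabled debug modes, validated against OUTDebugModes and supporting set-style operations."""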
def __init__(self, mode=set()):
self.mode = mode
@property
def mode(self):
return self._mode
@mode.setter
def mode(self, value):
if isinstance(value, _OUTDebug):
self._mode = value.mode.copy()
elif is_string(value):
self._mode = set(value.split(' '))
else:
self._mode = set(value)
self.validate()
def enable(self, mode):
if is_string(mode):
mode = mode.split(' ')
mode = set(mode)
self._mode |= mode
self.validate()
def disable(self, mode):
if is_string(mode):
mode = mode.split(' ')
mode = set(mode)
self._mode -= mode
# no need to validate as always valid
def validate(self):
if not OUTDebugModes >= self._mode:
raise Exception('Invalid debug mode(s) set: %s' % ', '.join(self._mode - OUTDebugModes))
def __getitem__(self, attrs):
return self._mode >= attrs
def __setitem__(self, attrs, value):
if value:
self.enable(attrs)
else:
self.disable(attrs)
def __contains__(self, attrs):
return self[attrs]
def __iadd__(self, value):
if isinstance(value, _OUTDebug):
# no validation needed
self._mode |= value.mode
elif is_string(value):
self.mode |= set(value.split(' '))
else:
self.mode |= set(value)
return self
def __add__(self, value):
if isinstance(value, _OUTDebug):
return _OUTDebug(self._mode | value.mode)
elif is_string(value):
return _OUTDebug(self._mode | set(value.split(' ')))
else:
return _OUTDebug(self._mode | set(value))
__or__ = __add__
__ior__ = __iadd__
def __iand__(self, value):
if isinstance(value, _OUTDebug):
self._mode &= value.mode
elif is_string(value):
self.mode = self.mode & set(value.split(' '))
else:
self.mode = self.mode & set(value)
return self
def __and__(self, value):
if isinstance(value, _OUTDebug):
return _OUTDebug(self._mode & value.mode)
elif is_string(value):
return _OUTDebug(self._mode & set(value.split(' ')))
else:
return _OUTDebug(self._mode & set(value))
def __isub__(self, value):
if isinstance(value, _OUTDebug):
self._mode -= value.mode
else:
self.disable(value)
return self
def __sub__(self, value):
if isinstance(value, _OUTDebug):
return _OUTDebug(self._mode - value.mode)
elif is_string(value):
return self._mode - set(value.split(' '))
else:
return self._mode - set(value)
def __invert__(self):
return _OUTDebug(OUTDebugModes - self._mode)
__not__ = __invert__
def __le__(self, value):
if isinstance(value, _OUTDebug):
return self._mode <= value.mode
elif is_string(value):
return self._mode <= set(value.split(' '))
else:
return self._mode <= set(value)
def __ge__(self, value):
if isinstance(value, _OUTDebug):
return self.mode >= value.mode
elif is_string(value):
return self.mode >= set(value.split(' '))
else:
return self.mode >= set(value)
def __eq__(self, value):
if isinstance(value, _OUTDebug):
return self._mode == value._mode
elif is_string(value):
return self.mode == set(value.split(' '))
else:
return self.mode == set(value)
def __ne__(self, value):
return (not (self == value))
def __lt__(self, value):
return self <= value and self != value
def __gt__(self, value):
return self >= value and self != value
def __len__(self):
return len(self._mode)
issubset = __lt__
issuperset = __gt__
union = __or__
intersection = __and__
difference = __sub__
def __copy__(self):
return _OUTDebug(self._mode.copy())
def __repr__(self):
return '_OUTDebug("%s")' % ' '.join(self.mode)
__str__ = __repr__
OUTDebug = _OUTDebug()
OUTProfile = False
def OUTDbgPrint(mode, *args, **kwargs):
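	"""Print *args with a 'debug: ' prefix when at least one of the given modes is enabled."""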
if len(OUTDebug & mode) > 0:
print('debug: ', end='')
print(*args, **kwargs)
| gpl-2.0 | 1,658,684,616,275,879,200 | 27.4 | 100 | 0.52856 | false |
justinvforvendetta/electrum-rby | gui/qt/main_window.py | 1 | 110670 | #!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2012 thomasv@gitorious
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import sys, time, re, threading
from electrum_rby.i18n import _, set_language
from electrum_rby.util import block_explorer, block_explorer_info, block_explorer_URL
from electrum_rby.util import print_error, print_msg
import os.path, json, ast, traceback
import shutil
import StringIO
import PyQt4
from PyQt4.QtGui import *
from PyQt4.QtCore import *
import PyQt4.QtCore as QtCore
from electrum_rby.bitcoin import MIN_RELAY_TX_FEE, is_valid
from electrum_rby.plugins import run_hook
import icons_rc
from electrum_rby.util import format_satoshis, format_time, NotEnoughFunds, StoreDict
from electrum_rby import Transaction
from electrum_rby import mnemonic
from electrum_rby import util, bitcoin, commands, Wallet
from electrum_rby import SimpleConfig, Wallet, WalletStorage
from electrum_rby import Imported_Wallet
from amountedit import AmountEdit, BTCAmountEdit, MyLineEdit
from network_dialog import NetworkDialog
from qrcodewidget import QRCodeWidget, QRDialog
from qrtextedit import ScanQRTextEdit, ShowQRTextEdit
from decimal import Decimal
import httplib
import socket
import webbrowser
import csv
from electrum_rby import ELECTRUM_VERSION
import re
from util import *
class StatusBarButton(QPushButton):
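    """Flat icon button for the status bar that also triggers its callback on the Return key."""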
def __init__(self, icon, tooltip, func):
QPushButton.__init__(self, icon, '')
self.setToolTip(tooltip)
self.setFlat(True)
self.setMaximumWidth(25)
self.clicked.connect(func)
self.func = func
self.setIconSize(QSize(25,25))
def keyPressEvent(self, e):
if e.key() == QtCore.Qt.Key_Return:
apply(self.func,())
from electrum_rby.paymentrequest import PR_UNPAID, PR_PAID, PR_EXPIRED
from electrum_rby.paymentrequest import PaymentRequest, InvoiceStore, get_payment_request, make_payment_request
pr_icons = {
PR_UNPAID:":icons/unpaid.png",
PR_PAID:":icons/confirmed.png",
PR_EXPIRED:":icons/expired.png"
}
pr_tooltips = {
PR_UNPAID:_('Pending'),
PR_PAID:_('Paid'),
PR_EXPIRED:_('Expired')
}
expiration_values = [
(_('1 hour'), 60*60),
    (_('1 day'), 24*60*60),
(_('1 week'), 7*24*60*60),
(_('Never'), None)
]
class ElectrumWindow(QMainWindow):
labelsChanged = pyqtSignal()
def __init__(self, config, network, gui_object):
QMainWindow.__init__(self)
self.config = config
self.network = network
self.gui_object = gui_object
self.tray = gui_object.tray
self.go_lite = gui_object.go_lite
self.lite = None
self.app = gui_object.app
self.invoices = InvoiceStore(self.config)
self.contacts = StoreDict(self.config, 'contacts')
self.create_status_bar()
self.need_update = threading.Event()
self.decimal_point = config.get('decimal_point', 8)
self.num_zeros = int(config.get('num_zeros',0))
self.completions = QStringListModel()
self.tabs = tabs = QTabWidget(self)
tabs.addTab(self.create_history_tab(), _('History') )
tabs.addTab(self.create_send_tab(), _('Send') )
tabs.addTab(self.create_receive_tab(), _('Receive') )
tabs.addTab(self.create_addresses_tab(), _('Addresses') )
tabs.addTab(self.create_contacts_tab(), _('Contacts') )
tabs.addTab(self.create_console_tab(), _('Console') )
tabs.setMinimumSize(600, 400)
tabs.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
self.setCentralWidget(tabs)
try:
self.setGeometry(*self.config.get("winpos-qt"))
except:
self.setGeometry(100, 100, 840, 400)
if self.config.get("is_maximized"):
self.showMaximized()
self.setWindowIcon(QIcon(":icons/electrum-rby.png"))
self.init_menubar()
QShortcut(QKeySequence("Ctrl+W"), self, self.close)
QShortcut(QKeySequence("Ctrl+Q"), self, self.close)
QShortcut(QKeySequence("Ctrl+R"), self, self.update_wallet)
QShortcut(QKeySequence("Ctrl+PgUp"), self, lambda: tabs.setCurrentIndex( (tabs.currentIndex() - 1 )%tabs.count() ))
QShortcut(QKeySequence("Ctrl+PgDown"), self, lambda: tabs.setCurrentIndex( (tabs.currentIndex() + 1 )%tabs.count() ))
for i in range(tabs.count()):
QShortcut(QKeySequence("Alt+" + str(i + 1)), self, lambda i=i: tabs.setCurrentIndex(i))
self.connect(self, QtCore.SIGNAL('stop'), self.close)
self.connect(self, QtCore.SIGNAL('update_status'), self.update_status)
self.connect(self, QtCore.SIGNAL('banner_signal'), lambda: self.console.showMessage(self.network.banner) )
self.connect(self, QtCore.SIGNAL('transaction_signal'), lambda: self.notify_transactions() )
self.connect(self, QtCore.SIGNAL('payment_request_ok'), self.payment_request_ok)
self.connect(self, QtCore.SIGNAL('payment_request_error'), self.payment_request_error)
self.labelsChanged.connect(self.update_tabs)
self.history_list.setFocus(True)
# network callbacks
if self.network:
self.network.register_callback('updated', lambda: self.need_update.set())
self.network.register_callback('banner', lambda: self.emit(QtCore.SIGNAL('banner_signal')))
self.network.register_callback('status', lambda: self.emit(QtCore.SIGNAL('update_status')))
self.network.register_callback('new_transaction', lambda: self.emit(QtCore.SIGNAL('transaction_signal')))
self.network.register_callback('stop', lambda: self.emit(QtCore.SIGNAL('stop')))
# set initial message
self.console.showMessage(self.network.banner)
self.wallet = None
self.payment_request = None
self.qr_window = None
self.not_enough_funds = False
self.pluginsdialog = None
def update_account_selector(self):
# account selector
accounts = self.wallet.get_account_names()
self.account_selector.clear()
if len(accounts) > 1:
self.account_selector.addItems([_("All accounts")] + accounts.values())
self.account_selector.setCurrentIndex(0)
self.account_selector.show()
else:
self.account_selector.hide()
def close_wallet(self):
self.wallet.stop_threads()
run_hook('close_wallet')
def load_wallet(self, wallet):
import electrum_rby as electrum
self.wallet = wallet
# backward compatibility
self.update_wallet_format()
self.import_old_contacts()
# address used to create a dummy transaction and estimate transaction fee
a = self.wallet.addresses(False)
self.dummy_address = a[0] if a else None
self.accounts_expanded = self.wallet.storage.get('accounts_expanded',{})
self.current_account = self.wallet.storage.get("current_account", None)
title = 'Electrum-RBY %s - %s' % (self.wallet.electrum_version, self.wallet.basename())
if self.wallet.is_watching_only():
title += ' [%s]' % (_('watching only'))
self.setWindowTitle( title )
self.update_history_tab()
self.need_update.set()
# Once GUI has been initialized check if we want to announce something since the callback has been called before the GUI was initialized
self.notify_transactions()
self.update_account_selector()
# update menus
self.new_account_menu.setVisible(self.wallet.can_create_accounts())
self.private_keys_menu.setEnabled(not self.wallet.is_watching_only())
self.password_menu.setEnabled(self.wallet.can_change_password())
self.seed_menu.setEnabled(self.wallet.has_seed())
self.mpk_menu.setEnabled(self.wallet.is_deterministic())
self.import_menu.setVisible(self.wallet.can_import())
self.export_menu.setEnabled(self.wallet.can_export())
self.update_lock_icon()
self.update_buttons_on_seed()
self.update_console()
self.clear_receive_tab()
self.update_receive_tab()
self.show()
run_hook('load_wallet', wallet)
def import_old_contacts(self):
# backward compatibility: import contacts
addressbook = set(self.wallet.storage.get('contacts', []))
for k in addressbook:
l = self.wallet.labels.get(k)
if bitcoin.is_address(k) and l:
self.contacts[l] = ('address', k)
self.wallet.storage.put('contacts', None)
def update_wallet_format(self):
# convert old-format imported keys
if self.wallet.imported_keys:
password = self.password_dialog(_("Please enter your password in order to update imported keys")) if self.wallet.use_encryption else None
try:
self.wallet.convert_imported_keys(password)
except Exception as e:
traceback.print_exc(file=sys.stdout)
self.show_message(str(e))
# call synchronize to regenerate addresses in case we are offline
if self.wallet.get_master_public_keys() and self.wallet.addresses() == []:
self.wallet.synchronize()
def open_wallet(self):
wallet_folder = self.wallet.storage.path
filename = unicode( QFileDialog.getOpenFileName(self, "Select your wallet file", wallet_folder) )
if not filename:
return
try:
storage = WalletStorage(filename)
except Exception as e:
self.show_message(str(e))
return
if not storage.file_exists:
self.show_message(_("File not found") + ' ' + filename)
return
# read wizard action
try:
wallet = Wallet(storage)
except BaseException as e:
QMessageBox.warning(None, _('Warning'), str(e), _('OK'))
return
action = wallet.get_action()
self.hide()
# run wizard
if action is not None:
wallet = self.gui_object.run_wizard(storage, action)
else:
wallet.start_threads(self.network)
# keep current wallet
if not wallet:
self.show()
return
# close current wallet
self.close_wallet()
# load new wallet in gui
self.load_wallet(wallet)
# save path
if self.config.get('wallet_path') is None:
self.config.set_key('gui_last_wallet', filename)
def backup_wallet(self):
import shutil
path = self.wallet.storage.path
wallet_folder = os.path.dirname(path)
filename = unicode( QFileDialog.getSaveFileName(self, _('Enter a filename for the copy of your wallet'), wallet_folder) )
if not filename:
return
new_path = os.path.join(wallet_folder, filename)
if new_path != path:
try:
shutil.copy2(path, new_path)
QMessageBox.information(None,"Wallet backup created", _("A copy of your wallet file was created in")+" '%s'" % str(new_path))
except (IOError, os.error), reason:
QMessageBox.critical(None,"Unable to create backup", _("Electrum was unable to copy your wallet file to the specified location.")+"\n" + str(reason))
def new_wallet(self):
import installwizard
wallet_folder = os.path.dirname(os.path.abspath(self.wallet.storage.path))
i = 1
while True:
filename = "wallet_%d"%i
if filename in os.listdir(wallet_folder):
i += 1
else:
break
filename = line_dialog(self, _('New Wallet'), _('Enter file name') + ':', _('OK'), filename)
if not filename:
return
full_path = os.path.join(wallet_folder, filename)
storage = WalletStorage(full_path)
if storage.file_exists:
QMessageBox.critical(None, "Error", _("File exists"))
return
self.hide()
wizard = installwizard.InstallWizard(self.config, self.network, storage)
action, wallet_type = wizard.restore_or_create()
if not action:
self.show()
return
# close current wallet, but keep a reference to it
self.close_wallet()
wallet = wizard.run(action, wallet_type)
if wallet:
self.load_wallet(wallet)
else:
self.wallet.start_threads(self.network)
self.load_wallet(self.wallet)
self.show()
def init_menubar(self):
menubar = QMenuBar()
file_menu = menubar.addMenu(_("&File"))
file_menu.addAction(_("&Open"), self.open_wallet).setShortcut(QKeySequence.Open)
file_menu.addAction(_("&New/Restore"), self.new_wallet).setShortcut(QKeySequence.New)
file_menu.addAction(_("&Save Copy"), self.backup_wallet).setShortcut(QKeySequence.SaveAs)
file_menu.addAction(_("&Quit"), self.close)
wallet_menu = menubar.addMenu(_("&Wallet"))
wallet_menu.addAction(_("&New contact"), self.new_contact_dialog)
self.new_account_menu = wallet_menu.addAction(_("&New account"), self.new_account_dialog)
wallet_menu.addSeparator()
self.password_menu = wallet_menu.addAction(_("&Password"), self.change_password_dialog)
self.seed_menu = wallet_menu.addAction(_("&Seed"), self.show_seed_dialog)
self.mpk_menu = wallet_menu.addAction(_("&Master Public Keys"), self.show_master_public_keys)
wallet_menu.addSeparator()
labels_menu = wallet_menu.addMenu(_("&Labels"))
labels_menu.addAction(_("&Import"), self.do_import_labels)
labels_menu.addAction(_("&Export"), self.do_export_labels)
self.private_keys_menu = wallet_menu.addMenu(_("&Private keys"))
self.private_keys_menu.addAction(_("&Sweep"), self.sweep_key_dialog)
self.import_menu = self.private_keys_menu.addAction(_("&Import"), self.do_import_privkey)
self.export_menu = self.private_keys_menu.addAction(_("&Export"), self.export_privkeys_dialog)
wallet_menu.addAction(_("&Export History"), self.export_history_dialog)
wallet_menu.addAction(_("Search"), self.toggle_search).setShortcut(QKeySequence("Ctrl+S"))
tools_menu = menubar.addMenu(_("&Tools"))
# Settings / Preferences are all reserved keywords in OSX using this as work around
tools_menu.addAction(_("Electrum preferences") if sys.platform == 'darwin' else _("Preferences"), self.settings_dialog)
tools_menu.addAction(_("&Network"), self.run_network_dialog)
tools_menu.addAction(_("&Plugins"), self.plugins_dialog)
tools_menu.addSeparator()
tools_menu.addAction(_("&Sign/verify message"), self.sign_verify_message)
tools_menu.addAction(_("&Encrypt/decrypt message"), self.encrypt_message)
tools_menu.addSeparator()
paytomany_menu = tools_menu.addAction(_("&Pay to many"), self.paytomany)
raw_transaction_menu = tools_menu.addMenu(_("&Load transaction"))
raw_transaction_menu.addAction(_("&From file"), self.do_process_from_file)
raw_transaction_menu.addAction(_("&From text"), self.do_process_from_text)
raw_transaction_menu.addAction(_("&From the blockchain"), self.do_process_from_txid)
raw_transaction_menu.addAction(_("&From QR code"), self.read_tx_from_qrcode)
self.raw_transaction_menu = raw_transaction_menu
help_menu = menubar.addMenu(_("&Help"))
help_menu.addAction(_("&About"), self.show_about)
help_menu.addAction(_("&Official website"), lambda: webbrowser.open("http://electrum-rby.space"))
help_menu.addSeparator()
help_menu.addAction(_("&Documentation"), lambda: webbrowser.open("http://electrum-rby.space")).setShortcut(QKeySequence.HelpContents)
help_menu.addAction(_("&Report Bug"), self.show_report_bug)
self.setMenuBar(menubar)
def show_about(self):
QMessageBox.about(self, "Electrum-RBY",
_("Version")+" %s" % (self.wallet.electrum_version) + "\n\n" + _("Electrum's focus is speed, with low resource usage and simplifying RubyCoin. You do not need to perform regular backups, because your wallet can be recovered from a secret phrase that you can memorize or write on paper. Startup times are instant because it operates in conjunction with high-performance servers that handle the most complicated parts of the RubyCoin system."))
def show_report_bug(self):
QMessageBox.information(self, "Electrum-RBY - " + _("Reporting Bugs"),
_("Please report any bugs as issues on github:")+" <a href=\"https://github.com/rby/electrum-rby/issues\">https://github.com/rby/electrum-rby/issues</a>")
def notify_transactions(self):
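        """Notify the user of incoming transactions, combining three or more into a single summary message."""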
if not self.network or not self.network.is_connected():
return
print_error("Notifying GUI")
if len(self.network.pending_transactions_for_notifications) > 0:
# Combine the transactions if there are more then three
tx_amount = len(self.network.pending_transactions_for_notifications)
if(tx_amount >= 3):
total_amount = 0
for tx in self.network.pending_transactions_for_notifications:
is_relevant, is_mine, v, fee = self.wallet.get_tx_value(tx)
if(v > 0):
total_amount += v
self.notify(_("%(txs)s new transactions received. Total amount received in the new transactions %(amount)s %(unit)s") \
% { 'txs' : tx_amount, 'amount' : self.format_amount(total_amount), 'unit' : self.base_unit()})
self.network.pending_transactions_for_notifications = []
else:
for tx in self.network.pending_transactions_for_notifications:
if tx:
self.network.pending_transactions_for_notifications.remove(tx)
is_relevant, is_mine, v, fee = self.wallet.get_tx_value(tx)
if(v > 0):
self.notify(_("New transaction received. %(amount)s %(unit)s") % { 'amount' : self.format_amount(v), 'unit' : self.base_unit()})
def notify(self, message):
if self.tray:
self.tray.showMessage("Electrum-RBY", message, QSystemTrayIcon.Information, 20000)
# custom wrappers for getOpenFileName and getSaveFileName, that remember the path selected by the user
def getOpenFileName(self, title, filter = ""):
directory = self.config.get('io_dir', unicode(os.path.expanduser('~')))
fileName = unicode( QFileDialog.getOpenFileName(self, title, directory, filter) )
if fileName and directory != os.path.dirname(fileName):
self.config.set_key('io_dir', os.path.dirname(fileName), True)
return fileName
def getSaveFileName(self, title, filename, filter = ""):
directory = self.config.get('io_dir', unicode(os.path.expanduser('~')))
path = os.path.join( directory, filename )
fileName = unicode( QFileDialog.getSaveFileName(self, title, path, filter) )
if fileName and directory != os.path.dirname(fileName):
self.config.set_key('io_dir', os.path.dirname(fileName), True)
return fileName
def close(self):
if self.qr_window:
self.qr_window.close()
QMainWindow.close(self)
run_hook('close_main_window')
def connect_slots(self, sender):
self.connect(sender, QtCore.SIGNAL('timersignal'), self.timer_actions)
self.previous_payto_e=''
def timer_actions(self):
if self.need_update.is_set():
self.update_wallet()
self.need_update.clear()
run_hook('timer_actions')
def format_amount(self, x, is_diff=False, whitespaces=False):
return format_satoshis(x, is_diff, self.num_zeros, self.decimal_point, whitespaces)
def get_decimal_point(self):
return self.decimal_point
def base_unit(self):
assert self.decimal_point in [2, 5, 8]
if self.decimal_point == 2:
return 'bits'
if self.decimal_point == 5:
return 'mRBY'
if self.decimal_point == 8:
return 'RBY'
raise Exception('Unknown base unit')
def update_status(self):
if not self.wallet:
return
if self.network is None or not self.network.is_running():
text = _("Offline")
icon = QIcon(":icons/status_disconnected.png")
elif self.network.is_connected():
server_lag = self.network.get_local_height() - self.network.get_server_height()
if not self.wallet.up_to_date:
text = _("Synchronizing...")
icon = QIcon(":icons/status_waiting.png")
elif server_lag > 1:
text = _("Server is lagging (%d blocks)"%server_lag)
icon = QIcon(":icons/status_lagging.png")
else:
c, u, x = self.wallet.get_account_balance(self.current_account)
text = _("Balance" ) + ": %s "%(self.format_amount(c)) + self.base_unit()
if u:
text += " [%s unconfirmed]"%(self.format_amount(u, True).strip())
if x:
text += " [%s unmatured]"%(self.format_amount(x, True).strip())
# append fiat balance and price from exchange rate plugin
r = {}
run_hook('get_fiat_status_text', c+u, r)
quote = r.get(0)
if quote:
text += "%s"%quote
if self.tray:
self.tray.setToolTip("%s (%s)" % (text, self.wallet.basename()))
icon = QIcon(":icons/status_connected.png")
else:
text = _("Not connected")
icon = QIcon(":icons/status_disconnected.png")
self.balance_label.setText(text)
self.status_button.setIcon( icon )
def update_wallet(self):
self.update_status()
if self.wallet.up_to_date or not self.network or not self.network.is_connected():
self.update_tabs()
def update_tabs(self):
self.update_history_tab()
self.update_receive_tab()
self.update_address_tab()
self.update_contacts_tab()
self.update_completions()
self.update_invoices_list()
def create_history_tab(self):
from history_widget import HistoryWidget
self.history_list = l = HistoryWidget(self)
return l
def show_address(self, addr):
import address_dialog
d = address_dialog.AddressDialog(addr, self)
d.exec_()
def show_transaction(self, tx):
import transaction_dialog
d = transaction_dialog.TxDialog(tx, self)
d.exec_()
def update_history_tab(self):
domain = self.wallet.get_account_addresses(self.current_account)
h = self.wallet.get_history(domain)
self.history_list.update(h)
def create_receive_tab(self):
self.receive_grid = grid = QGridLayout()
grid.setColumnMinimumWidth(3, 300)
self.receive_address_e = ButtonsLineEdit()
self.receive_address_e.addCopyButton(self.app)
self.receive_address_e.setReadOnly(True)
self.receive_address_label = QLabel(_('Receiving address'))
self.receive_address_e.textChanged.connect(self.update_receive_qr)
self.receive_address_e.setFocusPolicy(Qt.NoFocus)
grid.addWidget(self.receive_address_label, 0, 0)
grid.addWidget(self.receive_address_e, 0, 1, 1, 4)
self.receive_message_e = QLineEdit()
grid.addWidget(QLabel(_('Description')), 1, 0)
grid.addWidget(self.receive_message_e, 1, 1, 1, 4)
self.receive_message_e.textChanged.connect(self.update_receive_qr)
self.receive_amount_e = BTCAmountEdit(self.get_decimal_point)
grid.addWidget(QLabel(_('Requested amount')), 2, 0)
grid.addWidget(self.receive_amount_e, 2, 1, 1, 2)
self.receive_amount_e.textChanged.connect(self.update_receive_qr)
self.expires_combo = QComboBox()
self.expires_combo.addItems(map(lambda x:x[0], expiration_values))
self.expires_combo.setCurrentIndex(1)
grid.addWidget(QLabel(_('Expires in')), 3, 0)
grid.addWidget(self.expires_combo, 3, 1)
self.expires_label = QLineEdit('')
self.expires_label.setReadOnly(1)
self.expires_label.setFocusPolicy(Qt.NoFocus)
self.expires_label.hide()
grid.addWidget(self.expires_label, 3, 1, 1, 2)
self.save_request_button = QPushButton(_('Save'))
self.save_request_button.clicked.connect(self.save_payment_request)
self.new_request_button = QPushButton(_('New'))
self.new_request_button.clicked.connect(self.new_payment_request)
self.receive_qr = QRCodeWidget(fixedSize=200)
self.receive_qr.mouseReleaseEvent = lambda x: self.toggle_qr_window()
self.receive_qr.enterEvent = lambda x: self.app.setOverrideCursor(QCursor(Qt.PointingHandCursor))
self.receive_qr.leaveEvent = lambda x: self.app.setOverrideCursor(QCursor(Qt.ArrowCursor))
self.receive_buttons = buttons = QHBoxLayout()
buttons.addStretch(1)
buttons.addWidget(self.save_request_button)
buttons.addWidget(self.new_request_button)
self.receive_requests_label = QLabel(_('My Requests'))
self.receive_list = MyTreeWidget(self, self.receive_list_menu, [_('Date'), _('Account'), _('Address'), _('Description'), _('Amount'), _('Status')], 3)
self.receive_list.currentItemChanged.connect(self.receive_item_changed)
self.receive_list.itemClicked.connect(self.receive_item_changed)
self.receive_list.setSortingEnabled(True)
self.receive_list.setColumnWidth(0, 180)
self.receive_list.hideColumn(1) # the update will show it if necessary
self.receive_list.hideColumn(2) # don't show address
self.receive_list.setColumnWidth(2, 340)
h = self.receive_list.header()
h.setStretchLastSection(False)
h.setResizeMode(3, QHeaderView.Stretch)
# layout
vbox_g = QVBoxLayout()
vbox_g.addLayout(grid)
vbox_g.addLayout(buttons)
hbox = QHBoxLayout()
hbox.addLayout(vbox_g)
hbox.addStretch()
hbox.addWidget(self.receive_qr)
w = QWidget()
vbox = QVBoxLayout(w)
vbox.addLayout(hbox)
vbox.addStretch(1)
vbox.addWidget(self.receive_requests_label)
vbox.addWidget(self.receive_list)
return w
def receive_item_changed(self, item):
if item is None:
return
if not self.receive_list.isItemSelected(item):
return
addr = str(item.text(2))
req = self.receive_requests[addr]
expires = _('Never') if req.get('expiration') is None else format_time(req['time'] + req['expiration'])
amount = req['amount']
message = self.wallet.labels.get(addr, '')
self.receive_address_e.setText(addr)
self.receive_message_e.setText(message)
self.receive_amount_e.setAmount(amount)
self.expires_combo.hide()
self.expires_label.show()
self.expires_label.setText(expires)
self.new_request_button.setEnabled(True)
def delete_payment_request(self, item):
addr = str(item.text(2))
self.receive_requests.pop(addr)
self.wallet.storage.put('receive_requests2', self.receive_requests)
self.update_receive_tab()
self.clear_receive_tab()
def get_receive_URI(self):
addr = str(self.receive_address_e.text())
amount = self.receive_amount_e.get_amount()
message = unicode(self.receive_message_e.text())
URI = util.create_URI(addr, amount, message)
return URI
def receive_list_menu(self, position):
item = self.receive_list.itemAt(position)
addr = str(item.text(2))
req = self.receive_requests[addr]
time, amount = req['time'], req['amount']
message = self.wallet.labels.get(addr, '')
URI = util.create_URI(addr, amount, message)
menu = QMenu()
menu.addAction(_("Copy Address"), lambda: self.app.clipboard().setText(addr))
menu.addAction(_("Copy URI"), lambda: self.app.clipboard().setText(str(URI)))
menu.addAction(_("Save as BIP70 file"), lambda: self.export_payment_request(addr))
menu.addAction(_("Delete"), lambda: self.delete_payment_request(item))
menu.exec_(self.receive_list.viewport().mapToGlobal(position))
def save_payment_request(self):
addr = str(self.receive_address_e.text())
amount = self.receive_amount_e.get_amount()
message = unicode(self.receive_message_e.text())
if not message and not amount:
QMessageBox.warning(self, _('Error'), _('No message or amount'), _('OK'))
return
self.receive_requests = self.wallet.storage.get('receive_requests2', {})
if addr in self.receive_requests:
self.receive_requests[addr]['amount'] = amount
else:
now = int(time.time())
i = self.expires_combo.currentIndex()
expiration = map(lambda x: x[1], expiration_values)[i]
self.receive_requests[addr] = {'time':now, 'amount':amount, 'expiration':expiration}
self.wallet.storage.put('receive_requests2', self.receive_requests)
self.wallet.set_label(addr, message)
self.update_receive_tab()
self.update_address_tab()
self.save_request_button.setEnabled(False)
def make_payment_request(self, addr):
req = self.receive_requests[addr]
time = req['time']
amount = req['amount']
expiration = req['expiration']
message = self.wallet.labels.get(addr, '')
script = Transaction.pay_script('address', addr).decode('hex')
outputs = [(script, amount)]
key_path = self.config.get('ssl_key_path')
cert_path = self.config.get('ssl_cert_path')
return make_payment_request(outputs, message, time, time + expiration, key_path, cert_path)
def export_payment_request(self, addr):
pr = self.make_payment_request(addr)
name = 'request.bip70'
fileName = self.getSaveFileName(_("Select where to save your payment request"), name, "*.bip70")
if fileName:
with open(fileName, "wb+") as f:
f.write(str(pr))
self.show_message(_("Request saved successfully"))
self.saved = True
def get_receive_address(self):
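        """Return the first unused address of the current account that has no pending payment request."""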
domain = self.wallet.get_account_addresses(self.current_account, include_change=False)
for addr in domain:
if not self.wallet.history.get(addr) and addr not in self.receive_requests.keys():
return addr
def new_payment_request(self):
addr = self.get_receive_address()
if addr is None:
if isinstance(self.wallet, Imported_Wallet):
self.show_message(_('No more addresses in your wallet.'))
return
if not self.question(_("Warning: The next address will not be recovered automatically if you restore your wallet from seed; you may need to add it manually.\n\nThis occurs because you have too many unused addresses in your wallet. To avoid this situation, use the existing addresses first.\n\nCreate anyway?")):
return
addr = self.wallet.create_new_address(self.current_account, False)
self.set_receive_address(addr)
self.expires_label.hide()
self.expires_combo.show()
self.new_request_button.setEnabled(False)
self.receive_message_e.setFocus(1)
def set_receive_address(self, addr):
self.receive_address_e.setText(addr)
self.receive_message_e.setText('')
self.receive_amount_e.setAmount(None)
def clear_receive_tab(self):
self.receive_requests = self.wallet.storage.get('receive_requests2',{})
domain = self.wallet.get_account_addresses(self.current_account, include_change=False)
for addr in domain:
if not self.wallet.history.get(addr) and addr not in self.receive_requests.keys():
break
else:
addr = ''
self.receive_address_e.setText(addr)
self.receive_message_e.setText('')
self.receive_amount_e.setAmount(None)
self.expires_label.hide()
self.expires_combo.show()
def toggle_qr_window(self):
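        """Show or hide the detached QR code window, preserving its geometry."""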
import qrwindow
if not self.qr_window:
self.qr_window = qrwindow.QR_Window(self)
self.qr_window.setVisible(True)
self.qr_window_geometry = self.qr_window.geometry()
else:
if not self.qr_window.isVisible():
self.qr_window.setVisible(True)
self.qr_window.setGeometry(self.qr_window_geometry)
else:
self.qr_window_geometry = self.qr_window.geometry()
self.qr_window.setVisible(False)
self.update_receive_qr()
def receive_at(self, addr):
if not bitcoin.is_address(addr):
return
self.tabs.setCurrentIndex(2)
self.receive_address_e.setText(addr)
self.new_request_button.setEnabled(True)
def update_receive_tab(self):
self.receive_requests = self.wallet.storage.get('receive_requests2',{})
# hide receive tab if no receive requests available
b = len(self.receive_requests) > 0
self.receive_list.setVisible(b)
self.receive_requests_label.setVisible(b)
if not b:
self.expires_label.hide()
self.expires_combo.show()
# check if it is necessary to show the account
self.receive_list.setColumnHidden(1, len(self.wallet.get_accounts()) == 1)
# update the receive address if necessary
current_address = self.receive_address_e.text()
domain = self.wallet.get_account_addresses(self.current_account, include_change=False)
addr = self.get_receive_address()
if not current_address in domain and addr:
self.set_receive_address(addr)
self.new_request_button.setEnabled(addr != current_address)
# clear the list and fill it again
self.receive_list.clear()
for address, req in self.receive_requests.viewitems():
timestamp, amount = req['time'], req['amount']
expiration = req.get('expiration', None)
message = self.wallet.labels.get(address, '')
# only show requests for the current account
if address not in domain:
continue
date = format_time(timestamp)
account = self.wallet.get_account_name(self.wallet.get_account_from_address(address))
amount_str = self.format_amount(amount) if amount else ""
if amount:
paid = amount <= self.wallet.get_addr_received(address)
status = PR_PAID if paid else PR_UNPAID
if status == PR_UNPAID and expiration is not None and time.time() > timestamp + expiration:
status = PR_EXPIRED
else:
status = ''
item = QTreeWidgetItem([date, account, address, message, amount_str, pr_tooltips.get(status,'')])
if status is not '':
item.setIcon(5, QIcon(pr_icons.get(status)))
self.receive_list.addTopLevelItem(item)
def update_receive_qr(self):
addr = str(self.receive_address_e.text())
amount = self.receive_amount_e.get_amount()
message = unicode(self.receive_message_e.text()).encode('utf8')
self.save_request_button.setEnabled((amount is not None) or (message != ""))
uri = util.create_URI(addr, amount, message)
self.receive_qr.setData(uri)
if self.qr_window and self.qr_window.isVisible():
self.qr_window.set_content(addr, amount, message, uri)
def create_send_tab(self):
self.send_grid = grid = QGridLayout()
grid.setSpacing(8)
grid.setColumnMinimumWidth(3,300)
grid.setColumnStretch(5,1)
grid.setRowStretch(8, 1)
from paytoedit import PayToEdit
self.amount_e = BTCAmountEdit(self.get_decimal_point)
self.payto_e = PayToEdit(self)
msg = _('Recipient of the funds.') + '\n\n'\
+ _('You may enter a RubyCoin address, a label from your list of contacts (a list of completions will be proposed), or an alias (email-like address that forwards to a RubyCoin address)')
payto_label = HelpLabel(_('Pay to'), msg)
grid.addWidget(payto_label, 1, 0)
grid.addWidget(self.payto_e, 1, 1, 1, 3)
completer = QCompleter()
completer.setCaseSensitivity(False)
self.payto_e.setCompleter(completer)
completer.setModel(self.completions)
msg = _('Description of the transaction (not mandatory).') + '\n\n'\
+ _('The description is not sent to the recipient of the funds. It is stored in your wallet file, and displayed in the \'History\' tab.')
description_label = HelpLabel(_('Description'), msg)
grid.addWidget(description_label, 2, 0)
self.message_e = MyLineEdit()
grid.addWidget(self.message_e, 2, 1, 1, 3)
self.from_label = QLabel(_('From'))
grid.addWidget(self.from_label, 3, 0)
self.from_list = MyTreeWidget(self, self.from_list_menu, ['',''])
self.from_list.setHeaderHidden(True)
self.from_list.setMaximumHeight(80)
grid.addWidget(self.from_list, 3, 1, 1, 3)
self.set_pay_from([])
msg = _('Amount to be sent.') + '\n\n' \
+ _('The amount will be displayed in red if you do not have enough funds in your wallet.') + ' ' \
+ _('Note that if you have frozen some of your addresses, the available funds will be lower than your total balance.') + '\n\n' \
+ _('Keyboard shortcut: type "!" to send all your coins.')
amount_label = HelpLabel(_('Amount'), msg)
grid.addWidget(amount_label, 4, 0)
grid.addWidget(self.amount_e, 4, 1, 1, 2)
msg = _('RubyCoin transactions are in general not free. A transaction fee is paid by the sender of the funds.') + '\n\n'\
+ _('The amount of fee can be decided freely by the sender. However, transactions with low fees take more time to be processed.') + '\n\n'\
+ _('A suggested fee is automatically added to this field. You may override it. The suggested fee increases with the size of the transaction.')
self.fee_e_label = HelpLabel(_('Fee'), msg)
self.fee_e = BTCAmountEdit(self.get_decimal_point)
grid.addWidget(self.fee_e_label, 5, 0)
grid.addWidget(self.fee_e, 5, 1, 1, 2)
self.send_button = EnterButton(_("Send"), self.do_send)
self.clear_button = EnterButton(_("Clear"), self.do_clear)
buttons = QHBoxLayout()
buttons.addStretch(1)
buttons.addWidget(self.send_button)
buttons.addWidget(self.clear_button)
def on_shortcut():
sendable = self.get_sendable_balance()
inputs = self.get_coins()
for i in inputs: self.wallet.add_input_info(i)
addr = self.payto_e.payto_address if self.payto_e.payto_address else self.dummy_address
output = ('address', addr, sendable)
dummy_tx = Transaction.from_io(inputs, [output])
fee = self.wallet.estimated_fee(dummy_tx)
self.amount_e.setAmount(max(0,sendable-fee))
self.amount_e.textEdited.emit("")
self.fee_e.setAmount(fee)
self.amount_e.shortcut.connect(on_shortcut)
self.payto_e.textChanged.connect(lambda: self.update_fee(False))
self.amount_e.textEdited.connect(lambda: self.update_fee(False))
self.fee_e.textEdited.connect(lambda: self.update_fee(True))
def entry_changed():
if not self.not_enough_funds:
palette = QPalette()
palette.setColor(self.amount_e.foregroundRole(), QColor('black'))
text = ""
else:
palette = QPalette()
palette.setColor(self.amount_e.foregroundRole(), QColor('red'))
text = _( "Not enough funds" )
c, u, x = self.wallet.get_frozen_balance()
if c+u+x:
text += ' (' + self.format_amount(c+u+x).strip() + ' ' + self.base_unit() + ' ' +_("are frozen") + ')'
self.statusBar().showMessage(text)
self.amount_e.setPalette(palette)
self.fee_e.setPalette(palette)
self.amount_e.textChanged.connect(entry_changed)
self.fee_e.textChanged.connect(entry_changed)
self.invoices_label = QLabel(_('Invoices'))
self.invoices_list = MyTreeWidget(self, self.create_invoice_menu,
[_('Date'), _('Requestor'), _('Description'), _('Amount'), _('Status')], 2)
self.invoices_list.header().setResizeMode(1, QHeaderView.Interactive)
self.invoices_list.setColumnWidth(1, 200)
vbox0 = QVBoxLayout()
vbox0.addLayout(grid)
vbox0.addLayout(buttons)
vbox0.addStretch(1)
hbox = QHBoxLayout()
hbox.addLayout(vbox0)
hbox.addStretch(1)
w = QWidget()
vbox = QVBoxLayout(w)
vbox.addLayout(hbox)
vbox.addStretch()
vbox.addWidget(self.invoices_label)
vbox.addWidget(self.invoices_list)
# Defer this until grid is parented to avoid ugly flash during startup
self.update_fee_edit()
run_hook('create_send_tab', grid)
return w
def update_fee(self, is_fee):
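        """Recompute the displayed fee from the current outputs, estimating it with an unsigned transaction unless the user edited the fee field directly."""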
outputs = self.payto_e.get_outputs()
amount = self.amount_e.get_amount()
fee = self.fee_e.get_amount() if is_fee else None
if amount is None:
self.fee_e.setAmount(None)
self.not_enough_funds = False
else:
if not outputs:
addr = self.payto_e.payto_address if self.payto_e.payto_address else self.dummy_address
outputs = [('address', addr, amount)]
try:
tx = self.wallet.make_unsigned_transaction(outputs, fee, coins = self.get_coins())
self.not_enough_funds = False
except NotEnoughFunds:
self.not_enough_funds = True
if not is_fee:
fee = None if self.not_enough_funds else self.wallet.get_tx_fee(tx)
self.fee_e.setAmount(fee)
def update_fee_edit(self):
b = self.config.get('can_edit_fees', False)
self.fee_e.setVisible(b)
self.fee_e_label.setVisible(b)
def from_list_delete(self, item):
i = self.from_list.indexOfTopLevelItem(item)
self.pay_from.pop(i)
self.redraw_from_list()
def from_list_menu(self, position):
item = self.from_list.itemAt(position)
menu = QMenu()
menu.addAction(_("Remove"), lambda: self.from_list_delete(item))
menu.exec_(self.from_list.viewport().mapToGlobal(position))
def set_pay_from(self, domain = None):
self.pay_from = [] if domain == [] else self.wallet.get_spendable_coins(domain)
self.redraw_from_list()
def redraw_from_list(self):
self.from_list.clear()
self.from_label.setHidden(len(self.pay_from) == 0)
self.from_list.setHidden(len(self.pay_from) == 0)
def format(x):
h = x.get('prevout_hash')
return h[0:8] + '...' + h[-8:] + ":%d"%x.get('prevout_n') + u'\t' + "%s"%x.get('address')
for item in self.pay_from:
self.from_list.addTopLevelItem(QTreeWidgetItem( [format(item), self.format_amount(item['value']) ]))
def get_contact_payto(self, key):
_type, value = self.contacts.get(key)
return key + ' <' + value + '>' if _type == 'address' else key
def update_completions(self):
l = [self.get_contact_payto(key) for key in self.contacts.keys()]
self.completions.setStringList(l)
def protected(func):
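        # Descriptive note (added): decorator that routes the wrapped call
        # through do_protect, so the user is prompted for the wallet password
        # (when encryption is enabled) before the method runs.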
return lambda s, *args: s.do_protect(func, args)
def read_send_tab(self):
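        # Descriptive note (added): validate the Send tab fields and return
        # (outputs, fee, label, coins), or None if validation fails or the
        # user aborts.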
if self.payment_request and self.payment_request.has_expired():
QMessageBox.warning(self, _('Error'), _('Payment request has expired'), _('OK'))
return
label = unicode( self.message_e.text() )
if self.payment_request:
outputs = self.payment_request.get_outputs()
else:
errors = self.payto_e.get_errors()
if errors:
self.show_warning(_("Invalid Lines found:") + "\n\n" + '\n'.join([ _("Line #") + str(x[0]+1) + ": " + x[1] for x in errors]))
return
outputs = self.payto_e.get_outputs()
if not outputs:
QMessageBox.warning(self, _('Error'), _('No outputs'), _('OK'))
return
for _type, addr, amount in outputs:
if addr is None:
QMessageBox.warning(self, _('Error'), _('RubyCoin Address is None'), _('OK'))
return
if _type == 'address' and not bitcoin.is_address(addr):
QMessageBox.warning(self, _('Error'), _('Invalid RubyCoin Address'), _('OK'))
return
if amount is None:
QMessageBox.warning(self, _('Error'), _('Invalid Amount'), _('OK'))
return
fee = self.fee_e.get_amount()
if fee is None:
QMessageBox.warning(self, _('Error'), _('Invalid Fee'), _('OK'))
return
amount = sum(map(lambda x:x[2], outputs))
confirm_amount = self.config.get('confirm_amount', 1000000000)
if amount >= confirm_amount:
o = '\n'.join(map(lambda x:x[1], outputs))
if not self.question(_("send %(amount)s to %(address)s?")%{ 'amount' : self.format_amount(amount) + ' '+ self.base_unit(), 'address' : o}):
return
coins = self.get_coins()
return outputs, fee, label, coins
def do_send(self):
if run_hook('before_send'):
return
r = self.read_send_tab()
if not r:
return
outputs, fee, label, coins = r
try:
tx = self.wallet.make_unsigned_transaction(outputs, fee, None, coins = coins)
if not tx:
raise BaseException(_("Insufficient funds"))
except Exception as e:
traceback.print_exc(file=sys.stdout)
self.show_message(str(e))
return
if tx.get_fee() < tx.required_fee(self.wallet):
QMessageBox.warning(self, _('Error'), _("This transaction requires a higher fee, or it will not be propagated by the network."), _('OK'))
return
if not self.config.get('can_edit_fees', False):
if not self.question(_("A fee of %(fee)s will be added to this transaction.\nProceed?")%{ 'fee' : self.format_amount(fee) + ' '+ self.base_unit()}):
return
else:
confirm_fee = self.config.get('confirm_fee', 10000000)
if fee >= confirm_fee:
if not self.question(_("The fee for this transaction seems unusually high.\nAre you really sure you want to pay %(fee)s in fees?")%{ 'fee' : self.format_amount(fee) + ' '+ self.base_unit()}):
return
self.send_tx(tx, label)
@protected
def send_tx(self, tx, label, password):
self.send_button.setDisabled(True)
# call hook to see if plugin needs gui interaction
run_hook('send_tx', tx)
# sign the tx
def sign_thread():
if self.wallet.is_watching_only():
return tx
self.wallet.sign_transaction(tx, password)
return tx
def sign_done(tx):
if label and tx.is_complete():
self.wallet.set_label(tx.hash(), label)
if not tx.is_complete() or self.config.get('show_before_broadcast'):
self.show_transaction(tx)
self.do_clear()
return
self.broadcast_transaction(tx)
# keep a reference to WaitingDialog or the gui might crash
self.waiting_dialog = WaitingDialog(self, 'Signing..', sign_thread, sign_done, lambda: self.send_button.setDisabled(False))
self.waiting_dialog.start()
def broadcast_transaction(self, tx):
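        # Descriptive note (added): broadcast in a background thread: send the
        # raw transaction and, when a payment request is active, mark the
        # invoice paid and send the ACK with a refund address. broadcast_done
        # then reports the result on the GUI thread via WaitingDialog.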
def broadcast_thread():
# non-GUI thread
pr = self.payment_request
if pr is None:
return self.wallet.sendtx(tx)
if pr.has_expired():
self.payment_request = None
return False, _("Payment request has expired")
status, msg = self.wallet.sendtx(tx)
if not status:
return False, msg
key = pr.get_id()
self.invoices.set_paid(key, tx.hash())
self.payment_request = None
refund_address = self.wallet.addresses()[0]
ack_status, ack_msg = pr.send_ack(str(tx), refund_address)
if ack_status:
msg = ack_msg
return status, msg
def broadcast_done(status, msg):
# GUI thread
if status:
QMessageBox.information(self, '', _('Payment sent.') + '\n' + msg, _('OK'))
self.update_invoices_list()
self.do_clear()
else:
QMessageBox.warning(self, _('Error'), msg, _('OK'))
self.send_button.setDisabled(False)
self.waiting_dialog = WaitingDialog(self, 'Broadcasting..', broadcast_thread, broadcast_done)
self.waiting_dialog.start()
def prepare_for_payment_request(self):
self.tabs.setCurrentIndex(1)
self.payto_e.is_pr = True
for e in [self.payto_e, self.amount_e, self.message_e]:
e.setFrozen(True)
self.payto_e.setText(_("please wait..."))
return True
def payment_request_ok(self):
pr = self.payment_request
key = self.invoices.add(pr)
status = self.invoices.get_status(key)
self.update_invoices_list()
if status == PR_PAID:
self.show_message("invoice already paid")
self.do_clear()
self.payment_request = None
return
if not pr.has_expired():
self.payto_e.setGreen()
else:
self.payto_e.setExpired()
self.payto_e.setText(pr.get_requestor())
self.amount_e.setText(self.format_amount(pr.get_amount()))
self.message_e.setText(pr.get_memo())
# signal to set fee
self.amount_e.textEdited.emit("")
def payment_request_error(self):
self.do_clear()
self.show_message(self.payment_request.error)
self.payment_request = None
def pay_from_URI(self,URI):
if not URI:
return
try:
address, amount, label, message, request_url = util.parse_URI(URI)
except Exception as e:
QMessageBox.warning(self, _('Error'), _('Invalid rubycoin URI:') + '\n' + str(e), _('OK'))
return
self.tabs.setCurrentIndex(1)
if not request_url:
if label:
if self.wallet.labels.get(address) != label:
if self.question(_('Save label "%(label)s" for address %(address)s ?'%{'label':label,'address':address})):
if address not in self.wallet.addressbook and not self.wallet.is_mine(address):
self.wallet.addressbook.append(address)
self.wallet.set_label(address, label)
else:
label = self.wallet.labels.get(address)
if address:
self.payto_e.setText(label + ' <'+ address +'>' if label else address)
if message:
self.message_e.setText(message)
if amount:
self.amount_e.setAmount(amount)
self.amount_e.textEdited.emit("")
return
def get_payment_request_thread():
self.payment_request = get_payment_request(request_url)
if self.payment_request.verify():
self.emit(SIGNAL('payment_request_ok'))
else:
self.emit(SIGNAL('payment_request_error'))
t = threading.Thread(target=get_payment_request_thread)
t.setDaemon(True)
t.start()
self.prepare_for_payment_request()
def do_clear(self):
self.not_enough_funds = False
self.payto_e.is_pr = False
for e in [self.payto_e, self.message_e, self.amount_e, self.fee_e]:
e.setText('')
e.setFrozen(False)
self.set_pay_from([])
self.update_status()
run_hook('do_clear')
def set_addrs_frozen(self,addrs,freeze):
for addr in addrs:
if not addr: continue
if addr in self.wallet.frozen_addresses and not freeze:
self.wallet.unfreeze(addr)
elif addr not in self.wallet.frozen_addresses and freeze:
self.wallet.freeze(addr)
self.update_address_tab()
def create_list_tab(self, l):
w = QWidget()
vbox = QVBoxLayout()
w.setLayout(vbox)
vbox.setMargin(0)
vbox.setSpacing(0)
vbox.addWidget(l)
buttons = QWidget()
vbox.addWidget(buttons)
return w
def create_addresses_tab(self):
l = MyTreeWidget(self, self.create_receive_menu, [ _('Address'), _('Label'), _('Balance'), _('Tx')], 1)
l.setSelectionMode(QAbstractItemView.ExtendedSelection)
l.setSortingEnabled(False)
self.address_list = l
return self.create_list_tab(l)
def create_contacts_tab(self):
l = MyTreeWidget(self, self.create_contact_menu, [_('Key'), _('Value'), _('Type')], 1)
self.contacts_list = l
return self.create_list_tab(l)
def update_invoices_list(self):
inv_list = self.invoices.sorted_list()
l = self.invoices_list
l.clear()
for pr in inv_list:
key = pr.get_id()
status = self.invoices.get_status(key)
requestor = pr.get_requestor()
date_str = format_time(pr.get_expiration_date())
item = QTreeWidgetItem( [ date_str, requestor, pr.memo, self.format_amount(pr.get_amount(), whitespaces=True), pr_tooltips.get(status,'')] )
item.setIcon(4, QIcon(pr_icons.get(status)))
item.setData(0, Qt.UserRole, key)
item.setFont(1, QFont(MONOSPACE_FONT))
item.setFont(3, QFont(MONOSPACE_FONT))
l.addTopLevelItem(item)
l.setCurrentItem(l.topLevelItem(0))
self.invoices_list.setVisible(len(inv_list))
self.invoices_label.setVisible(len(inv_list))
def delete_imported_key(self, addr):
if self.question(_("Do you want to remove")+" %s "%addr +_("from your wallet?")):
self.wallet.delete_imported_key(addr)
self.update_address_tab()
self.update_history_tab()
def edit_account_label(self, k):
text, ok = QInputDialog.getText(self, _('Rename account'), _('Name') + ':', text = self.wallet.labels.get(k,''))
if ok:
label = unicode(text)
self.wallet.set_label(k,label)
self.update_address_tab()
def account_set_expanded(self, item, k, b):
item.setExpanded(b)
self.accounts_expanded[k] = b
def create_account_menu(self, position, k, item):
menu = QMenu()
exp = item.isExpanded()
menu.addAction(_("Minimize") if exp else _("Maximize"), lambda: self.account_set_expanded(item, k, not exp))
menu.addAction(_("Rename"), lambda: self.edit_account_label(k))
if self.wallet.seed_version > 4:
menu.addAction(_("View details"), lambda: self.show_account_details(k))
if self.wallet.account_is_pending(k):
menu.addAction(_("Delete"), lambda: self.delete_pending_account(k))
menu.exec_(self.address_list.viewport().mapToGlobal(position))
def delete_pending_account(self, k):
self.wallet.delete_pending_account(k)
self.update_address_tab()
self.update_account_selector()
def create_receive_menu(self, position):
# fixme: this function apparently has a side effect.
# if it is not called the menu pops up several times
#self.address_list.selectedIndexes()
selected = self.address_list.selectedItems()
multi_select = len(selected) > 1
addrs = [unicode(item.text(0)) for item in selected]
if not multi_select:
item = self.address_list.itemAt(position)
if not item:
return
addr = addrs[0]
if not is_valid(addr):
k = str(item.data(0,32).toString())
if k:
self.create_account_menu(position, k, item)
else:
item.setExpanded(not item.isExpanded())
return
menu = QMenu()
if not multi_select:
menu.addAction(_("Copy to clipboard"), lambda: self.app.clipboard().setText(addr))
menu.addAction(_("Request payment"), lambda: self.receive_at(addr))
menu.addAction(_("Edit label"), lambda: self.address_list.edit_label(item))
menu.addAction(_('History'), lambda: self.show_address(addr))
menu.addAction(_('Public Keys'), lambda: self.show_public_keys(addr))
if self.wallet.can_export():
menu.addAction(_("Private key"), lambda: self.show_private_key(addr))
if not self.wallet.is_watching_only():
menu.addAction(_("Sign/verify message"), lambda: self.sign_verify_message(addr))
menu.addAction(_("Encrypt/decrypt message"), lambda: self.encrypt_message(addr))
if self.wallet.is_imported(addr):
menu.addAction(_("Remove from wallet"), lambda: self.delete_imported_key(addr))
addr_URL = block_explorer_URL(self.config, 'addr', addr)
if addr_URL:
menu.addAction(_("View on block explorer"), lambda: webbrowser.open(addr_URL))
if any(addr not in self.wallet.frozen_addresses for addr in addrs):
menu.addAction(_("Freeze"), lambda: self.set_addrs_frozen(addrs, True))
if any(addr in self.wallet.frozen_addresses for addr in addrs):
menu.addAction(_("Unfreeze"), lambda: self.set_addrs_frozen(addrs, False))
def can_send(addr):
return addr not in self.wallet.frozen_addresses and self.wallet.get_addr_balance(addr) != (0, 0)
if any(can_send(addr) for addr in addrs):
menu.addAction(_("Send From"), lambda: self.send_from_addresses(addrs))
run_hook('receive_menu', menu, addrs)
menu.exec_(self.address_list.viewport().mapToGlobal(position))
def get_sendable_balance(self):
return sum(map(lambda x:x['value'], self.get_coins()))
def get_coins(self):
if self.pay_from:
return self.pay_from
else:
domain = self.wallet.get_account_addresses(self.current_account)
for i in self.wallet.frozen_addresses:
if i in domain: domain.remove(i)
return self.wallet.get_spendable_coins(domain)
def send_from_addresses(self, addrs):
self.set_pay_from( addrs )
self.tabs.setCurrentIndex(1)
def paytomany(self):
self.tabs.setCurrentIndex(1)
self.payto_e.paytomany()
def payto(self, addr):
if not addr:
return
self.tabs.setCurrentIndex(1)
self.payto_e.setText(addr)
self.amount_e.setFocus()
def delete_contact(self, x):
if not self.question(_("Do you want to remove")+" %s "%x +_("from your list of contacts?")):
return
self.contacts.pop(x)
self.update_history_tab()
self.update_contacts_tab()
self.update_completions()
def create_contact_menu(self, position):
item = self.contacts_list.itemAt(position)
menu = QMenu()
if not item:
menu.addAction(_("New contact"), lambda: self.new_contact_dialog())
else:
key = unicode(item.text(0))
menu.addAction(_("Copy to Clipboard"), lambda: self.app.clipboard().setText(key))
menu.addAction(_("Pay to"), lambda: self.payto(self.get_contact_payto(key)))
menu.addAction(_("Delete"), lambda: self.delete_contact(key))
run_hook('create_contact_menu', menu, item)
menu.exec_(self.contacts_list.viewport().mapToGlobal(position))
def show_invoice(self, key):
pr = self.invoices.get(key)
pr.verify()
self.show_pr_details(pr)
def show_pr_details(self, pr):
d = QDialog(self)
d.setWindowTitle(_("Invoice"))
vbox = QVBoxLayout(d)
grid = QGridLayout()
grid.addWidget(QLabel(_("Requestor") + ':'), 0, 0)
grid.addWidget(QLabel(pr.get_requestor()), 0, 1)
grid.addWidget(QLabel(_("Expires") + ':'), 1, 0)
grid.addWidget(QLabel(format_time(pr.get_expiration_date())), 1, 1)
grid.addWidget(QLabel(_("Memo") + ':'), 2, 0)
grid.addWidget(QLabel(pr.get_memo()), 2, 1)
grid.addWidget(QLabel(_("Signature") + ':'), 3, 0)
grid.addWidget(QLabel(pr.get_verify_status()), 3, 1)
grid.addWidget(QLabel(_("Payment URL") + ':'), 4, 0)
grid.addWidget(QLabel(pr.payment_url), 4, 1)
grid.addWidget(QLabel(_("Outputs") + ':'), 5, 0)
outputs_str = '\n'.join(map(lambda x: x[1] + ' ' + self.format_amount(x[2])+ self.base_unit(), pr.get_outputs()))
grid.addWidget(QLabel(outputs_str), 5, 1)
if pr.tx:
grid.addWidget(QLabel(_("Transaction ID") + ':'), 6, 0)
l = QLineEdit(pr.tx)
l.setReadOnly(True)
grid.addWidget(l, 6, 1)
vbox.addLayout(grid)
vbox.addLayout(Buttons(CloseButton(d)))
d.exec_()
return
def do_pay_invoice(self, key):
pr = self.invoices.get(key)
self.payment_request = pr
self.prepare_for_payment_request()
if pr.verify():
self.payment_request_ok()
else:
self.payment_request_error()
def create_invoice_menu(self, position):
item = self.invoices_list.itemAt(position)
if not item:
return
key = str(item.data(0, 32).toString())
pr = self.invoices.get(key)
status = self.invoices.get_status(key)
menu = QMenu()
menu.addAction(_("Details"), lambda: self.show_invoice(key))
if status == PR_UNPAID:
menu.addAction(_("Pay Now"), lambda: self.do_pay_invoice(key))
def delete_invoice(key):
self.invoices.remove(key)
self.update_invoices_list()
menu.addAction(_("Delete"), lambda: delete_invoice(key))
menu.exec_(self.invoices_list.viewport().mapToGlobal(position))
def update_address_tab(self):
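        # Descriptive note (added): rebuild the address tree: one top-level
        # item per account (when more than one account exists), with
        # Receiving/Change branches and a "Used" bucket collecting addresses
        # that have already been used.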
l = self.address_list
item = l.currentItem()
current_address = item.data(0, Qt.UserRole).toString() if item else None
l.clear()
accounts = self.wallet.get_accounts()
if self.current_account is None:
account_items = sorted(accounts.items())
else:
account_items = [(self.current_account, accounts.get(self.current_account))]
for k, account in account_items:
if len(accounts) > 1:
name = self.wallet.get_account_name(k)
c, u, x = self.wallet.get_account_balance(k)
account_item = QTreeWidgetItem([ name, '', self.format_amount(c + u + x), ''])
l.addTopLevelItem(account_item)
account_item.setExpanded(self.accounts_expanded.get(k, True))
account_item.setData(0, Qt.UserRole, k)
else:
account_item = l
sequences = [0,1] if account.has_change() else [0]
for is_change in sequences:
if len(sequences) > 1:
name = _("Receiving") if not is_change else _("Change")
seq_item = QTreeWidgetItem( [ name, '', '', '', ''] )
account_item.addChild(seq_item)
if not is_change:
seq_item.setExpanded(True)
else:
seq_item = account_item
used_item = QTreeWidgetItem( [ _("Used"), '', '', '', ''] )
used_flag = False
addr_list = account.get_addresses(is_change)
for address in addr_list:
num, is_used = self.wallet.is_used(address)
label = self.wallet.labels.get(address,'')
c, u, x = self.wallet.get_addr_balance(address)
balance = self.format_amount(c + u + x)
item = QTreeWidgetItem( [ address, label, balance, "%d"%num] )
item.setFont(0, QFont(MONOSPACE_FONT))
item.setData(0, Qt.UserRole, address)
item.setData(0, Qt.UserRole+1, True) # label can be edited
if address in self.wallet.frozen_addresses:
item.setBackgroundColor(0, QColor('lightblue'))
if self.wallet.is_beyond_limit(address, account, is_change):
item.setBackgroundColor(0, QColor('red'))
if is_used:
if not used_flag:
seq_item.insertChild(0, used_item)
used_flag = True
used_item.addChild(item)
else:
seq_item.addChild(item)
if address == current_address:
l.setCurrentItem(item)
def update_contacts_tab(self):
l = self.contacts_list
item = l.currentItem()
current_key = item.data(0, Qt.UserRole).toString() if item else None
l.clear()
for key in sorted(self.contacts.keys()):
_type, value = self.contacts[key]
item = QTreeWidgetItem([key, value, _type])
item.setData(0, Qt.UserRole, key)
l.addTopLevelItem(item)
if key == current_key:
l.setCurrentItem(item)
run_hook('update_contacts_tab', l)
def create_console_tab(self):
from console import Console
self.console = console = Console()
return console
def update_console(self):
console = self.console
console.history = self.config.get("console-history",[])
console.history_index = len(console.history)
console.updateNamespace({'wallet' : self.wallet, 'network' : self.network, 'gui':self})
console.updateNamespace({'util' : util, 'bitcoin':bitcoin})
c = commands.Commands(self.wallet, self.network, lambda: self.console.set_json(True))
methods = {}
def mkfunc(f, method):
return lambda *args: apply( f, (method, args, self.password_dialog ))
for m in dir(c):
if m[0]=='_' or m in ['network','wallet']: continue
methods[m] = mkfunc(c._run, m)
console.updateNamespace(methods)
def change_account(self,s):
if s == _("All accounts"):
self.current_account = None
else:
accounts = self.wallet.get_account_names()
for k, v in accounts.items():
if v == s:
self.current_account = k
self.update_history_tab()
self.update_status()
self.update_address_tab()
self.update_receive_tab()
def create_status_bar(self):
sb = QStatusBar()
sb.setFixedHeight(35)
qtVersion = qVersion()
self.balance_label = QLabel("")
sb.addWidget(self.balance_label)
from version_getter import UpdateLabel
self.updatelabel = UpdateLabel(self.config, sb)
self.account_selector = QComboBox()
self.account_selector.setSizeAdjustPolicy(QComboBox.AdjustToContents)
self.connect(self.account_selector,SIGNAL("activated(QString)"),self.change_account)
sb.addPermanentWidget(self.account_selector)
self.search_box = QLineEdit()
self.search_box.textChanged.connect(self.do_search)
self.search_box.hide()
sb.addPermanentWidget(self.search_box)
if (int(qtVersion[0]) >= 4 and int(qtVersion[2]) >= 7):
sb.addPermanentWidget( StatusBarButton( QIcon(":icons/switchgui.png"), _("Switch to Lite Mode"), self.go_lite ) )
self.lock_icon = QIcon()
self.password_button = StatusBarButton( self.lock_icon, _("Password"), self.change_password_dialog )
sb.addPermanentWidget( self.password_button )
sb.addPermanentWidget( StatusBarButton( QIcon(":icons/preferences.png"), _("Preferences"), self.settings_dialog ) )
self.seed_button = StatusBarButton( QIcon(":icons/seed.png"), _("Seed"), self.show_seed_dialog )
sb.addPermanentWidget( self.seed_button )
self.status_button = StatusBarButton( QIcon(":icons/status_disconnected.png"), _("Network"), self.run_network_dialog )
sb.addPermanentWidget( self.status_button )
run_hook('create_status_bar', sb)
self.setStatusBar(sb)
def update_lock_icon(self):
icon = QIcon(":icons/lock.png") if self.wallet.use_encryption else QIcon(":icons/unlock.png")
self.password_button.setIcon( icon )
def update_buttons_on_seed(self):
self.seed_button.setVisible(self.wallet.has_seed())
self.password_button.setVisible(self.wallet.can_change_password())
self.send_button.setText(_("Create unsigned transaction") if self.wallet.is_watching_only() else _("Send"))
def change_password_dialog(self):
from password_dialog import PasswordDialog
d = PasswordDialog(self.wallet, self)
d.run()
self.update_lock_icon()
def toggle_search(self):
self.search_box.setHidden(not self.search_box.isHidden())
if not self.search_box.isHidden():
self.search_box.setFocus(1)
else:
self.do_search('')
def do_search(self, t):
i = self.tabs.currentIndex()
if i == 0:
self.history_list.filter(t, [1, 2, 3]) # Date, Description, Amount
elif i == 1:
self.invoices_list.filter(t, [0, 1, 2, 3]) # Date, Requestor, Description, Amount
elif i == 2:
self.receive_list.filter(t, [0, 1, 2, 3, 4]) # Date, Account, Address, Description, Amount
elif i == 3:
self.address_list.filter(t, [0,1, 2]) # Address, Label, Balance
elif i == 4:
self.contacts_list.filter(t, [0, 1]) # Key, Value
def new_contact_dialog(self):
d = QDialog(self)
d.setWindowTitle(_("New Contact"))
vbox = QVBoxLayout(d)
vbox.addWidget(QLabel(_('New Contact') + ':'))
grid = QGridLayout()
line1 = QLineEdit()
line2 = QLineEdit()
grid.addWidget(QLabel(_("Address")), 1, 0)
grid.addWidget(line1, 1, 1)
grid.addWidget(QLabel(_("Name")), 2, 0)
grid.addWidget(line2, 2, 1)
vbox.addLayout(grid)
vbox.addLayout(Buttons(CancelButton(d), OkButton(d)))
if not d.exec_():
return
address = str(line1.text())
label = unicode(line2.text())
if not is_valid(address):
QMessageBox.warning(self, _('Error'), _('Invalid Address'), _('OK'))
return
self.contacts[label] = ('address', address)
self.update_contacts_tab()
self.update_history_tab()
self.update_completions()
self.tabs.setCurrentIndex(3)
@protected
def new_account_dialog(self, password):
dialog = QDialog(self)
dialog.setModal(1)
dialog.setWindowTitle(_("New Account"))
vbox = QVBoxLayout()
vbox.addWidget(QLabel(_('Account name')+':'))
e = QLineEdit()
vbox.addWidget(e)
msg = _("Note: Newly created accounts are 'pending' until they receive rubycoins.") + " " \
+ _("You will need to wait for 2 confirmations until the correct balance is displayed and more addresses are created for that account.")
l = QLabel(msg)
l.setWordWrap(True)
vbox.addWidget(l)
vbox.addLayout(Buttons(CancelButton(dialog), OkButton(dialog)))
dialog.setLayout(vbox)
r = dialog.exec_()
if not r:
return
name = str(e.text())
self.wallet.create_pending_account(name, password)
self.update_address_tab()
self.update_account_selector()
self.tabs.setCurrentIndex(3)
def show_master_public_keys(self):
dialog = QDialog(self)
dialog.setModal(1)
dialog.setWindowTitle(_("Master Public Keys"))
mpk_dict = self.wallet.get_master_public_keys()
vbox = QVBoxLayout()
# only show the combobox in case multiple accounts are available
if len(mpk_dict) > 1:
gb = QGroupBox(_("Master Public Keys"))
vbox.addWidget(gb)
group = QButtonGroup()
first_button = None
for key in sorted(mpk_dict.keys()):
is_mine = self.wallet.master_private_keys.has_key(key)
b = QRadioButton(gb)
name = 'Self' if is_mine else 'Cosigner'
b.setText(name + ' (%s)'%key)
b.key = key
group.addButton(b)
vbox.addWidget(b)
if not first_button:
first_button = b
mpk_text = ShowQRTextEdit()
mpk_text.setMaximumHeight(170)
vbox.addWidget(mpk_text)
def show_mpk(b):
mpk = mpk_dict.get(b.key, "")
mpk_text.setText(mpk)
group.buttonReleased.connect(show_mpk)
first_button.setChecked(True)
show_mpk(first_button)
elif len(mpk_dict) == 1:
mpk = mpk_dict.values()[0]
mpk_text = ShowQRTextEdit(text=mpk)
mpk_text.setMaximumHeight(170)
vbox.addWidget(mpk_text)
mpk_text.addCopyButton(self.app)
vbox.addLayout(Buttons(CloseButton(dialog)))
dialog.setLayout(vbox)
dialog.exec_()
@protected
def show_seed_dialog(self, password):
if not self.wallet.has_seed():
QMessageBox.information(self, _('Message'), _('This wallet has no seed'), _('OK'))
return
try:
mnemonic = self.wallet.get_mnemonic(password)
except BaseException as e:
QMessageBox.warning(self, _('Error'), str(e), _('OK'))
return
from seed_dialog import SeedDialog
d = SeedDialog(self, mnemonic, self.wallet.has_imported_keys())
d.exec_()
def show_qrcode(self, data, title = _("QR code")):
if not data:
return
d = QRDialog(data, self, title)
d.exec_()
def do_protect(self, func, args):
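        # Descriptive note (added): if the wallet is encrypted, ask for the
        # password (looping until it verifies), then call func with the
        # password appended to args.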
if self.wallet.use_encryption:
while True:
password = self.password_dialog()
if not password:
return
try:
self.wallet.check_password(password)
break
except Exception as e:
QMessageBox.warning(self, _('Error'), str(e), _('OK'))
continue
else:
password = None
if args != (False,):
args = (self,) + args + (password,)
else:
args = (self, password)
apply(func, args)
def show_public_keys(self, address):
if not address: return
try:
pubkey_list = self.wallet.get_public_keys(address)
except Exception as e:
traceback.print_exc(file=sys.stdout)
self.show_message(str(e))
return
d = QDialog(self)
d.setMinimumSize(600, 200)
d.setModal(1)
d.setWindowTitle(_("Public key"))
vbox = QVBoxLayout()
vbox.addWidget( QLabel(_("Address") + ': ' + address))
vbox.addWidget( QLabel(_("Public key") + ':'))
keys_e = ShowQRTextEdit(text='\n'.join(pubkey_list))
keys_e.addCopyButton(self.app)
vbox.addWidget(keys_e)
vbox.addLayout(Buttons(CloseButton(d)))
d.setLayout(vbox)
d.exec_()
@protected
def show_private_key(self, address, password):
if not address: return
try:
pk_list = self.wallet.get_private_key(address, password)
except Exception as e:
traceback.print_exc(file=sys.stdout)
self.show_message(str(e))
return
d = QDialog(self)
d.setMinimumSize(600, 200)
d.setModal(1)
d.setWindowTitle(_("Private key"))
vbox = QVBoxLayout()
vbox.addWidget( QLabel(_("Address") + ': ' + address))
vbox.addWidget( QLabel(_("Private key") + ':'))
keys_e = ShowQRTextEdit(text='\n'.join(pk_list))
keys_e.addCopyButton(self.app)
vbox.addWidget(keys_e)
vbox.addLayout(Buttons(CloseButton(d)))
d.setLayout(vbox)
d.exec_()
@protected
def do_sign(self, address, message, signature, password):
message = unicode(message.toPlainText())
message = message.encode('utf-8')
try:
sig = self.wallet.sign_message(str(address.text()), message, password)
signature.setText(sig)
except Exception as e:
self.show_message(str(e))
def do_verify(self, address, message, signature):
message = unicode(message.toPlainText())
message = message.encode('utf-8')
if bitcoin.verify_message(address.text(), str(signature.toPlainText()), message):
self.show_message(_("Signature verified"))
else:
self.show_message(_("Error: wrong signature"))
def sign_verify_message(self, address=''):
d = QDialog(self)
d.setModal(1)
d.setWindowTitle(_('Sign/verify Message'))
d.setMinimumSize(410, 290)
layout = QGridLayout(d)
message_e = QTextEdit()
layout.addWidget(QLabel(_('Message')), 1, 0)
layout.addWidget(message_e, 1, 1)
layout.setRowStretch(2,3)
address_e = QLineEdit()
address_e.setText(address)
layout.addWidget(QLabel(_('Address')), 2, 0)
layout.addWidget(address_e, 2, 1)
signature_e = QTextEdit()
layout.addWidget(QLabel(_('Signature')), 3, 0)
layout.addWidget(signature_e, 3, 1)
layout.setRowStretch(3,1)
hbox = QHBoxLayout()
b = QPushButton(_("Sign"))
b.clicked.connect(lambda: self.do_sign(address_e, message_e, signature_e))
hbox.addWidget(b)
b = QPushButton(_("Verify"))
b.clicked.connect(lambda: self.do_verify(address_e, message_e, signature_e))
hbox.addWidget(b)
b = QPushButton(_("Close"))
b.clicked.connect(d.accept)
hbox.addWidget(b)
layout.addLayout(hbox, 4, 1)
d.exec_()
@protected
def do_decrypt(self, message_e, pubkey_e, encrypted_e, password):
try:
decrypted = self.wallet.decrypt_message(str(pubkey_e.text()), str(encrypted_e.toPlainText()), password)
message_e.setText(decrypted)
except BaseException as e:
traceback.print_exc(file=sys.stdout)
self.show_warning(str(e))
def do_encrypt(self, message_e, pubkey_e, encrypted_e):
message = unicode(message_e.toPlainText())
message = message.encode('utf-8')
try:
encrypted = bitcoin.encrypt_message(message, str(pubkey_e.text()))
encrypted_e.setText(encrypted)
except BaseException as e:
traceback.print_exc(file=sys.stdout)
self.show_warning(str(e))
def encrypt_message(self, address = ''):
d = QDialog(self)
d.setModal(1)
d.setWindowTitle(_('Encrypt/decrypt Message'))
d.setMinimumSize(610, 490)
layout = QGridLayout(d)
message_e = QTextEdit()
layout.addWidget(QLabel(_('Message')), 1, 0)
layout.addWidget(message_e, 1, 1)
layout.setRowStretch(2,3)
pubkey_e = QLineEdit()
if address:
pubkey = self.wallet.get_public_keys(address)[0]
pubkey_e.setText(pubkey)
layout.addWidget(QLabel(_('Public key')), 2, 0)
layout.addWidget(pubkey_e, 2, 1)
encrypted_e = QTextEdit()
layout.addWidget(QLabel(_('Encrypted')), 3, 0)
layout.addWidget(encrypted_e, 3, 1)
layout.setRowStretch(3,1)
hbox = QHBoxLayout()
b = QPushButton(_("Encrypt"))
b.clicked.connect(lambda: self.do_encrypt(message_e, pubkey_e, encrypted_e))
hbox.addWidget(b)
b = QPushButton(_("Decrypt"))
b.clicked.connect(lambda: self.do_decrypt(message_e, pubkey_e, encrypted_e))
hbox.addWidget(b)
b = QPushButton(_("Close"))
b.clicked.connect(d.accept)
hbox.addWidget(b)
layout.addLayout(hbox, 4, 1)
d.exec_()
def question(self, msg):
return QMessageBox.question(self, _('Message'), msg, QMessageBox.Yes | QMessageBox.No, QMessageBox.No) == QMessageBox.Yes
def show_message(self, msg):
QMessageBox.information(self, _('Message'), msg, _('OK'))
def show_warning(self, msg):
QMessageBox.warning(self, _('Warning'), msg, _('OK'))
def password_dialog(self, msg=None):
d = QDialog(self)
d.setModal(1)
d.setWindowTitle(_("Enter Password"))
pw = QLineEdit()
pw.setEchoMode(2)
vbox = QVBoxLayout()
if not msg:
msg = _('Please enter your password')
vbox.addWidget(QLabel(msg))
grid = QGridLayout()
grid.setSpacing(8)
grid.addWidget(QLabel(_('Password')), 1, 0)
grid.addWidget(pw, 1, 1)
vbox.addLayout(grid)
vbox.addLayout(Buttons(CancelButton(d), OkButton(d)))
d.setLayout(vbox)
run_hook('password_dialog', pw, grid, 1)
if not d.exec_(): return
return unicode(pw.text())
    def tx_from_text(self, txt):
        """Parse a transaction from text containing JSON or raw hexadecimal."""
txt = txt.strip()
try:
txt.decode('hex')
is_hex = True
except:
is_hex = False
if is_hex:
try:
return Transaction(txt)
except:
traceback.print_exc(file=sys.stdout)
QMessageBox.critical(None, _("Unable to parse transaction"), _("Electrum was unable to parse your transaction"))
return
try:
tx_dict = json.loads(str(txt))
assert "hex" in tx_dict.keys()
tx = Transaction(tx_dict["hex"])
#if tx_dict.has_key("input_info"):
# input_info = json.loads(tx_dict['input_info'])
# tx.add_input_info(input_info)
return tx
except Exception:
traceback.print_exc(file=sys.stdout)
QMessageBox.critical(None, _("Unable to parse transaction"), _("Electrum was unable to parse your transaction"))
def read_tx_from_qrcode(self):
from electrum_rby import qrscanner
try:
data = qrscanner.scan_qr(self.config)
except BaseException, e:
            QMessageBox.warning(self, _('Error'), str(e), _('OK'))
return
if not data:
return
# if the user scanned a bitcoin URI
if data.startswith("rubycoin:"):
self.pay_from_URI(data)
return
# else if the user scanned an offline signed tx
# transactions are binary, but qrcode seems to return utf8...
data = data.decode('utf8')
z = bitcoin.base_decode(data, length=None, base=43)
data = ''.join(chr(ord(b)) for b in z).encode('hex')
tx = self.tx_from_text(data)
if not tx:
return
self.show_transaction(tx)
def read_tx_from_file(self):
fileName = self.getOpenFileName(_("Select your transaction file"), "*.txn")
if not fileName:
return
try:
with open(fileName, "r") as f:
file_content = f.read()
except (ValueError, IOError, os.error), reason:
QMessageBox.critical(None, _("Unable to read file or no transaction found"), _("Electrum was unable to open your transaction file") + "\n" + str(reason))
return self.tx_from_text(file_content)
@protected
def sign_raw_transaction(self, tx, password):
try:
self.wallet.sign_transaction(tx, password)
except Exception as e:
traceback.print_exc(file=sys.stdout)
QMessageBox.warning(self, _("Error"), str(e))
def do_process_from_text(self):
text = text_dialog(self, _('Input raw transaction'), _("Transaction:"), _("Load transaction"))
if not text:
return
tx = self.tx_from_text(text)
if tx:
self.show_transaction(tx)
def do_process_from_file(self):
tx = self.read_tx_from_file()
if tx:
self.show_transaction(tx)
def do_process_from_txid(self):
from electrum_rby import transaction
txid, ok = QInputDialog.getText(self, _('Lookup transaction'), _('Transaction ID') + ':')
if ok and txid:
r = self.network.synchronous_get([ ('blockchain.transaction.get',[str(txid)]) ])[0]
if r:
tx = transaction.Transaction(r)
if tx:
self.show_transaction(tx)
else:
self.show_message("unknown transaction")
def do_process_from_csvReader(self, csvReader):
outputs = []
errors = []
errtext = ""
try:
for position, row in enumerate(csvReader):
address = row[0]
if not bitcoin.is_address(address):
errors.append((position, address))
continue
amount = Decimal(row[1])
amount = int(100000000*amount)
outputs.append(('address', address, amount))
except (ValueError, IOError, os.error), reason:
QMessageBox.critical(None, _("Unable to read file or no transaction found"), _("Electrum was unable to open your transaction file") + "\n" + str(reason))
return
if errors != []:
for x in errors:
errtext += "CSV Row " + str(x[0]+1) + ": " + x[1] + "\n"
QMessageBox.critical(None, _("Invalid Addresses"), _("ABORTING! Invalid Addresses found:") + "\n\n" + errtext)
return
try:
tx = self.wallet.make_unsigned_transaction(outputs, None, None)
except Exception as e:
self.show_message(str(e))
return
self.show_transaction(tx)
@protected
def export_privkeys_dialog(self, password):
if self.wallet.is_watching_only():
self.show_message(_("This is a watching-only wallet"))
return
try:
self.wallet.check_password(password)
except Exception as e:
QMessageBox.warning(self, _('Error'), str(e), _('OK'))
return
d = QDialog(self)
d.setWindowTitle(_('Private keys'))
d.setMinimumSize(850, 300)
vbox = QVBoxLayout(d)
msg = "%s\n%s\n%s" % (_("WARNING: ALL your private keys are secret."),
_("Exposing a single private key can compromise your entire wallet!"),
_("In particular, DO NOT use 'redeem private key' services proposed by third parties."))
vbox.addWidget(QLabel(msg))
e = QTextEdit()
e.setReadOnly(True)
vbox.addWidget(e)
defaultname = 'electrum-rby-private-keys.csv'
select_msg = _('Select file to export your private keys to')
hbox, filename_e, csv_button = filename_field(self, self.config, defaultname, select_msg)
vbox.addLayout(hbox)
b = OkButton(d, _('Export'))
b.setEnabled(False)
vbox.addLayout(Buttons(CancelButton(d), b))
private_keys = {}
addresses = self.wallet.addresses(True)
done = False
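        # Descriptive note (added): derive the keys in a background thread and
        # update the dialog through Qt signals so the GUI stays responsive;
        # 'done' lets a cancelled dialog stop the worker early.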
def privkeys_thread():
for addr in addresses:
time.sleep(0.1)
if done:
break
private_keys[addr] = "\n".join(self.wallet.get_private_key(addr, password))
d.emit(SIGNAL('computing_privkeys'))
d.emit(SIGNAL('show_privkeys'))
def show_privkeys():
s = "\n".join( map( lambda x: x[0] + "\t"+ x[1], private_keys.items()))
e.setText(s)
b.setEnabled(True)
d.connect(d, QtCore.SIGNAL('computing_privkeys'), lambda: e.setText("Please wait... %d/%d"%(len(private_keys),len(addresses))))
d.connect(d, QtCore.SIGNAL('show_privkeys'), show_privkeys)
threading.Thread(target=privkeys_thread).start()
if not d.exec_():
done = True
return
filename = filename_e.text()
if not filename:
return
try:
self.do_export_privkeys(filename, private_keys, csv_button.isChecked())
except (IOError, os.error), reason:
            export_error_label = _("Electrum was unable to produce a private key export.")
QMessageBox.critical(None, _("Unable to create csv"), export_error_label + "\n" + str(reason))
except Exception as e:
self.show_message(str(e))
return
self.show_message(_("Private keys exported."))
def do_export_privkeys(self, fileName, pklist, is_csv):
with open(fileName, "w+") as f:
if is_csv:
transaction = csv.writer(f)
transaction.writerow(["address", "private_key"])
for addr, pk in pklist.items():
transaction.writerow(["%34s"%addr,pk])
else:
import json
f.write(json.dumps(pklist, indent = 4))
def do_import_labels(self):
labelsFile = self.getOpenFileName(_("Open labels file"), "*.dat")
if not labelsFile: return
try:
f = open(labelsFile, 'r')
data = f.read()
f.close()
for key, value in json.loads(data).items():
self.wallet.set_label(key, value)
QMessageBox.information(None, _("Labels imported"), _("Your labels were imported from")+" '%s'" % str(labelsFile))
except (IOError, os.error), reason:
QMessageBox.critical(None, _("Unable to import labels"), _("Electrum was unable to import your labels.")+"\n" + str(reason))
def do_export_labels(self):
labels = self.wallet.labels
try:
fileName = self.getSaveFileName(_("Select file to save your labels"), 'electrum-rby_labels.dat', "*.dat")
if fileName:
with open(fileName, 'w+') as f:
json.dump(labels, f)
                QMessageBox.information(None, _("Labels exported"), _("Your labels were exported to")+" '%s'" % str(fileName))
except (IOError, os.error), reason:
QMessageBox.critical(None, _("Unable to export labels"), _("Electrum was unable to export your labels.")+"\n" + str(reason))
def export_history_dialog(self):
d = QDialog(self)
d.setWindowTitle(_('Export History'))
d.setMinimumSize(400, 200)
vbox = QVBoxLayout(d)
defaultname = os.path.expanduser('~/electrum-rby-history.csv')
select_msg = _('Select file to export your wallet transactions to')
hbox, filename_e, csv_button = filename_field(self, self.config, defaultname, select_msg)
vbox.addLayout(hbox)
vbox.addStretch(1)
hbox = Buttons(CancelButton(d), OkButton(d, _('Export')))
vbox.addLayout(hbox)
run_hook('export_history_dialog', self, hbox)
self.update()
if not d.exec_():
return
filename = filename_e.text()
if not filename:
return
try:
self.do_export_history(self.wallet, filename, csv_button.isChecked())
except (IOError, os.error), reason:
export_error_label = _("Electrum was unable to produce a transaction export.")
QMessageBox.critical(self, _("Unable to export history"), export_error_label + "\n" + str(reason))
return
QMessageBox.information(self,_("History exported"), _("Your wallet history has been successfully exported."))
def do_export_history(self, wallet, fileName, is_csv):
history = wallet.get_history()
lines = []
for item in history:
tx_hash, confirmations, value, timestamp, balance = item
if confirmations:
if timestamp is not None:
time_string = format_time(timestamp)
else:
time_string = "unknown"
else:
time_string = "pending"
if value is not None:
value_string = format_satoshis(value, True)
else:
value_string = '--'
if tx_hash:
label, is_default_label = wallet.get_label(tx_hash)
label = label.encode('utf-8')
else:
label = ""
if is_csv:
lines.append([tx_hash, label, confirmations, value_string, time_string])
else:
lines.append({'txid':tx_hash, 'date':"%16s"%time_string, 'label':label, 'value':value_string})
with open(fileName, "w+") as f:
if is_csv:
transaction = csv.writer(f, lineterminator='\n')
transaction.writerow(["transaction_hash","label", "confirmations", "value", "timestamp"])
for line in lines:
transaction.writerow(line)
else:
import json
f.write(json.dumps(lines, indent = 4))
def sweep_key_dialog(self):
d = QDialog(self)
d.setWindowTitle(_('Sweep private keys'))
d.setMinimumSize(600, 300)
vbox = QVBoxLayout(d)
vbox.addWidget(QLabel(_("Enter private keys")))
keys_e = QTextEdit()
keys_e.setTabChangesFocus(True)
vbox.addWidget(keys_e)
h, address_e = address_field(self.wallet.addresses(False))
vbox.addLayout(h)
vbox.addStretch(1)
button = OkButton(d, _('Sweep'))
vbox.addLayout(Buttons(CancelButton(d), button))
button.setEnabled(False)
def get_address():
addr = str(address_e.text())
if bitcoin.is_address(addr):
return addr
def get_pk():
pk = str(keys_e.toPlainText()).strip()
if Wallet.is_private_key(pk):
return pk.split()
f = lambda: button.setEnabled(get_address() is not None and get_pk() is not None)
keys_e.textChanged.connect(f)
address_e.textChanged.connect(f)
if not d.exec_():
return
fee = self.wallet.fee_per_kb
tx = Transaction.sweep(get_pk(), self.network, get_address(), fee)
self.show_transaction(tx)
@protected
def do_import_privkey(self, password):
if not self.wallet.has_imported_keys():
r = QMessageBox.question(None, _('Warning'), '<b>'+_('Warning') +':\n</b><br/>'+ _('Imported keys are not recoverable from seed.') + ' ' \
+ _('If you ever need to restore your wallet from its seed, these keys will be lost.') + '<p>' \
+ _('Are you sure you understand what you are doing?'), 3, 4)
if r == 4: return
text = text_dialog(self, _('Import private keys'), _("Enter private keys")+':', _("Import"))
if not text: return
text = str(text).split()
badkeys = []
addrlist = []
for key in text:
try:
addr = self.wallet.import_key(key, password)
except Exception as e:
badkeys.append(key)
continue
if not addr:
badkeys.append(key)
else:
addrlist.append(addr)
if addrlist:
QMessageBox.information(self, _('Information'), _("The following addresses were added") + ':\n' + '\n'.join(addrlist))
if badkeys:
QMessageBox.critical(self, _('Error'), _("The following inputs could not be imported") + ':\n'+ '\n'.join(badkeys))
self.update_address_tab()
self.update_history_tab()
def settings_dialog(self):
self.need_restart = False
d = QDialog(self)
d.setWindowTitle(_('Electrum Settings'))
d.setModal(1)
vbox = QVBoxLayout()
grid = QGridLayout()
grid.setColumnStretch(0,1)
widgets = []
lang_label = QLabel(_('Language') + ':')
lang_help = HelpButton(_('Select which language is used in the GUI (after restart).'))
lang_combo = QComboBox()
from electrum_rby.i18n import languages
lang_combo.addItems(languages.values())
try:
index = languages.keys().index(self.config.get("language",''))
except Exception:
index = 0
lang_combo.setCurrentIndex(index)
if not self.config.is_modifiable('language'):
for w in [lang_combo, lang_label]: w.setEnabled(False)
def on_lang(x):
lang_request = languages.keys()[lang_combo.currentIndex()]
if lang_request != self.config.get('language'):
self.config.set_key("language", lang_request, True)
self.need_restart = True
lang_combo.currentIndexChanged.connect(on_lang)
widgets.append((lang_label, lang_combo, lang_help))
nz_label = QLabel(_('Zeros after decimal point') + ':')
nz_help = HelpButton(_('Number of zeros displayed after the decimal point. For example, if this is set to 2, "1." will be displayed as "1.00"'))
nz = QSpinBox()
nz.setMinimum(0)
nz.setMaximum(self.decimal_point)
nz.setValue(self.num_zeros)
if not self.config.is_modifiable('num_zeros'):
for w in [nz, nz_label]: w.setEnabled(False)
def on_nz():
value = nz.value()
if self.num_zeros != value:
self.num_zeros = value
self.config.set_key('num_zeros', value, True)
self.update_history_tab()
self.update_address_tab()
nz.valueChanged.connect(on_nz)
widgets.append((nz_label, nz, nz_help))
fee_label = QLabel(_('Transaction fee per kb') + ':')
fee_help = HelpButton(_('Fee per kilobyte of transaction.') + '\n' \
+ _('Recommended value') + ': ' + self.format_amount(bitcoin.RECOMMENDED_FEE) + ' ' + self.base_unit())
fee_e = BTCAmountEdit(self.get_decimal_point)
fee_e.setAmount(self.wallet.fee_per_kb)
if not self.config.is_modifiable('fee_per_kb'):
for w in [fee_e, fee_label]: w.setEnabled(False)
def on_fee():
fee = fee_e.get_amount()
self.wallet.set_fee(fee)
fee_e.editingFinished.connect(on_fee)
widgets.append((fee_label, fee_e, fee_help))
units = ['RBY', 'mRBY', 'bits']
unit_label = QLabel(_('Base unit') + ':')
unit_combo = QComboBox()
unit_combo.addItems(units)
unit_combo.setCurrentIndex(units.index(self.base_unit()))
        msg = _('Base unit of your wallet.')\
              + '\n1RBY=1000mRBY. 1mRBY=1000bits.\n' \
              + _('This setting affects the fields in the Send tab') + ' '
unit_help = HelpButton(msg)
def on_unit(x):
unit_result = units[unit_combo.currentIndex()]
if self.base_unit() == unit_result:
return
if unit_result == 'RBY':
self.decimal_point = 8
elif unit_result == 'mRBY':
self.decimal_point = 5
elif unit_result == 'bits':
self.decimal_point = 2
else:
raise Exception('Unknown base unit')
self.config.set_key('decimal_point', self.decimal_point, True)
self.update_history_tab()
self.update_receive_tab()
self.update_address_tab()
fee_e.setAmount(self.wallet.fee_per_kb)
self.update_status()
unit_combo.currentIndexChanged.connect(on_unit)
widgets.append((unit_label, unit_combo, unit_help))
block_explorers = sorted(block_explorer_info.keys())
block_ex_label = QLabel(_('Online Block Explorer') + ':')
block_ex_combo = QComboBox()
block_ex_combo.addItems(block_explorers)
block_ex_combo.setCurrentIndex(block_explorers.index(block_explorer(self.config)))
block_ex_help = HelpButton(_('Choose which online block explorer to use for functions that open a web browser'))
def on_be(x):
be_result = block_explorers[block_ex_combo.currentIndex()]
self.config.set_key('block_explorer', be_result, True)
block_ex_combo.currentIndexChanged.connect(on_be)
widgets.append((block_ex_label, block_ex_combo, block_ex_help))
from electrum_rby import qrscanner
system_cameras = qrscanner._find_system_cameras()
qr_combo = QComboBox()
qr_combo.addItem("Default","default")
for camera, device in system_cameras.items():
qr_combo.addItem(camera, device)
#combo.addItem("Manually specify a device", config.get("video_device"))
index = qr_combo.findData(self.config.get("video_device"))
qr_combo.setCurrentIndex(index)
qr_label = QLabel(_('Video Device') + ':')
qr_combo.setEnabled(qrscanner.zbar is not None)
qr_help = HelpButton(_("Install the zbar package to enable this.\nOn linux, type: 'apt-get install python-zbar'"))
on_video_device = lambda x: self.config.set_key("video_device", str(qr_combo.itemData(x).toString()), True)
qr_combo.currentIndexChanged.connect(on_video_device)
widgets.append((qr_label, qr_combo, qr_help))
usechange_cb = QCheckBox(_('Use change addresses'))
usechange_cb.setChecked(self.wallet.use_change)
usechange_help = HelpButton(_('Using change addresses makes it more difficult for other people to track your transactions.'))
if not self.config.is_modifiable('use_change'): usechange_cb.setEnabled(False)
def on_usechange(x):
usechange_result = x == Qt.Checked
if self.wallet.use_change != usechange_result:
self.wallet.use_change = usechange_result
self.wallet.storage.put('use_change', self.wallet.use_change)
usechange_cb.stateChanged.connect(on_usechange)
widgets.append((usechange_cb, None, usechange_help))
showtx_cb = QCheckBox(_('Show transaction before broadcast'))
showtx_cb.setChecked(self.config.get('show_before_broadcast', False))
showtx_cb.stateChanged.connect(lambda x: self.config.set_key('show_before_broadcast', showtx_cb.isChecked()))
        showtx_help = HelpButton(_('Display the details of your transaction before broadcasting it.'))
widgets.append((showtx_cb, None, showtx_help))
can_edit_fees_cb = QCheckBox(_('Set transaction fees manually'))
can_edit_fees_cb.setChecked(self.config.get('can_edit_fees', False))
def on_editfees(x):
self.config.set_key('can_edit_fees', x == Qt.Checked)
self.update_fee_edit()
can_edit_fees_cb.stateChanged.connect(on_editfees)
can_edit_fees_help = HelpButton(_('This option lets you edit fees in the send tab.'))
widgets.append((can_edit_fees_cb, None, can_edit_fees_help))
for a,b,c in widgets:
i = grid.rowCount()
if b:
grid.addWidget(a, i, 0)
grid.addWidget(b, i, 1)
else:
grid.addWidget(a, i, 0, 1, 2)
grid.addWidget(c, i, 2)
vbox.addLayout(grid)
vbox.addStretch(1)
vbox.addLayout(Buttons(CloseButton(d)))
d.setLayout(vbox)
# run the dialog
d.exec_()
run_hook('close_settings_dialog')
if self.need_restart:
QMessageBox.warning(self, _('Success'), _('Please restart Electrum to activate the new GUI settings'), _('OK'))
def run_network_dialog(self):
if not self.network:
QMessageBox.warning(self, _('Offline'), _('You are using Electrum in offline mode.\nRestart Electrum if you want to get connected.'), _('OK'))
return
NetworkDialog(self.wallet.network, self.config, self).do_exec()
def closeEvent(self, event):
self.config.set_key("is_maximized", self.isMaximized())
if not self.isMaximized():
g = self.geometry()
self.config.set_key("winpos-qt", [g.left(),g.top(),g.width(),g.height()])
self.config.set_key("console-history", self.console.history[-50:], True)
self.wallet.storage.put('accounts_expanded', self.accounts_expanded)
event.accept()
def plugins_dialog(self):
from electrum_rby.plugins import plugins, descriptions, is_available, loader
self.pluginsdialog = d = QDialog(self)
d.setWindowTitle(_('Electrum Plugins'))
d.setModal(1)
vbox = QVBoxLayout(d)
# plugins
scroll = QScrollArea()
scroll.setEnabled(True)
scroll.setWidgetResizable(True)
scroll.setMinimumSize(400,250)
vbox.addWidget(scroll)
w = QWidget()
scroll.setWidget(w)
w.setMinimumHeight(len(plugins)*35)
grid = QGridLayout()
grid.setColumnStretch(0,1)
w.setLayout(grid)
def do_toggle(cb, name, w):
p = plugins.get(name)
if p:
p.disable()
p.close()
plugins.pop(name)
else:
module = loader(name)
plugins[name] = p = module.Plugin(self.config, name)
p.enable()
p.wallet = self.wallet
p.load_wallet(self.wallet)
p.init_qt(self.gui_object)
r = p.is_enabled()
cb.setChecked(r)
if w: w.setEnabled(r)
def mk_toggle(cb, name, w):
return lambda: do_toggle(cb, name, w)
for i, descr in enumerate(descriptions):
name = descr['name']
p = plugins.get(name)
try:
cb = QCheckBox(descr['fullname'])
cb.setEnabled(is_available(name, self.wallet))
cb.setChecked(p is not None)
grid.addWidget(cb, i, 0)
if p and p.requires_settings():
w = p.settings_widget(self)
w.setEnabled(p.is_enabled())
grid.addWidget(w, i, 1)
else:
w = None
cb.clicked.connect(mk_toggle(cb, name, w))
grid.addWidget(HelpButton(descr['description']), i, 2)
except Exception:
print_msg("Error: cannot display plugin", name)
traceback.print_exc(file=sys.stdout)
grid.setRowStretch(i+1,1)
vbox.addLayout(Buttons(CloseButton(d)))
d.exec_()
def show_account_details(self, k):
account = self.wallet.accounts[k]
d = QDialog(self)
d.setWindowTitle(_('Account Details'))
d.setModal(1)
vbox = QVBoxLayout(d)
name = self.wallet.get_account_name(k)
label = QLabel('Name: ' + name)
vbox.addWidget(label)
vbox.addWidget(QLabel(_('Address type') + ': ' + account.get_type()))
vbox.addWidget(QLabel(_('Derivation') + ': ' + k))
vbox.addWidget(QLabel(_('Master Public Key:')))
text = QTextEdit()
text.setReadOnly(True)
text.setMaximumHeight(170)
vbox.addWidget(text)
mpk_text = '\n'.join( account.get_master_pubkeys() )
text.setText(mpk_text)
vbox.addLayout(Buttons(CloseButton(d)))
d.exec_()
@protected
def create_csr(self, alias, challenge, password):
from electrum_rby import x509
import tlslite
xprv = self.wallet.get_master_private_key(self.wallet.root_name, password)
_, _, _, c, k = bitcoin.deserialize_xkey(xprv)
csr = x509.create_csr(alias, challenge, k)
csr = tlslite.utils.pem.pem(bytearray(csr), "CERTIFICATE REQUEST")
with open('test.csr', 'w') as f:
f.write(csr)
#os.system('openssl asn1parse -i -in test.csr')
return 'test.csr'
| gpl-3.0 | 5,672,153,572,525,321,000 | 39.039797 | 454 | 0.585696 | false |
binary-signal/mass-apk-installer | mass_apk/helpers.py | 1 | 1688 | """Mass apk helper functions module."""
import functools
import logging
import os
import platform
from enum import Enum, unique
from timeit import default_timer as timer
__all__ = ["Platform", "detect_platform", "human_time", "elapsed_time", "MB"]
log = logging.getLogger(__name__)
MB = 1024 * 1024
@unique
class Platform(Enum):
"""Platform enum used to detected running operating system."""
OSX = "osx"
LINUX = "linux"
WIN = "win"
def detect_platform() -> Platform:
"""Detect running operating system.
    Raises RuntimeError if the operating system cannot be detected.
"""
detected_system = platform.system()
if os.name == "posix" and detected_system == "Darwin":
return Platform.OSX
elif os.name == "posix" and detected_system == "Linux":
return Platform.LINUX
elif os.name == "nt" and detected_system == "Windows":
return Platform.WIN
raise RuntimeError("Unsupported OS")
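# Illustrative usage (not part of the module): callers can branch on the
# returned enum, e.g. `if detect_platform() is Platform.WIN: ...`.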
def human_time(start: float, end: float) -> str:
"""Create a human readable string.
    Create a human-readable string for the elapsed time between the
    start and end timestamps.
"""
hours, rem = divmod(end - start, 3600)
minutes, seconds = divmod(rem, 60)
return "Elapsed time {:0>2}:{:0>2}:{:05.2f}".format(
int(hours), int(minutes), seconds
)
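# Example: human_time(0.0, 3723.5) returns "Elapsed time 01:02:03.50".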
def elapsed_time(func):
"""Decorate function `func` to measure its execution time."""
@functools.wraps(func)
def wrapper(*args, **kwargs):
start = timer()
result = func(*args, **kwargs)
end = timer()
log.debug("%s elapsed time: %s", func.__name__, human_time(start, end))
return result
return wrapper
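# Illustrative usage (the function name below is hypothetical):
#
#     @elapsed_time
#     def install_apk(path):
#         ...
#
# Each call then logs a DEBUG message such as
# "install_apk elapsed time: Elapsed time 00:00:01.23".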
| bsd-3-clause | 1,378,833,015,522,749,400 | 24.19403 | 79 | 0.637441 | false |
emosenkis/svndumpmultitool | svndumpmultitool/test_utils.py | 1 | 5261 | # Copyright 2013 Google Inc. All Rights Reserved.
#
# Use of this source code is governed by a MIT-style
# license that can be found in the LICENSE file or at
# http://opensource.org/licenses/MIT
"""Utilities for use in tests."""
from __future__ import absolute_import
import contextlib
import io
import subprocess
class MockPopen(object):
"""Mock class for replacing subprocess.Popen.
Example:
@mock.patch('svndumpmultitool.subprocess.Popen', new=MockPopen)
def testDeleteEverything(self):
with MockPopen.ExpectCommands({
'cmd': ('rm', '-rf', '/'),
'stdout': 'Now erasing your files...done'
}):
mymodule.DeleteEverything()
"""
_expected_commands = None
_finished_commands = None
_in_test = False
def __init__(self, cmd, stdout=None, stderr=None,
bufsize=None):
"""Called when code under test instantiates Popen."""
# Make sure 'with MockPopen.ExpectCommands():' is being used.
assert self._in_test, ('MockPopen instantiated without '
"'with MockPopen.ExpectCommands():'")
try:
# Find out what command is expected next and how it should behave.
expected = self._expected_commands.pop()
except IndexError:
raise AssertionError('No commands expected but got %s' % cmd)
assert cmd == expected['cmd'], ('Expected command %s, got %s'
% (expected['cmd'], cmd))
self._cmd = expected['cmd']
# If Popen was called with stdout or stderr set to subprocess.PIPE, the code
# expects to read from the subprocess's stdout or stderr. MockPopen provides
# these by wrapping the strings provided by the test with BytesIO.
if stdout is subprocess.PIPE:
try:
self.stdout = self._AssertingStream(expected['stdout'])
      except KeyError:
raise AssertionError('Popen was called with stdout=subprocess.PIPE but'
' test did not provide stdout')
else:
self.stdout = None
assert 'stdout' not in expected, ('Test provided stdout but Popen was'
' called without'
' stdout=subprocess.PIPE')
if stderr is subprocess.PIPE:
try:
self.stderr = self._AssertingStream(expected['stderr'])
      except KeyError:
raise AssertionError('Popen was called with stderr=subprocess.PIPE but'
' test did not provide stderr')
else:
self.stderr = None
assert 'stderr' not in expected, ('Test provided stderr but Popen was'
' called without'
' stderr=subprocess.PIPE')
self._returncode = expected.get('returncode', 0)
self._finished_commands.append(self)
@classmethod
@contextlib.contextmanager
def ExpectCommands(cls, *cmds):
"""Run test code, mocking out use of subprocess.Popen with MockPopen.
Args:
*cmds: the commands expected by the test to be run, in the forms of dicts
with the following fields:
'cmd': the exact command Popen will be called with
'stdout': a string that will be converted to a stream and returned
as the stdout of the subprocess
'stderr': like stdout
'returncode': the code that the subprocess will return when wait()
is called
stdout and stderr must be provided if and only if the code being
tested requests them to be piped by passing subprocess.PIPE as the
stdout and/or stderr keyword arguments to Popen.
Yields:
nothing; yield is used by the contextmanager decorator to turn a function
into a context manager.
See MockPopen for an example.
"""
assert not cls._in_test, 'MockPopen.ExpectCommands can not be nested.'
cls._expected_commands = list(reversed(cmds))
cls._finished_commands = []
cls._in_test = True
try:
yield # Test runs here
cls._AssertAllCommandsWereRunProperly()
finally:
cls._in_test = False
cls._expected_commands = None
cls._finished_commands = None
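  # Illustrative use with stderr and a failing exit code (command and module
  # names are hypothetical):
  #
  #   with MockPopen.ExpectCommands({
  #       'cmd': ('svnadmin', 'dump', '/missing'),
  #       'stderr': 'svnadmin: No such repository\n',
  #       'returncode': 1}):
  #     mymodule.DumpMissingRepo()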
def wait(self):
return self._returncode
@classmethod
def _AssertAllCommandsWereRunProperly(cls):
"""Asserts that must be run after the test code.
Verifies that each command's output streams were closed and all expected
commands were run.
"""
# stdout and stderr must be closed
for cmd in cls._finished_commands:
assert not cmd.stdout or cmd.stdout.closed, ('stdout was not closed for'
' command: %s' % (cmd._cmd,))
assert not cmd.stderr or cmd.stderr.closed, ('stderr was not closed for'
' command: %s' % (cmd._cmd,))
# all commands expected must have been run
leftovers = [cmd['cmd'] for cmd in cls._expected_commands]
assert not leftovers, 'Expected command(s) never executed: %s' % leftovers
class _AssertingStream(io.BytesIO):
def close(self):
assert not self.read(), 'All stream output must be read before close()'
io.BytesIO.close(self)
| mit | 7,000,763,496,029,865,000 | 36.578571 | 80 | 0.618133 | false |
optimamodel/Optima | tests/testproject.py | 1 | 5062 | #!/usr/bin/env python
"""
Test script to see if Optima works.
To use: comment out lines in the definition of 'tests' to not run those tests.
NOTE: for best results, run in interactive mode, e.g.
python -i tests.py
Version: 2016feb03 by cliffk
"""
## Define tests to run here!!!
tests = [
'makeproject',
'parametercheck',
#'resultsaddition',
#'saveload',
'loadspreadsheet',
#'loadeconomics',
'runsim'
]
##############################################################################
## Initialization -- same for every test script
##############################################################################
from optima import tic, toc, blank, pd # analysis:ignore
if 'doplot' not in locals(): doplot = True
def done(t=0):
print('Done.')
toc(t)
blank()
blank()
print('Running tests:')
for i,test in enumerate(tests): print(('%i. '+test) % (i+1))
blank()
##############################################################################
## The tests
##############################################################################
T = tic()
## Project creation test
if 'makeproject' in tests:
t = tic()
print('Running make project test...')
from optima import Project
P = Project()
print(P)
done(t)
if 'parametercheck' in tests:
from optima import defaultproject, OptimaException
t = tic()
print('Running parameters check test...')
P = defaultproject()
datakeys = P.data.keys()
parkeys = P.pars().keys()
dataonly = set([
'condomcas', 'condomcom', 'condomreg',
'hivprev', 'meta', 'npops',
'numactscas', 'numactscom', 'numactsinj', 'numactsreg',
'optdeath', 'optnewtreat', 'optnumdiag', 'optnuminfect', 'optnumtest', 'optplhiv', 'optprev','optpropdx','optpropcare','optproptx','optpropsupp','optproppmtct',
'partcas', 'partcom', 'partinj', 'partreg',
'pops', 'pships', 'years'])
parsonly = set([
'actscas', 'actscom', 'actsinj', 'actsreg', 'age', 'transnorm',
'condcas', 'condcom', 'condreg', 'numcirc',
'female', 'force', 'inhomo', 'initprev','hivdeath',
'propdx','propcare','proptx','propsupp','proppmtct',
'injects', 'male', 'popkeys', 'fromto', 'transmatrix',
'fixproppmtct', 'fixpropsupp', 'fixpropdx', 'fixpropcare', 'fixproptx'])
dataminuspars = set(datakeys) - set(parkeys)
parsminusdata = set(parkeys) - set(datakeys)
if dataminuspars != dataonly:
mismatch1 = list(dataonly - dataminuspars)
mismatch2 = list(dataminuspars - dataonly)
errormsg = 'Unexpected "dataminuspars" parameter in "%s" or "%s"' % (mismatch1, mismatch2)
raise OptimaException(errormsg)
if parsminusdata != parsonly:
mismatch1 = list(parsonly - parsminusdata)
mismatch2 = list(parsminusdata - parsonly)
errormsg = 'Unexpected "parsminusdata" parameter in "%s" or "%s"' % (mismatch1, mismatch2)
raise OptimaException(errormsg)
done(t)
## Adding results
if 'resultsaddition' in tests:
t = tic()
print('Running results addition test...')
import optima as op
P = op.defaultproject()
Q = op.defaultproject()
R1 = P.results[0]
R2 = Q.results[0]
R3 = R1+R2
if doplot:
multires = op.Multiresultset([R1,R3])
op.pygui(multires, toplot=['prev-tot','numplhiv-tot'])
done(t)
## Project save/load test
if 'saveload' in tests:
t = tic()
print('Running save/load test...')
from optima import Project, saveobj, loadproj
from os import remove
filename = 'testproject.prj'
print(' Checking saving...')
P = Project()
saveobj(filename, P)
print(' Checking loading...')
Q = loadproj(filename)
print('Cleaning up...')
remove(filename)
done(t)
## Load spreadsheet test
if 'loadspreadsheet' in tests:
t = tic()
print('Running loadspreadsheet test...')
from optima import Project
print(' Create a project from a spreadsheet')
P = Project(spreadsheet='simple.xlsx')
print(' Load a project, then load a spreadsheet')
Q = Project()
Q.loadspreadsheet('simple.xlsx')
assert Q.data['effcondom'][0]==0.95, 'Condom efficacy not 95% or not being read in properly'
done(t)
## Load economics spreadsheet test
if 'loadeconomics' in tests:
t = tic()
print('Running loadeconomics test...')
from optima import Project
print(' Create an empty project and add economic data')
P = Project()
P.loadeconomics(filename='testeconomics.xlsx')
print(' Create a project from a spreadsheet and add economic data')
P = Project(spreadsheet='simple.xlsx')
P.loadeconomics(filename='testeconomics.xlsx')
## Run simulation test
if 'runsim' in tests:
t = tic()
print('Running runsim test...')
from optima import Project
P = Project()
P.loadspreadsheet('simple.xlsx',dorun=True)
done(t)
print('\n\n\nDONE: ran %i tests' % len(tests))
toc(T)
| lgpl-3.0 | 5,239,596,296,055,542,000 | 21.801802 | 164 | 0.585539 | false |
ylzmax/vncmanager | site1/settings.py | 1 | 2938 | """
Django settings for site1 project.
Generated by 'django-admin startproject' using Django 1.8.8.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '!ctts*k9tll(!m7v)tp1#!z-_xl*agj(1%%y6!(z*lg!66k-7-'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django_crontab',
'vnc',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
#'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'site1.urls'
#BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR,"templates")],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'site1.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = '/tmp/static'
STATICFILES_DIRS=(
"%s/%s"%(BASE_DIR,"static"),
)
CRONJOBS = [
('*/15 * * * *','vnc.t.updateserver', '>> /tmp/c.log 2>&1'),
]
| gpl-3.0 | 8,641,891,417,207,550,000 | 25 | 71 | 0.677672 | false |
mishka28/NYU-Python | intro-programming/final_project/binomial_tree.py | 1 | 1850 | #!/usr/bin/env python3
from datetime import date
import math
import numpy as np
import sys
today = date(2015, 11, 20)
expiry = date(2017, 1, 20)
#Global inputs
s = 138.65
k = 133
t = (expiry - today).days / 365
r = 0.0106161830925
sig = 0.22545538993
div = .027000
def tree(steps, flavor):
Dt = t / steps
r_Dt = r * Dt
exp_r_Dt = math.exp(r_Dt)
exp_neg_r_Dt = math.exp(-r_Dt)
exp_div_Dt = math.exp(div * Dt)
exp_neg_div_Dt = math.exp(-div * Dt)
sig_sqrt_Dt = sig * math.sqrt(Dt)
exp_sig_sqrt_Dt = math.exp(sig_sqrt_Dt)
u = exp_sig_sqrt_Dt
d = 1/u
q = (exp_r_Dt * exp_neg_div_Dt - d) / (u -d)
p = 1 - q
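    # Note (a reading of the code, not part of the original comments): this is a
    # Cox-Ross-Rubinstein-style lattice. u and d are the up/down move factors
    # implied by the volatility over one step, and q is the risk-neutral
    # probability of an up move adjusted for the continuous dividend yield.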
sv = np.zeros((steps + 1,steps +1), dtype = np.float64)
sv[0,0] = s
z1 = 0
for i in range(1,steps + 1, 1):
z1 = z1 + 1
for n in range(z1 + 1):
sv[n,i] = sv[0,0] * (u ** (i - n)) * (d ** n)
iv = np.zeros((steps + 1,steps +1), dtype = np.float64)
z2 = 0
for i in range(1,steps + 1, 1):
find = False
for n in range(z2 + 2):
if flavor == "C":
iv[n,i] = max(sv[n,i] - k, 0)
elif flavor == "P":
iv[n,i] = max(-1 * (sv[n,i] - k), 0)
else:
print("fravor has to be 'C' for call and 'P' for put")
find = True
break
if find:
break
z2 = z2 + 1
# print(iv)
pv = np.zeros((steps + 1,steps +1), dtype = np.float64)
pv[:, steps] = iv[:, steps]
z3 = steps + 1
for i in range(steps -1, -1, -1):
z3 = z3 - 1
for n in range(z3):
pv[n,i] = (q * pv[n, i + 1] + p * pv[n + 1, i + 1]) * exp_neg_r_Dt
return(pv[0,0])
if __name__ == "__main__":
steps = int(sys.argv[1])
flavor = (sys.argv[2])
print(tree(steps,flavor))
| mit | -1,405,318,716,439,187,000 | 22.125 | 78 | 0.471892 | false |
lukius/mts | set1/challenge8.py | 1 | 1567 | from common.challenge import MatasanoChallenge
from common.tools.misc import FileLines
class ECBEncryptionFinder(object):
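    # ECB maps identical plaintext blocks to identical ciphertext blocks, so the
    # candidate ciphertext is the one with the fewest distinct 16-byte blocks
    # (32 hex characters each).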
BLOCK_SIZE = 16
def __init__(self, hex_strings):
self.hex_strings = hex_strings
def _less_than(self, number1, number2):
return number2 is None or number1 < number2
def _build_block_set(self, hex_string):
return set(hex_string[i:i+2*self.BLOCK_SIZE]
for i in range(0, len(hex_string), 2*self.BLOCK_SIZE))
def value(self):
min_blocks = None
for hex_string in self.hex_strings:
block_set = self._build_block_set(hex_string)
size = len(block_set)
if self._less_than(size, min_blocks):
candidate_string = hex_string
min_blocks = len(block_set)
return candidate_string
class Set1Challenge08(MatasanoChallenge):
FILE = 'set1/data/8.txt'
def expected_value(self):
return 'd880619740a8a19b7840a8a31c810a3d08649af70dc06f4fd5d2d69c744' +\
'cd283e2dd052f6b641dbf9d11b0348542bb5708649af70dc06f4fd5d2d6' +\
'9c744cd2839475c9dfdbc1d46597949d9c7e82bf5a08649af70dc06f4fd' +\
'5d2d69c744cd28397a93eab8d6aecd566489154789a6b0308649af70dc0' +\
'6f4fd5d2d69c744cd283d403180c98c8f6db1f2a3f9c4040deb0ab51b29' +\
'933f2c123c58386b06fba186a'
def value(self):
hex_strings = FileLines(self.FILE).value()
return ECBEncryptionFinder(hex_strings).value() | mit | 7,581,138,453,766,070,000 | 34.636364 | 79 | 0.640715 | false |
DaveA50/lbry | lbrynet/lbrylive/StdinUploader.py | 1 | 5224 | # pylint: skip-file
# This file is not maintained, but might be used in the future
#
import logging
import sys
from lbrynet.lbrylive.LiveStreamCreator import StdOutLiveStreamCreator
from lbrynet.core.BlobManager import TempBlobManager
from lbrynet.core.Session import Session
from lbrynet.core.server.BlobAvailabilityHandler import BlobAvailabilityHandlerFactory
from lbrynet.core.server.BlobRequestHandler import BlobRequestHandlerFactory
from lbrynet.core.server.ServerProtocol import ServerProtocolFactory
from lbrynet.lbrylive.PaymentRateManager import BaseLiveStreamPaymentRateManager
from lbrynet.lbrylive.LiveStreamMetadataManager import DBLiveStreamMetadataManager
from lbrynet.lbrylive.server.LiveBlobInfoQueryHandler import CryptBlobInfoQueryHandlerFactory
from lbrynet.dht.node import Node
from twisted.internet import defer, task
class StdinUploader():
"""This class reads from standard in, creates a stream, and makes it available on the network."""
def __init__(self, peer_port, dht_node_port, known_dht_nodes,
stream_info_manager_class=DBLiveStreamMetadataManager, blob_manager_class=TempBlobManager):
"""
@param peer_port: the network port on which to listen for peers
@param dht_node_port: the network port on which to listen for nodes in the DHT
@param known_dht_nodes: a list of (ip_address, dht_port) which will be used to join the DHT network
"""
self.peer_port = peer_port
self.lbry_server_port = None
self.session = Session(blob_manager_class=blob_manager_class,
stream_info_manager_class=stream_info_manager_class,
dht_node_class=Node, dht_node_port=dht_node_port,
known_dht_nodes=known_dht_nodes, peer_port=self.peer_port,
use_upnp=False)
self.payment_rate_manager = BaseLiveStreamPaymentRateManager()
def start(self):
"""Initialize the session and start listening on the peer port"""
d = self.session.setup()
d.addCallback(lambda _: self._start())
return d
def _start(self):
self._start_server()
return True
def _start_server(self):
query_handler_factories = [
CryptBlobInfoQueryHandlerFactory(self.stream_info_manager, self.session.wallet,
self.payment_rate_manager),
BlobAvailabilityHandlerFactory(self.session.blob_manager),
BlobRequestHandlerFactory(self.session.blob_manager, self.session.wallet,
self.payment_rate_manager),
self.session.wallet.get_wallet_info_query_handler_factory()
]
self.server_factory = ServerProtocolFactory(self.session.rate_limiter,
query_handler_factories,
self.session.peer_manager)
from twisted.internet import reactor
self.lbry_server_port = reactor.listenTCP(self.peer_port, self.server_factory)
def start_live_stream(self, stream_name):
"""Create the stream and start reading from stdin
@param stream_name: a string, the suggested name of this stream
"""
stream_creator_helper = StdOutLiveStreamCreator(stream_name, self.session.blob_manager,
self.stream_info_manager)
d = stream_creator_helper.create_and_publish_stream_descriptor()
def print_sd_hash(sd_hash):
print "Stream descriptor hash:", sd_hash
d.addCallback(print_sd_hash)
d.addCallback(lambda _: stream_creator_helper.start_streaming())
return d
def shut_down(self):
"""End the session and stop listening on the server port"""
d = self.session.shut_down()
d.addCallback(lambda _: self._shut_down())
return d
def _shut_down(self):
if self.lbry_server_port is not None:
d = defer.maybeDeferred(self.lbry_server_port.stopListening)
else:
d = defer.succeed(True)
return d
def launch_stdin_uploader():
from twisted.internet import reactor
logging.basicConfig(level=logging.WARNING, filename="ul.log")
if len(sys.argv) == 4:
uploader = StdinUploader(int(sys.argv[2]), int(sys.argv[3]), [])
elif len(sys.argv) == 6:
uploader = StdinUploader(int(sys.argv[2]), int(sys.argv[3]), [(sys.argv[4], int(sys.argv[5]))])
else:
print "Usage: lbrynet-stdin-uploader <stream_name> <peer_port> <dht_node_port>" \
" [<dht_bootstrap_host> <dht_bootstrap port>]"
sys.exit(1)
def start_stdin_uploader():
return uploader.start_live_stream(sys.argv[1])
def shut_down():
logging.debug("Telling the reactor to stop in 60 seconds")
reactor.callLater(60, reactor.stop)
d = task.deferLater(reactor, 0, uploader.start)
d.addCallback(lambda _: start_stdin_uploader())
d.addCallback(lambda _: shut_down())
reactor.addSystemEventTrigger('before', 'shutdown', uploader.shut_down)
reactor.run()
| mit | -3,413,635,974,544,798,000 | 42.173554 | 108 | 0.647779 | false |
Ecpy/ecpy_hqc_legacy | tests/tasks/tasks/instr/test_apply_mag_field_task.py | 1 | 3891 | # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright 2015-2018 by ExopyHqcLegacy Authors, see AUTHORS for more details.
#
# Distributed under the terms of the BSD license.
#
# The full license is in the file LICENCE, distributed with this software.
# -----------------------------------------------------------------------------
"""Tests for the ApplyMagFieldTask
"""
from multiprocessing import Event
import pytest
import enaml
from exopy.tasks.api import RootTask
from exopy.tasks.tasks.logic.loop_task import LoopTask
from exopy.testing.util import show_and_close_widget
from exopy_hqc_legacy.tasks.tasks.instr.apply_mag_field_task\
import ApplyMagFieldTask
with enaml.imports():
from exopy.tasks.tasks.logic.views.loop_view import LoopView
from exopy_hqc_legacy.tasks.tasks.instr.views.apply_mag_field_view\
import ApplyMagFieldView
from .instr_helper import (InstrHelper, InstrHelperStarter, DummyJob,
PROFILES, DRIVERS)
class TestApplyMagFieldTask(object):
def setup(self):
self.root = RootTask(should_stop=Event(), should_pause=Event())
self.task = ApplyMagFieldTask(name='Test',
parallel={'activated': False})
self.root.add_child_task(0, self.task)
self.root.run_time[DRIVERS] = {'Test': (InstrHelper,
InstrHelperStarter())}
self.root.run_time[PROFILES] =\
{'Test1':
{'connections': {'C': {'owner': [],
'output_fluctuations': 1e-6,
'heater_state': []}},
'settings': {'S': {'sweep_to_field': [DummyJob(), DummyJob(),
DummyJob()],
'sweep_to_persistent_field': [DummyJob()],
'read_persistent_field': [1],
'check_connection': [True]}}
}
}
# This is set simply to make sure the test of InstrTask pass.
self.task.selected_instrument = ('Test1', 'Test', 'C', 'S')
def test_check1(self):
"""Simply test that everything is ok if field can be evaluated.
"""
self.task.field = '3.0'
test, traceback = self.task.check(test_instr=True)
assert test
assert not traceback
assert self.task.get_from_database('Test_field') == 3.0
def test_check2(self):
"""Check handling a wrong field.
"""
self.task.field = '*1.0*'
test, traceback = self.task.check(test_instr=True)
assert not test
assert len(traceback) == 1
        assert 'root/Test-field' in traceback
assert self.task.get_from_database('Test_field') == 0.01
def test_perform1(self):
"""Simple test when everything is right.
"""
self.task.field = '2.0'
self.root.prepare()
self.task.perform()
assert self.root.get_from_database('Test_field') == 2.0
@pytest.mark.ui
def test_apply_mag_field_view1(exopy_qtbot, root_view, task_workbench):
"""Test ApplyMagFieldView widget outisde of a LoopTask.
"""
task = ApplyMagFieldTask(name='Test')
root_view.task.add_child_task(0, task)
show_and_close_widget(exopy_qtbot, ApplyMagFieldView(task=task, root=root_view))
@pytest.mark.ui
def test_apply_mag_field_view2(exopy_qtbot, root_view, task_workbench):
"""Test ApplyMagFieldView widget inside of a LoopTask.
"""
task = ApplyMagFieldTask(name='Test')
loop = LoopTask(name='r', task=task)
root_view.task.add_child_task(0, loop)
# XXX check for absence of target field
show_and_close_widget(exopy_qtbot, LoopView(task=loop, root=root_view))
| bsd-3-clause | -5,284,341,737,816,724,000 | 33.433628 | 84 | 0.56772 | false |
dukekuang/thinkpython2e-solution | chapter 7/7.3.py | 1 | 1155 | # -*- coding: utf-8 -*-
"""
Created on Sat Mar 18 13:28:29 2017
@author: dukekuang
"""
#Exercise 7.3. The mathematician Srinivasa Ramanujan
#found an infinite series that can be used to
#generate a numerical approximation of 1/pi.
#Write a function called estimate_pi that uses this formula to compute
#and return an estimate of pi.
#It should use a while loop to compute terms of the summation until
#the last term is smaller than 1e-15 (which is Python notation for 10**-15).
#You can check the result by comparing it to math.pi.
#Author Solution: http://thinkpython2.com/code/pi.py
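#The series (stated here for reference; the exercise text above omits the
#formula itself) is Ramanujan's:
#  1/pi = (2*sqrt(2)/9801) * sum_{k>=0} (4k)! * (1103 + 26390*k) / ((k!)**4 * 396**(4*k))
#which is exactly what estimate_pi() accumulates term by term below.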
import math
def estimate_pi():
factor=2*math.sqrt(2)/9801
k=0
total=0
while True:
num=math.factorial(4*k)*(1103+26390*k)
den=((math.factorial(k)**4)*(396**(4*k)))
term=num/den
total += term
if term < 1e-15 :
break
print('term',k,' ',term)
k += 1
print('last term ',term)
print('total ',total)
pi = 1/(factor*total)
print('1/pi ',factor*total)
print('estimate pi ',pi,'\t','math.pi ',math.pi)
estimate_pi() | gpl-3.0 | 634,642,464,086,977,000 | 26.85 | 75 | 0.611979 | false |
mdda/relationships-from-entity-stream | sort_of_clevr_generator.py | 1 | 14041 | import cv2
import os
import numpy as np
import random
import pickle
import argparse
parser = argparse.ArgumentParser(description='PyTorch Relations-from-Stream sort-of-CLVR dataset builder')
parser.add_argument('--dir', type=str, default='./data',
help='Directory in which to store the dataset')
parser.add_argument('--seed', type=int, default=10, metavar='S',
help='random seed (default: 10)')
parser.add_argument('--add_tricky', action='store_true', default=True,
help='Add the tricky cases')
parser.add_argument('-f', type=str, default='', help='Fake for Jupyter notebook import')
args = parser.parse_args()
dirs = args.dir
random.seed(args.seed)
np.random.seed(args.seed)
train_size, test_size = 9800, 200
img_size, size = 75, 5 # Size of img total, radius of sprite
question_size = 11 ##6 for one-hot vector of color, 2 for question type, 3 for question subtype
"""Question:[r, g, b, o, k, y, q1, q2, s1, s2, s3]"""
# answer is returned as an integer index within the following:
"""Answer : [yes, no, rectangle, circle, r, g, b, o, k, y]"""
"""Answer : [yes, no, rectangle, circle, 1, 2, 3, 4, 5, 6]""" # for counting
nb_questions = 10 # questions generated about each image
colors = [
(0,0,255), ##r red
(0,255,0), ##g green
(255,0,0), ##b blue
(0,156,255), ##o orange
(128,128,128), ##k grey
(0,255,255) ##y yellow
]
colors_str = 'red green blue orange grey yellow'.split()
def center_generate(objects):
# Generates a set of centers that do not overlap
while True:
pas = True
center = np.random.randint(0+size, img_size - size, 2)
if len(objects) > 0:
for name,c,shape in objects:
if ((center - c) ** 2).sum() < ((size * 2) ** 2):
pas = False
if pas:
return center
image_number=0
def build_dataset(nb_questions=nb_questions):
global image_number
image_number+=1
print("image %6d" % (image_number,))
objects = []
img = np.ones((img_size,img_size,3)) * 255
for color_id, color in enumerate(colors):
center = center_generate(objects)
if random.random()<0.5:
start = (center[0]-size, center[1]-size)
end = (center[0]+size, center[1]+size)
cv2.rectangle(img, start, end, color, -1)
objects.append((color_id, center, 'r'))
else:
center_ = (center[0], center[1])
cv2.circle(img, center_, size, color, -1)
objects.append((color_id, center, 'c'))
"""Non-relational questions"""
norel_questions, norel_answers = [], []
for i in range(nb_questions):
question = np.zeros((question_size))
color = random.randint(0,5)
question[color] = 1
question[6] = 1
subtype = random.randint(0,2)
question[subtype+8] = 1
norel_questions.append(question)
"""Answer : [yes, no, rectangle, circle, r, g, b, o, k, y]"""
if subtype == 0:
"""query shape->rectangle/circle"""
answer = 2 if objects[color][2] == 'r' else 3
elif subtype == 1:
"""query horizontal position->yes/no"""
answer = 0 if objects[color][1][0] < img_size/2 else 1
elif subtype == 2:
"""query vertical position->yes/no"""
answer = 0 if objects[color][1][1] < img_size/2 else 1
norel_answers.append(answer)
"""Relational questions"""
birel_questions, birel_answers = [], []
for i in range(nb_questions):
question = np.zeros((question_size))
color = random.randint(0,5)
question[color] = 1
question[7] = 1
subtype = random.randint(0,2)
question[subtype+8] = 1
birel_questions.append(question)
if subtype == 0:
"""closest-to->rectangle/circle"""
my_obj = objects[color][1]
dist_list = [((my_obj - obj[1]) ** 2).sum() for obj in objects]
dist_list[dist_list.index(0)] = 999
closest = dist_list.index(min(dist_list))
answer = 2 if objects[closest][2] == 'r' else 3
elif subtype == 1:
"""furthest-from->rectangle/circle"""
my_obj = objects[color][1]
dist_list = [((my_obj - obj[1]) ** 2).sum() for obj in objects]
furthest = dist_list.index(max(dist_list))
answer = 2 if objects[furthest][2] == 'r' else 3
elif subtype == 2:
"""count->1~6"""
"""Answer : [yes, no, rectangle, circle, 1, 2, 3, 4, 5, 6]"""
my_obj = objects[color][2]
count = -1
for obj in objects:
if obj[2] == my_obj:
count +=1
answer = count+4
birel_answers.append(answer)
"""Tricky questions"""
trirel_questions, trirel_answers = [], []
for i in range(nb_questions):
question = np.zeros((question_size))
question[6] = 1 # Both 6 and 7 set
question[7] = 1 # Both 6 and 7 set
subtype = random.randint(0,2)
#subtype=2 # Fix for now
question[subtype+8] = 1
trirel_questions.append(question)
if subtype == 0:
"""How many things are colinear with 2 chosen colours?"""
min_dist = size*5.
while True:
arr = sorted( random.sample(range(0, 6), 2) ) # pick 2 distinct colours, sorted
arr_obj = [ objects[i][1] for i in arr ]
#print("Point 1 : ", colors_str[ objects[ arr[0] ][0] ])
#print("Point 2 : ", colors_str[ objects[ arr[1] ][0] ])
# Want distant to that line to be <shape_size
s1 = arr_obj[1]-arr_obj[0]
#s1_norm = s1 / np.linalg.norm( s1 )
if np.linalg.norm(s1)>min_dist:
break
min_dist *= 0.95 # Make sure it will happen eventually
print("min_dist -> ", min_dist)
for i in arr:question[i]=1
count = -2 # Exclude original things (so min==0) 0=='circle'
for obj in objects:
if np.linalg.norm( np.cross(arr_obj[1]-obj[1], s1) ) / np.linalg.norm( s1 ) < size*2.:
#print("Colinear : ", colors_str[ obj[0] ])
count +=1
answer = count+3
elif subtype == 1:
"""How many things are eqidistant from 2 chosen colours?"""
min_dist = size*5.
while True:
arr = sorted( random.sample(range(0, 6), 2) ) # pick 2 distinct colours, sorted
arr_obj = [ objects[i][1] for i in arr ]
s1 = arr_obj[1] - arr_obj[0]
if np.linalg.norm(s1)>min_dist:
break
min_dist *= 0.95 # Make sure it will happen eventually
print("min_dist -> ", min_dist)
for i in arr:question[i]=1
unit_v = s1 / np.linalg.norm(s1)
count = 0 # (min==0) 0=='circle'
for obj in objects:
proj = arr_obj[1] + np.dot( unit_v, obj[1]-arr_obj[1]) * unit_v
#d1, d2 = np.linalg.norm( arr_obj[1]-obj[1] ), np.linalg.norm( arr_obj[0]-obj[1] )
d1, d2 = np.linalg.norm( arr_obj[1]-proj ), np.linalg.norm( arr_obj[0]-proj )
#print(" Test %10s : %3.0f -%3.0f = %3.0f vs %3.0f" % ( colors_str[ obj[0] ], d1, d2, np.abs(d1-d2), size*2.,))
if np.abs( d1-d2 ) < size*2.:
#print("Equidistant : ", colors_str[ obj[0] ])
count +=1
answer = count+3
elif subtype == 2:
"""How many things are on clockwise side of line joining 2 chosen colours?"""
min_dist = size*5.
while True:
arr = sorted( random.sample(range(0, 6), 2) ) # pick 2 distinct colours, sorted
arr_obj = [ objects[i][1] for i in arr ]
s1 = arr_obj[1]-arr_obj[0]
if np.linalg.norm(s1)>min_dist:
break
min_dist *= 0.95 # Make sure it will happen eventually
print("min_dist -> ", min_dist)
for i in arr:question[i]=1
count = 0 # (min==0) 0=='circle'
for obj in objects:
if np.cross(arr_obj[1]-obj[1], s1) >0.0:
#print("Clockwise : ", colors_str[ obj[0] ])
count +=1
answer = count+3
elif subtype == -1:
"""three colours enclose 'big' area -> yes/no"""
arr = sorted( random.sample(range(0, 6), 3) ) # pick 3 distinct colours, sorted
arr_obj = [ objects[i][1] for i in arr ]
s1, s2 = arr_obj[1]-arr_obj[0], arr_obj[2]-arr_obj[0]
area = 0.5 * np.cross( s1, s2 )
#print("area = ", area)
#normed = np.abs(area) / np.linalg.norm(s1) / np.linalg.norm(s2)
#print("normed = ", normed)
for i in arr:question[i]=1
answer = 0 if np.abs(area)>img_size*img_size/15. else 1
elif subtype == -1:
"""three colours are ordered clockwise -> yes/no"""
iter=0
while True:
#print("clockwise")
arr = sorted( random.sample(range(0, 6), 3) ) # pick 3 distinct colours, sorted
arr_obj = [ objects[i][1] for i in arr ]
#for i in [0,1,2]:print( arr_obj[i] )
# Enclosed area : (http://code.activestate.com/recipes/576896-3-point-area-finder/)
# sign => direction of 'winding'
area = 0.5 * np.cross( arr_obj[1]-arr_obj[0], arr_obj[2]-arr_obj[0] )
#print("area=", area)
if np.abs(area)>img_size*img_size/(10.+iter): # Should not be near co-linear (make it easier...)
for i in arr:question[i]=1
answer = 0 if area>0. else 1
break
iter += 1
elif subtype == -3:
"""What shape is the most isolated -> rectangle/circle"""
iter=0
while True:
if iter>10: print("most isolated %d" % iter)
arr = sorted( random.sample(range(0, 6), 3) ) # pick 3 distinct colours, sorted
arr_obj = [ objects[i][1] for i in arr ]
#for i in [0,1,2]:print( arr_obj[i] )
(l0, l1, l2) = [ np.linalg.norm( arr_obj[i] - arr_obj[ (i+1) % 3 ] ) for i in [0,1,2] ]
#print( "(l0, l1, l2)", (l0, l1, l2))
a = 1. + 1./(1.+iter/10.) # Descends slowly to 1...
furthest=-1
# test : both connected > alpha*opposite
if l2>l1*a and l0>l1*a: furthest=0
if l0>l2*a and l1>l2*a: furthest=1
if l1>l0*a and l2>l0*a: furthest=2
if furthest>=0:
for i in arr:question[i]=1
furthest_o = objects[arr[furthest]]
#print( "objects[arr[furthest]]", colors[furthest_o[0]], furthest_o[2] )
answer = 2 if furthest_o[2] == 'r' else 3
break
iter += 1
trirel_answers.append(answer)
norelations = (norel_questions, norel_answers)
birelations = (birel_questions, birel_answers)
trirelations = (trirel_questions, trirel_answers)
img = img/255.
dataset = (img, norelations, birelations, trirelations)
return dataset
#"""Question:[r, g, b, o, k, y, q1, q2, s1, s2, s3]"""
# Answer is returned as an integer index within the following:
#"""Answer : [yes, no, rectangle, circle, r, g, b, o, k, y]"""
#"""Answer : [yes, no, rectangle, circle, 1, 2, 3, 4, 5, 6]""" # for counting
## Ideas for tougher questions :
# How many things are colinear with 2 chosen colours?
# How many things are eqidistant from 2 chosen colours?
# How many things are on clockwise side of line joining 2 chosen colours?
# For the 3 highlighted colours, are they a 'large' triangle (area)
# For the 3 highlighted colours, are they clockwise (in order)
# For the 3 highlighted colours, what is shape of most isolated one?
# For the 3 highlighted colours, do they enclose another object
# For the 3 highlighted colours, are they in a row? (any orientation - tricky to define)
# For the 2 highlighted colours, what shape is between them?
## Not so tough
# For the n highlighted colours, are they all the same shape?
# But two different => no. So don't have to think more than two deep...
# For the 3 highlighted colours, are they in a row? (horizontal or vertical)
# Can cheat by counting total in a row or column if orientated
## alternative within Jupyter notebook :
# import sort_of_clevr_generator
if __name__ == "__main__":
try:
os.makedirs(dirs)
except:
print('directory {} already exists'.format(dirs))
print('building test datasets...')
test_datasets = [build_dataset() for _ in range(test_size)]
print('building train datasets...')
train_datasets = [build_dataset() for _ in range(train_size)]
#img_count = 0
#cv2.imwrite(os.path.join(dirs,'{}.png'.format(img_count)), cv2.resize(train_datasets[0][0]*255, (512,512)))
print('saving datasets...')
filename = os.path.join(dirs,'sort-of-clevr++.pickle')
with open(filename, 'wb') as f:
pickle.dump((train_datasets, test_datasets), f)
print('datasets saved at {}'.format(filename))
| bsd-3-clause | 1,525,503,148,877,996,800 | 37.468493 | 129 | 0.517627 | false |
CptDemocracy/Python | MITx-6.00.1x-EDX-Introduction-to-Computer-Science/Week-4/PSET-4/game.py | 1 | 2366 | """
PSET-4
Word Game Part 9: You and your Computer
"""
PROMPT_STR = "Enter n to deal a new hand, r to replay the last hand, or e to end game: "
NO_REPL_AVAIL_STR = "You have not played a hand yet. Please play a new hand first!"
INVALID_CMD = "Invalid command."
CHOOSE_PLAYER_STR = "Enter u to have yourself play, c to have the computer play: "
def playGame(wordList):
"""
Allow the user to play an arbitrary number of hands.
1) Asks the user to input 'n' or 'r' or 'e'.
* If the user inputs 'e', immediately exit the game.
* If the user inputs anything that's not 'n', 'r', or 'e', keep asking them again.
2) Asks the user to input a 'u' or a 'c'.
* If the user inputs anything that's not 'c' or 'u', keep asking them again.
3) Switch functionality based on the above choices:
* If the user inputted 'n', play a new (random) hand.
* Else, if the user inputted 'r', play the last hand again.
But if no hand was played, output "You have not played a hand yet.
Please play a new hand first!"
* If the user inputted 'u', let the user play the game
with the selected hand, using playHand.
* If the user inputted 'c', let the computer play the
game with the selected hand, using compPlayHand.
4) After the computer or user has played the hand, repeat from step 1
wordList: list (string)
"""
firstGame = True
lastHand = {}
while True:
userInput = raw_input(PROMPT_STR)
if userInput == 'n':
hand = dealHand(HAND_SIZE)
lastHand = hand.copy()
playHand_AI_and_human(hand, wordList, HAND_SIZE)
elif userInput == 'r':
if len(lastHand) == 0:
print(NO_REPL_AVAIL_STR)
else:
playHand_AI_and_human(lastHand, wordList, HAND_SIZE)
elif userInput == 'e':
break
else:
print(INVALID_CMD)
print
def playHand_AI_and_human(hand, wordList, n):
userInput = ""
while userInput != 'u' and userInput != 'c':
userInput = raw_input(CHOOSE_PLAYER_STR)
if userInput == 'u':
playHand(hand, wordList, n)
elif userInput == 'c':
compPlayHand(hand, wordList, n)
else:
print(INVALID_CMD)
| mit | -341,466,025,357,899,300 | 34.848485 | 90 | 0.587067 | false |
lgfausak/sqlbridge | sqlbridge/twisted/db/ausqlite3.py | 1 | 6540 | ###############################################################################
##
## Copyright (C) 2014 Greg Fausak
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
###############################################################################
## ausqlite3.py - sqlite3 driver
##
## this is driver interface code. It is used with the DB class. It shouldn't
## be called or instantiated independent of that class.
###############################################################################
from __future__ import absolute_import
import sys,os
import sqlite3
from twisted.enterprise import adbapi
from twisted.python import log
from twisted.internet.defer import inlineCallbacks, returnValue
from .dbbase import dbbase
def dict_factory(cursor, row):
d = {}
for idx, col in enumerate(cursor.description):
d[col[0]] = row[idx]
return d
def set_dict_factory(conn):
conn.row_factory = dict_factory
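# With this row factory installed on each pooled connection, runQuery results
# come back as dictionaries keyed by column name (e.g. {'id': 1}) instead of
# plain tuples; the example value is illustrative only.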
class SQLITE3_3_8_2(dbbase):
"""
basic sqlite3 3.8.2 driver
"""
def __init__(self, topic_base, app_session, debug):
if debug is not None and debug:
log.startLogging(sys.stdout)
log.msg("SQLITE3_3_8_2:__init__()")
self.engine_version = "SQLITE3_3_8_2"
self.engine = "SQLITE"
self.conn = None
self.dsn = None
self.topic_base = topic_base
self.app_session = app_session
self.debug = debug
return
#
# connect
# simply connect to a database
# dsn is the only argument, it is a string, in psycopg2 connect
# format. basically it looks like
# dbname=DBNAME host=MACHINE user=DBUSER
# DBNAME is the database name
# MACHINE is the ip address or dns name of the machine
# DBUSER is the user to connect as
#
def connect(self,*args,**kwargs):
log.msg("SQLITE3_3_8_2:connect({},{})".format(args,kwargs))
self.dsn = args[0]
# there must be an easier way.
# this converts db=x host=y shatever=z to a dictionary.
try:
md = dict(s.split('=') for s in self.dsn.split())
md['cp_openfun'] = set_dict_factory
#self.conn = adbapi.ConnectionPool("sqlite3",**dict(s.split('=') for s in self.dsn.split()))
self.conn = adbapi.ConnectionPool("sqlite3",**dict(md))
log.msg("SQLITE3_3_8_2:connect() established")
except Exception as err:
log.msg("SQLITE3_3_8_2:connect({}),error({})".format(self.dsn,err))
raise err
return
#
# disconnect
# this disconnects from the currently connected database. if no database
# is currently connected then this does nothing.
def disconnect(self,*args,**kwargs):
log.msg("SQLITE3_3_8_2:disconnect({},{})".format(args,kwargs))
if self.conn:
c = self.conn
self.conn = None
c.close()
return
#
# query:
# s - query to run (with dictionary substitution embedded, like %(key)s
# a - dictionary pointing to arguments.
# example:
# s = 'select * from login where id = %(id)s'
# a = { 'id': 100 }
# returns:
# dictionary result of query
# note:
# there MUST be a result, otherwise use the operation call!
# well, the query can return 0 rows, that is ok. but, if the query
# by its nature doesn't return any rows then don't use this call!
# for example, a query that says 'insert into table x (c) values(r)'
# by its nature it doesn't return a row, so, this isn't the right
# method to use, use operation instead
#
@inlineCallbacks
def query(self,*args,**kwargs):
log.msg("SQLITE3_3_8_2:query({},{})".format(args,kwargs))
s = args[0]
a = args[1]
if self.conn:
try:
log.msg("SQLITE3_3_8_2:query().running({} with args {})".format(s,a))
rv = yield self.conn.runQuery(s,a)
log.msg("SQLITE3_3_8_2:query().results({})".format(rv))
returnValue(rv)
except Exception as err:
log.msg("SQLITE3_3_8_2:query({}),error({})".format(s,err))
raise err
# error here, probably should raise exception
return
#
# operation:
# identical to query, except, there is no result returned.
# note:
# it is important that your query does NOT return anything! If it does,
# use the query call!
#
# see also:
# query method has a good description of this and query.
#
@inlineCallbacks
def operation(self,*args,**kwargs):
log.msg("SQLITE3_3_8_2:operation({},{})".format(args,kwargs))
s = args[0]
a = args[1]
if self.conn:
try:
log.msg("SQLITE3_3_8_2:query().running({} with args {})".format(s,a))
rv = yield self.conn.runOperation(s,a)
log.msg("SQLITE3_3_8_2:query().results({})".format(rv))
returnValue(rv)
except Exception as err:
log.msg("SQLITE3_3_8_2:query({}),error({})".format(s,err))
raise err
# error here, probably should raise exception
return
#
# watch:
# this is specific to postgres NOTIFY/LISTEN. other drivers will need to stub this out
#
def watch(self,*args,**kwargs):
raise Exception("sqlite3 is trying to add watch, can only do this in postgres")
return
#
# info:
# return information about this connection
#
@inlineCallbacks
def info(self,*args,**kwargs):
log.msg("SQLITE3_3_8_2:info({},{})".format(args,kwargs))
rv = yield [{
"engine":self.engine,
"engine_version":self.engine_version,
"dsn":self.dsn,
"topic_base":self.topic_base,
"debug":self.debug
}]
returnValue(rv)
return
| apache-2.0 | 6,459,614,035,259,180,000 | 32.88601 | 104 | 0.562997 | false |
jdfreder/leaftletwidget | leafletwidget/notebook.py | 1 | 1202 | from __future__ import print_function
import os
from IPython.display import display, HTML, Javascript
leaflet_css = '//cdn.leafletjs.com/leaflet-0.7.2/leaflet.css'
# leaftlet_js = "//cdn.leafletjs.com/leaflet-0.7.2/leaflet"
# leaflet_draw_js = ['//cdnjs.cloudflare.com/ajax/libs/leaflet.draw/0.2.3/leaflet.draw-src.js',
# '//cdnjs.cloudflare.com/ajax/libs/leaflet.draw/0.2.3/leaflet.draw.js']
# leaflet_draw_png = ['//cdnjs.cloudflare.com/ajax/libs/leaflet.draw/0.2.3/images/spritesheet-2x.png',
# '//cdnjs.cloudflare.com/ajax/libs/leaflet.draw/0.2.3/images/spritesheet.png']
leaflet_draw_css = '//cdnjs.cloudflare.com/ajax/libs/leaflet.draw/0.2.3/leaflet.draw.css'
def get_static_path():
return os.path.join(os.path.split(__file__)[0], 'static')
css_template = '<link rel="stylesheet" href="{}" />'
def display_css(url):
display(HTML(css_template.format(url)))
def initialize_notebook(leaflet_css=leaflet_css, leaflet_js=leaflet_css):
display_css(leaflet_css)
display_css(leaflet_draw_css)
for filename in ['leaflet.js']:
with open(os.path.join(get_static_path(), filename)) as f:
display(Javascript(f.read()))
| mit | 2,223,657,766,573,104,400 | 36.5625 | 102 | 0.680532 | false |
fluxcapacitor/pipeline | libs/pipeline_runtime/setup.py | 1 | 1300 | # -*- coding: utf-8 -*-
"""setup.py: setuptools control."""
import re
from setuptools import setup
#import sys
#if not sys.version_info[0] == 3:
# print("\n \
# sys.exit("\n \
# ****************************************************************\n \
# * The CLI has only been tested with Python 3+ at this time. *\n \
# * Report any issues with Python 2 by emailing [email protected] *\n \
# ****************************************************************\n")
version = re.search(
'^__version__\s*=\s*"(.*)"',
open('pipeline_runtime/__init__.py').read(),
re.M
).group(1)
# Get the long description from the relevant file
with open('README.rst', encoding='utf-8') as f:
long_description = f.read()
with open('requirements.txt', encoding='utf-8') as f:
requirements = [line.rstrip() for line in f.readlines()]
setup(
name = "pipeline-runtime",
packages = ["pipeline_runtime"],
version = version,
description = "PipelineAI Runtime",
long_description = "%s\n\nRequirements:\n%s" % (long_description, requirements),
author = "Chris Fregly",
author_email = "[email protected]",
url = "https://github.com/PipelineAI/",
install_requires=requirements,
dependency_links=[
]
)
| apache-2.0 | 2,087,528,225,292,709,600 | 29.952381 | 84 | 0.540769 | false |
Zhenxingzhang/kaggle-cdiscount-classification | src/misc/inception_bson.py | 1 | 1345 | from src.freezing import inception
from src.freezing.inception import NodeLookup
import tensorflow as tf
import bson
from tqdm import tqdm
import numpy as np
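# Sketch of the flow below: iterate over the BSON dump, run every product image
# through the Inception model and write one "product_id predicted_label" line
# per image to inception_feature.txt.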
if __name__ == '__main__':
input_bson_filename = "/data/data/train_example.bson"
node_lookup = NodeLookup()
inception_graph = tf.Graph()
inception_sess = tf.Session(graph=inception_graph)
with inception_graph.as_default(), inception_sess.as_default() as sess:
inception_model = inception.inception_inference()
z = 0
n = 82
data = bson.decode_file_iter(open(input_bson_filename, 'rb'))
opts = tf.python_io.TFRecordOptions(tf.python_io.TFRecordCompressionType.ZLIB)
with open('inception_feature.txt', 'w') as f:
for c, d in tqdm(enumerate(data), total=n):
n_img = len(d['imgs'])
for index in range(n_img):
img_raw = d['imgs'][index]['picture']
# height = img.shape[0]
# width = img.shape[1]
product_id = d['_id']
prediction = inception_model(sess, img_raw)
predictions = np.squeeze(prediction)
top_5 = predictions.argsort()[-5:][::-1]
f.write(str(d["_id"]) + " " + node_lookup.id_to_string(top_5[0]) + "\n")
| apache-2.0 | 7,757,940,009,184,435,000 | 34.394737 | 92 | 0.565056 | false |
moniker-dns/contractor | contractor/openstack/common/gettextutils.py | 1 | 13133 | # Copyright 2012 Red Hat, Inc.
# Copyright 2013 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
gettext for openstack-common modules.
Usual usage in an openstack.common module:
from contractor.openstack.common.gettextutils import _
"""
import copy
import gettext
import logging
import os
import re
try:
import UserString as _userString
except ImportError:
import collections as _userString
from babel import localedata
import six
_localedir = os.environ.get('contractor'.upper() + '_LOCALEDIR')
_t = gettext.translation('contractor', localedir=_localedir, fallback=True)
_AVAILABLE_LANGUAGES = {}
USE_LAZY = False
def enable_lazy():
"""Convenience function for configuring _() to use lazy gettext
Call this at the start of execution to enable the gettextutils._
function to use lazy gettext functionality. This is useful if
your project is importing _ directly instead of using the
gettextutils.install() way of importing the _ function.
"""
global USE_LAZY
USE_LAZY = True
def _(msg):
if USE_LAZY:
return Message(msg, 'contractor')
else:
if six.PY3:
return _t.gettext(msg)
return _t.ugettext(msg)
def install(domain, lazy=False):
"""Install a _() function using the given translation domain.
Given a translation domain, install a _() function using gettext's
install() function.
The main difference from gettext.install() is that we allow
overriding the default localedir (e.g. /usr/share/locale) using
a translation-domain-specific environment variable (e.g.
NOVA_LOCALEDIR).
:param domain: the translation domain
:param lazy: indicates whether or not to install the lazy _() function.
The lazy _() introduces a way to do deferred translation
of messages by installing a _ that builds Message objects,
instead of strings, which can then be lazily translated into
any available locale.
"""
if lazy:
# NOTE(mrodden): Lazy gettext functionality.
#
# The following introduces a deferred way to do translations on
# messages in OpenStack. We override the standard _() function
# and % (format string) operation to build Message objects that can
# later be translated when we have more information.
#
# Also included below is an example LocaleHandler that translates
# Messages to an associated locale, effectively allowing many logs,
# each with their own locale.
def _lazy_gettext(msg):
"""Create and return a Message object.
Lazy gettext function for a given domain, it is a factory method
for a project/module to get a lazy gettext function for its own
translation domain (i.e. nova, glance, cinder, etc.)
Message encapsulates a string so that we can translate
it later when needed.
"""
return Message(msg, domain)
from six import moves
moves.builtins.__dict__['_'] = _lazy_gettext
else:
localedir = '%s_LOCALEDIR' % domain.upper()
if six.PY3:
gettext.install(domain,
localedir=os.environ.get(localedir))
else:
gettext.install(domain,
localedir=os.environ.get(localedir),
unicode=True)
class Message(_userString.UserString, object):
"""Class used to encapsulate translatable messages."""
def __init__(self, msg, domain):
# _msg is the gettext msgid and should never change
self._msg = msg
self._left_extra_msg = ''
self._right_extra_msg = ''
self._locale = None
self.params = None
self.domain = domain
@property
def data(self):
# NOTE(mrodden): this should always resolve to a unicode string
# that best represents the state of the message currently
localedir = os.environ.get(self.domain.upper() + '_LOCALEDIR')
if self.locale:
lang = gettext.translation(self.domain,
localedir=localedir,
languages=[self.locale],
fallback=True)
else:
# use system locale for translations
lang = gettext.translation(self.domain,
localedir=localedir,
fallback=True)
if six.PY3:
ugettext = lang.gettext
else:
ugettext = lang.ugettext
full_msg = (self._left_extra_msg +
ugettext(self._msg) +
self._right_extra_msg)
if self.params is not None:
full_msg = full_msg % self.params
return six.text_type(full_msg)
@property
def locale(self):
return self._locale
@locale.setter
def locale(self, value):
self._locale = value
if not self.params:
return
# This Message object may have been constructed with one or more
# Message objects as substitution parameters, given as a single
# Message, or a tuple or Map containing some, so when setting the
# locale for this Message we need to set it for those Messages too.
if isinstance(self.params, Message):
self.params.locale = value
return
if isinstance(self.params, tuple):
for param in self.params:
if isinstance(param, Message):
param.locale = value
return
if isinstance(self.params, dict):
for param in self.params.values():
if isinstance(param, Message):
param.locale = value
def _save_dictionary_parameter(self, dict_param):
full_msg = self.data
# look for %(blah) fields in string;
# ignore %% and deal with the
# case where % is first character on the line
keys = re.findall('(?:[^%]|^)?%\((\w*)\)[a-z]', full_msg)
# if we don't find any %(blah) blocks but have a %s
if not keys and re.findall('(?:[^%]|^)%[a-z]', full_msg):
# apparently the full dictionary is the parameter
params = copy.deepcopy(dict_param)
else:
params = {}
for key in keys:
try:
params[key] = copy.deepcopy(dict_param[key])
except TypeError:
# cast uncopyable thing to unicode string
params[key] = six.text_type(dict_param[key])
return params
def _save_parameters(self, other):
# we check for None later to see if
# we actually have parameters to inject,
# so encapsulate if our parameter is actually None
if other is None:
self.params = (other, )
elif isinstance(other, dict):
self.params = self._save_dictionary_parameter(other)
else:
# fallback to casting to unicode,
# this will handle the problematic python code-like
# objects that cannot be deep-copied
try:
self.params = copy.deepcopy(other)
except TypeError:
self.params = six.text_type(other)
return self
# overrides to be more string-like
def __unicode__(self):
return self.data
def __str__(self):
if six.PY3:
return self.__unicode__()
return self.data.encode('utf-8')
def __getstate__(self):
to_copy = ['_msg', '_right_extra_msg', '_left_extra_msg',
'domain', 'params', '_locale']
new_dict = self.__dict__.fromkeys(to_copy)
for attr in to_copy:
new_dict[attr] = copy.deepcopy(self.__dict__[attr])
return new_dict
def __setstate__(self, state):
for (k, v) in state.items():
setattr(self, k, v)
# operator overloads
def __add__(self, other):
copied = copy.deepcopy(self)
copied._right_extra_msg += other.__str__()
return copied
def __radd__(self, other):
copied = copy.deepcopy(self)
copied._left_extra_msg += other.__str__()
return copied
def __mod__(self, other):
# do a format string to catch and raise
# any possible KeyErrors from missing parameters
self.data % other
copied = copy.deepcopy(self)
return copied._save_parameters(other)
def __mul__(self, other):
return self.data * other
def __rmul__(self, other):
return other * self.data
def __getitem__(self, key):
return self.data[key]
def __getslice__(self, start, end):
return self.data.__getslice__(start, end)
def __getattribute__(self, name):
# NOTE(mrodden): handle lossy operations that we can't deal with yet
# These override the UserString implementation, since UserString
# uses our __class__ attribute to try and build a new message
# after running the inner data string through the operation.
# At that point, we have lost the gettext message id and can just
# safely resolve to a string instead.
ops = ['capitalize', 'center', 'decode', 'encode',
'expandtabs', 'ljust', 'lstrip', 'replace', 'rjust', 'rstrip',
'strip', 'swapcase', 'title', 'translate', 'upper', 'zfill']
if name in ops:
return getattr(self.data, name)
else:
return _userString.UserString.__getattribute__(self, name)
def get_available_languages(domain):
"""Lists the available languages for the given translation domain.
:param domain: the domain to get languages for
"""
if domain in _AVAILABLE_LANGUAGES:
return copy.copy(_AVAILABLE_LANGUAGES[domain])
localedir = '%s_LOCALEDIR' % domain.upper()
find = lambda x: gettext.find(domain,
localedir=os.environ.get(localedir),
languages=[x])
# NOTE(mrodden): en_US should always be available (and first in case
# order matters) since our in-line message strings are en_US
language_list = ['en_US']
# NOTE(luisg): Babel <1.0 used a function called list(), which was
# renamed to locale_identifiers() in >=1.0, the requirements master list
# requires >=0.9.6, uncapped, so defensively work with both. We can remove
# this check when the master list updates to >=1.0, and update all projects
list_identifiers = (getattr(localedata, 'list', None) or
getattr(localedata, 'locale_identifiers'))
locale_identifiers = list_identifiers()
for i in locale_identifiers:
if find(i) is not None:
language_list.append(i)
_AVAILABLE_LANGUAGES[domain] = language_list
return copy.copy(language_list)
def get_localized_message(message, user_locale):
"""Gets a localized version of the given message in the given locale.
If the message is not a Message object the message is returned as-is.
If the locale is None the message is translated to the default locale.
:returns: the translated message in unicode, or the original message if
it could not be translated
"""
translated = message
if isinstance(message, Message):
original_locale = message.locale
message.locale = user_locale
translated = six.text_type(message)
message.locale = original_locale
return translated
class LocaleHandler(logging.Handler):
"""Handler that can have a locale associated to translate Messages.
A quick example of how to utilize the Message class above.
LocaleHandler takes a locale and a target logging.Handler object
to forward LogRecord objects to after translating the internal Message.
"""
def __init__(self, locale, target):
"""Initialize a LocaleHandler
:param locale: locale to use for translating messages
:param target: logging.Handler object to forward
LogRecord objects to after translation
"""
logging.Handler.__init__(self)
self.locale = locale
self.target = target
def emit(self, record):
if isinstance(record.msg, Message):
# set the locale and resolve to a string
record.msg.locale = self.locale
self.target.emit(record)
| apache-2.0 | 4,492,199,157,041,093,000 | 34.398922 | 79 | 0.60565 | false |
Ziemin/telepathy-gabble | tests/twisted/version.py | 2 | 1257 | # vim: set encoding=utf-8 :
"""
Tests Gabble's implementation of XEP-0092.
"""
from twisted.words.xish import xpath
from servicetest import assertLength
from gabbletest import exec_test, elem_iq, elem
import ns
def test(q, bus, conn, stream):
request = elem_iq(stream, 'get')(
elem(ns.VERSION, 'query')
)
stream.send(request)
reply = q.expect('stream-iq', iq_id=request['id'],
query_ns=ns.VERSION, query_name='query')
# Both <name/> and <version/> are REQUIRED. What they actually contain is
# someone else's problem™.
names = xpath.queryForNodes('/query/name', reply.query)
assertLength(1, names)
versions = xpath.queryForNodes('/query/version', reply.query)
assertLength(1, versions)
# <os/> is OPTIONAL. “Revealing the application's underlying operating
# system may open the user or system to attacks directed against that
# operating system; therefore, an application MUST provide a way for a
# human user or administrator to disable sharing of information about the
# operating system.” The “way” that we provide is never to send it.
oss = xpath.queryForNodes('/query/os', reply.query)
assert oss is None
if __name__ == '__main__':
exec_test(test)
| lgpl-2.1 | -2,559,544,410,421,607,000 | 32.702703 | 77 | 0.688051 | false |
hwroitzsch/DayLikeTodayClone | examples/main.py | 1 | 1357 | import json
from urllib.parse import urlencode
from urllib.request import urlopen, Request
# HTTP URL is constructed accordingly with JSON query results format in mind.
def sparqlQuery(query, baseURL, format="application/json"):
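    # Builds the standard Virtuoso SPARQL endpoint form parameters, POSTs them
    # to baseURL and decodes the JSON result set.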
params = {
"default-graph": "",
"should-sponge": "soft",
"query": query,
"debug": "on",
"timeout": "",
"format": format,
"save": "display",
"fname": ""
}
querypart = urlencode(params)
binary_query = querypart.encode('utf8')
request = Request(baseURL, binary_query)
response = urlopen(request).read()
return json.loads(response)
# Setting Data Source Name (DSN)
data_source_name = "http://dbpedia.org/sparql"
# Virtuoso pragmas for instructing SPARQL engine to perform an HTTP GET
# using the IRI in FROM clause as Data Source URL
query = """DEFINE get:soft "replace" SELECT DISTINCT * FROM <%s> WHERE {?s ?p ?o}""" %data_source_name
query_people_born_before_1900 = """
SELECT ?name ?birth ?death ?person
WHERE {
?person dbo:birthPlace :Berlin .
?person dbo:birthDate ?birth .
?person foaf:name ?name .
?person dbo:deathDate ?death .
FILTER (?birth < "1900-01-01"^^xsd:date) .
}
ORDER BY ?name
"""
data = sparqlQuery(query, "http://localhost:8890/sparql/")
print("Retrieved data:\n" + json.dumps(data, sort_keys=True, indent=4))
| mit | 3,912,693,555,853,568,500 | 28.5 | 102 | 0.666912 | false |
TeamHG-Memex/frontera | frontera/tests/test_overused_buffer.py | 1 | 1784 | # -*- coding: utf-8 -*-
from frontera.tests.backends import BackendSequenceTest, TEST_SITES
from frontera.utils.tester import DownloaderSimulator, BaseDownloaderSimulator
from urlparse import urlparse
class DFSOverusedBackendTest(BackendSequenceTest):
EXPECTED_SEQUENCES = {
"SEQUENCE_01_A": [
'https://www.a.com', 'http://b.com', 'http://www.a.com/2', 'http://www.a.com/2/1', 'http://www.a.com/3',
'http://www.a.com/2/1/3', 'http://www.a.com/2/4/1', 'http://www.a.net', 'http://b.com/2',
'http://test.cloud.c.com', 'http://cloud.c.com', 'http://test.cloud.c.com/2',
'http://b.com/entries?page=2', 'http://www.a.com/2/4/2'
],
"SEQUENCE_02_A": [
'https://www.a.com', 'http://b.com', 'http://www.a.com/2', 'http://www.a.com/2/1', 'http://www.a.com/3',
'http://www.a.com/2/1/3', 'http://www.a.com/2/4/1', 'http://www.a.com/2/4/2', 'http://www.a.net',
'http://b.com/2', 'http://test.cloud.c.com', 'http://cloud.c.com', 'http://test.cloud.c.com/2',
'http://b.com/entries?page=2'
]
}
def test_sequence1(self):
sequence = self.get_sequence(TEST_SITES['SITE_09'], max_next_requests=5,
downloader_simulator=DownloaderSimulator(rate=1))
assert len(sequence) == 7
all_domains = set()
for requests, iteration, dl_info in sequence:
overused_keys = set(dl_info['overused_keys'])
for r in requests:
url = urlparse(r.url)
all_domains.add(url.hostname)
if not overused_keys:
continue
assert url.hostname not in overused_keys
assert overused_keys.issubset(all_domains)
| bsd-3-clause | 7,326,246,749,919,205,000 | 47.216216 | 116 | 0.554933 | false |
brendannee/Bikesy-Backend | misc/tripplanner/main.py | 2 | 1089 | from graphserver.graphdb import GraphDatabase
from graphserver.ext.osm.osmdb import OSMDB
from graphserver.core import State, WalkOptions, Graph, Street, Combination, EdgePayload, ContractionHierarchy
import sys
def make_native_ch(basename):
gdb = GraphDatabase( basename+".gdb" )
gg = gdb.incarnate()
wo = WalkOptions()
wo.hill_reluctance=1
ch = gg.get_contraction_hierarchies( wo )
chdowndb = GraphDatabase( basename+".down.gdb", overwrite=True )
chdowndb.populate( ch.downgraph, reporter=sys.stdout )
chupdb = GraphDatabase( basename+".up.gdb", overwrite=True )
chupdb.populate( ch.upgraph, reporter=sys.stdout )
def reincarnate_chdbs(basename):
chdowndb = GraphDatabase( basename+".down.gdb" )
chupdb = GraphDatabase( basename+".up.gdb" )
upgg = chupdb.incarnate()
downgg = chdowndb.incarnate()
return ContractionHierarchy(upgg, downgg)
if __name__ == '__main__':
try:
make_native_ch( sys.argv[1] )
except IndexError:
print "usage: python ch.py gdb_basename"
| bsd-3-clause | 4,947,394,258,959,224,000 | 30.114286 | 110 | 0.676768 | false |
eHealthAfrica/rapidpro | temba/settings_common.py | 1 | 37743 | from __future__ import unicode_literals
import iptools
import os
import sys
from celery.schedules import crontab
from datetime import timedelta
from django.utils.translation import ugettext_lazy as _
# -----------------------------------------------------------------------------------
# Default to debugging
# -----------------------------------------------------------------------------------
DEBUG = True
# -----------------------------------------------------------------------------------
# Sets TESTING to True if this configuration is read during a unit test
# -----------------------------------------------------------------------------------
TESTING = sys.argv[1:2] == ['test']
if TESTING:
PASSWORD_HASHERS = ('django.contrib.auth.hashers.MD5PasswordHasher',)
DEBUG = False
ADMINS = (
('RapidPro', '[email protected]'),
)
MANAGERS = ADMINS
# hardcode the postgis version so we can do reset db's from a blank database
POSTGIS_VERSION = (2, 1)
# -----------------------------------------------------------------------------------
# set the mail settings, override these in your settings.py
# if your site was at http://temba.io, it might look like this:
# -----------------------------------------------------------------------------------
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_HOST_USER = '[email protected]'
DEFAULT_FROM_EMAIL = '[email protected]'
EMAIL_HOST_PASSWORD = 'mypassword'
EMAIL_USE_TLS = True
# Used when sending email from within a flow and the user hasn't configured
# their own SMTP server.
FLOW_FROM_EMAIL = '[email protected]'
# where recordings and exports are stored
AWS_STORAGE_BUCKET_NAME = 'dl-temba-io'
AWS_BUCKET_DOMAIN = AWS_STORAGE_BUCKET_NAME + '.s3.amazonaws.com'
STORAGE_ROOT_DIR = 'test_orgs' if TESTING else 'orgs'
# -----------------------------------------------------------------------------------
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone
# -----------------------------------------------------------------------------------
USE_TZ = True
TIME_ZONE = 'GMT'
USER_TIME_ZONE = 'Africa/Kigali'
MODELTRANSLATION_TRANSLATION_REGISTRY = "translation"
# -----------------------------------------------------------------------------------
# Default language used for this installation
# -----------------------------------------------------------------------------------
LANGUAGE_CODE = 'en-us'
# -----------------------------------------------------------------------------------
# Available languages for translation
# -----------------------------------------------------------------------------------
LANGUAGES = (
('en-us', _("English")),
('pt-br', _("Portuguese")),
('fr', _("French")),
('es', _("Spanish")))
DEFAULT_LANGUAGE = "en-us"
DEFAULT_SMS_LANGUAGE = "en-us"
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# URL prefix for admin static files -- CSS, JavaScript and images.
# Make sure to use a trailing slash.
# Examples: "http://foo.com/static/admin/", "/static/admin/".
ADMIN_MEDIA_PREFIX = '/static/admin/'
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
'compressor.finders.CompressorFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'your own secret key'
EMAIL_CONTEXT_PROCESSORS = ('temba.utils.email.link_components',)
# -----------------------------------------------------------------------------------
# Directory Configuration
# -----------------------------------------------------------------------------------
PROJECT_DIR = os.path.join(os.path.abspath(os.path.dirname(__file__)))
LOCALE_PATHS = (os.path.join(PROJECT_DIR, '../locale'),)
RESOURCES_DIR = os.path.join(PROJECT_DIR, '../resources')
FIXTURE_DIRS = (os.path.join(PROJECT_DIR, '../fixtures'),)
TESTFILES_DIR = os.path.join(PROJECT_DIR, '../testfiles')
STATICFILES_DIRS = (os.path.join(PROJECT_DIR, '../static'), os.path.join(PROJECT_DIR, '../media'), )
STATIC_ROOT = os.path.join(PROJECT_DIR, '../sitestatic')
STATIC_URL = '/sitestatic/'
COMPRESS_ROOT = os.path.join(PROJECT_DIR, '../sitestatic')
MEDIA_ROOT = os.path.join(PROJECT_DIR, '../media')
MEDIA_URL = "/media/"
# -----------------------------------------------------------------------------------
# Templates Configuration
# -----------------------------------------------------------------------------------
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(PROJECT_DIR, '../templates')],
'OPTIONS': {
'context_processors': [
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.debug',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.contrib.messages.context_processors.messages',
'django.template.context_processors.request',
'temba.context_processors.branding',
'temba.orgs.context_processors.user_group_perms_processor',
'temba.orgs.context_processors.unread_count_processor',
'temba.channels.views.channel_status_processor',
'temba.msgs.views.send_message_auto_complete_processor',
'temba.orgs.context_processors.settings_includer',
],
'loaders': [
'temba.utils.haml.HamlFilesystemLoader',
'temba.utils.haml.HamlAppDirectoriesLoader',
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
'django.template.loaders.eggs.Loader'
],
'debug': False if TESTING else DEBUG
},
},
]
if TESTING:
TEMPLATES[0]['OPTIONS']['context_processors'] += ('temba.tests.add_testing_flag_to_context', )
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'temba.middleware.BrandingMiddleware',
'temba.middleware.OrgTimezoneMiddleware',
'temba.middleware.FlowSimulationMiddleware',
'temba.middleware.ActivateLanguageMiddleware',
'temba.middleware.OrgHeaderMiddleware',
)
ROOT_URLCONF = 'temba.urls'
# other urls to add
APP_URLS = []
SITEMAP = ('public.public_index',
'public.public_blog',
'public.video_list',
'api')
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.humanize',
'django.contrib.gis',
'django.contrib.sitemaps',
'django.contrib.postgres',
# Haml-like templates
'hamlpy',
# Redis cache
'redis',
# rest framework for api access
'rest_framework',
'rest_framework.authtoken',
# compress our CSS and js
'compressor',
# smartmin
'smartmin',
'smartmin.csv_imports',
'smartmin.users',
# django-timezone-field
'timezone_field',
# temba apps
'temba.assets',
'temba.auth_tweaks',
'temba.api',
'temba.public',
'temba.schedules',
'temba.orgs',
'temba.contacts',
'temba.channels',
'temba.msgs',
'temba.flows',
'temba.reports',
'temba.triggers',
'temba.utils',
'temba.campaigns',
'temba.ivr',
'temba.ussd',
'temba.locations',
'temba.values',
'temba.airtime',
)
# the last installed app that uses smartmin permissions
PERMISSIONS_APP = 'temba.airtime'
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'root': {
'level': 'WARNING',
'handlers': ['console'],
},
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s'
},
},
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'verbose'
},
'null': {
'class': 'logging.NullHandler',
},
},
'loggers': {
'pycountry': {
'level': 'ERROR',
'handlers': ['console'],
'propagate': False,
},
'django.security.DisallowedHost': {
'handlers': ['null'],
'propagate': False,
},
'django.db.backends': {
'level': 'ERROR',
'handlers': ['console'],
'propagate': False,
},
},
}
# -----------------------------------------------------------------------------------
# Branding Configuration
# -----------------------------------------------------------------------------------
BRANDING = {
'rapidpro.io': {
'slug': 'rapidpro',
'name': 'RapidPro',
'org': 'UNICEF',
'colors': dict(primary='#0c6596'),
'styles': ['brands/rapidpro/font/style.css'],
'welcome_topup': 1000,
'email': '[email protected]',
'support_email': '[email protected]',
'link': 'https://app.rapidpro.io',
'api_link': 'https://api.rapidpro.io',
'docs_link': 'http://docs.rapidpro.io',
'domain': 'app.rapidpro.io',
'favico': 'brands/rapidpro/rapidpro.ico',
'splash': '/brands/rapidpro/splash.jpg',
'logo': '/brands/rapidpro/logo.png',
'allow_signups': True,
'tiers': dict(import_flows=0, multi_user=0, multi_org=0),
'bundles': [],
'welcome_packs': [dict(size=5000, name="Demo Account"), dict(size=100000, name="UNICEF Account")],
'description': _("Visually build nationally scalable mobile applications from anywhere in the world."),
'credits': _("Copyright © 2012-2017 UNICEF, Nyaruka. All Rights Reserved.")
}
}
DEFAULT_BRAND = 'rapidpro.io'
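# Illustrative sketch (assumption, not part of the stock settings): a concrete
# installation would normally override BRANDING and DEFAULT_BRAND in its own
# settings.py, e.g. by copying and adjusting the default entry:
# BRANDING['example.org'] = dict(BRANDING['rapidpro.io'], slug='example',
#                                name='Example', domain='rapidpro.example.org')
# DEFAULT_BRAND = 'example.org'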
# -----------------------------------------------------------------------------------
# Permission Management
# -----------------------------------------------------------------------------------
# this lets us easily create new permissions across our objects
PERMISSIONS = {
'*': ('create', # can create an object
'read', # can read an object, viewing it's details
'update', # can update an object
'delete', # can delete an object,
'list'), # can view a list of the objects
'api.apitoken': ('refresh',),
'api.resthook': ('api', 'list'),
'api.webhookevent': ('api',),
'api.resthooksubscriber': ('api',),
'campaigns.campaign': ('api',
'archived',
),
'campaigns.campaignevent': ('api',),
'contacts.contact': ('api',
'block',
'blocked',
'break_anon',
'customize',
'export',
'stopped',
'filter',
'history',
'import',
'omnibox',
'unblock',
'unstop',
'update_fields',
'update_fields_input'
),
'contacts.contactfield': ('api',
'json',
'managefields'),
'contacts.contactgroup': ('api',),
'ivr.ivrcall': ('start',),
'locations.adminboundary': ('alias',
'api',
'boundaries',
'geometry'),
'orgs.org': ('accounts',
'smtp_server',
'api',
'country',
'chatbase',
'clear_cache',
'create_login',
'create_sub_org',
'download',
'edit',
'edit_sub_org',
'export',
'grant',
'home',
'import',
'join',
'languages',
'manage',
'manage_accounts',
'manage_accounts_sub_org',
'nexmo_configuration',
'nexmo_account',
'nexmo_connect',
'plivo_connect',
'profile',
'resthooks',
'service',
'signup',
'sub_orgs',
'surveyor',
'transfer_credits',
'transfer_to_account',
'trial',
'twilio_account',
'twilio_connect',
'webhook',
),
'orgs.usersettings': ('phone',),
'channels.channel': ('api',
'bulk_sender_options',
'claim',
'configuration',
'create_bulk_sender',
'create_caller',
'errors',
'facebook_whitelist',
'search_nexmo',
'search_numbers',
),
'channels.channellog': ('session',),
'channels.channelevent': ('api',
'calls'),
'flows.flowstart': ('api',),
'flows.flow': ('activity',
'activity_chart',
'activity_list',
'analytics',
'api',
'archived',
'broadcast',
'campaign',
'completion',
'copy',
'editor',
'export',
'export_results',
'filter',
'json',
'read',
'recent_messages',
'results',
'revisions',
'run_table',
'simulate',
'upload_action_recording',
'upload_media_action',
),
'flows.ruleset': ('analytics',
'choropleth',
'map',
'results',
),
'msgs.msg': ('api',
'archive',
'archived',
'export',
'failed',
'filter',
'flow',
'inbox',
'label',
'outbox',
'sent',
'test',
'update',
),
'msgs.broadcast': ('api',
'detail',
'schedule',
'schedule_list',
'schedule_read',
'send',
),
'msgs.label': ('api', 'create', 'create_folder'),
'orgs.topup': ('manage',),
'triggers.trigger': ('archived',
'catchall',
'follow',
'inbound_call',
'keyword',
'missed_call',
'new_conversation',
'referral',
'register',
'schedule',
'ussd',
),
}
# assigns the permissions that each group should have
GROUP_PERMISSIONS = {
"Service Users": ( # internal Temba services have limited permissions
'msgs.msg_create',
),
"Alpha": (
),
"Beta": (
),
"Surveyors": (
'contacts.contact_api',
'contacts.contactfield_api',
'flows.flow_api',
'locations.adminboundary_api',
'orgs.org_api',
'orgs.org_surveyor',
'msgs.msg_api',
),
"Granters": (
'orgs.org_grant',
),
"Customer Support": (
'auth.user_list',
'auth.user_update',
'contacts.contact_break_anon',
'flows.flow_editor',
'flows.flow_json',
'flows.flow_read',
'flows.flow_revisions',
'flows.flowrun_delete',
'orgs.org_dashboard',
'orgs.org_grant',
'orgs.org_manage',
'orgs.org_update',
'orgs.org_service',
'orgs.topup_create',
'orgs.topup_manage',
'orgs.topup_update',
),
"Administrators": (
'airtime.airtimetransfer_list',
'airtime.airtimetransfer_read',
'api.apitoken_refresh',
'api.resthook_api',
'api.resthook_list',
'api.resthooksubscriber_api',
'api.webhookevent_api',
'api.webhookevent_list',
'api.webhookevent_read',
'campaigns.campaign.*',
'campaigns.campaignevent.*',
'contacts.contact_api',
'contacts.contact_block',
'contacts.contact_blocked',
'contacts.contact_create',
'contacts.contact_customize',
'contacts.contact_delete',
'contacts.contact_export',
'contacts.contact_filter',
'contacts.contact_history',
'contacts.contact_import',
'contacts.contact_list',
'contacts.contact_omnibox',
'contacts.contact_read',
'contacts.contact_stopped',
'contacts.contact_unblock',
'contacts.contact_unstop',
'contacts.contact_update',
'contacts.contact_update_fields',
'contacts.contact_update_fields_input',
'contacts.contactfield.*',
'contacts.contactgroup.*',
'csv_imports.importtask.*',
'ivr.ivrcall.*',
'ussd.ussdsession.*',
'locations.adminboundary_alias',
'locations.adminboundary_api',
'locations.adminboundary_boundaries',
'locations.adminboundary_geometry',
'orgs.org_accounts',
'orgs.org_smtp_server',
'orgs.org_api',
'orgs.org_country',
'orgs.org_chatbase',
'orgs.org_create_sub_org',
'orgs.org_download',
'orgs.org_edit',
'orgs.org_edit_sub_org',
'orgs.org_export',
'orgs.org_home',
'orgs.org_import',
'orgs.org_languages',
'orgs.org_manage_accounts',
'orgs.org_manage_accounts_sub_org',
'orgs.org_nexmo_account',
'orgs.org_nexmo_connect',
'orgs.org_nexmo_configuration',
'orgs.org_plivo_connect',
'orgs.org_profile',
'orgs.org_resthooks',
'orgs.org_sub_orgs',
'orgs.org_transfer_credits',
'orgs.org_transfer_to_account',
'orgs.org_twilio_account',
'orgs.org_twilio_connect',
'orgs.org_webhook',
'orgs.topup_list',
'orgs.topup_read',
'orgs.usersettings_phone',
'orgs.usersettings_update',
'channels.channel_api',
'channels.channel_bulk_sender_options',
'channels.channel_claim',
'channels.channel_configuration',
'channels.channel_create',
'channels.channel_create_bulk_sender',
'channels.channel_create_caller',
'channels.channel_facebook_whitelist',
'channels.channel_delete',
'channels.channel_list',
'channels.channel_read',
'channels.channel_search_nexmo',
'channels.channel_search_numbers',
'channels.channel_update',
'channels.channelevent.*',
'channels.channellog_list',
'channels.channellog_read',
'channels.channellog_session',
'reports.report.*',
'flows.flow.*',
'flows.flowstart_api',
'flows.flowlabel.*',
'flows.ruleset.*',
'flows.flowrun_delete',
'schedules.schedule.*',
'msgs.broadcast.*',
'msgs.broadcastschedule.*',
'msgs.label.*',
'msgs.msg_api',
'msgs.msg_archive',
'msgs.msg_archived',
'msgs.msg_delete',
'msgs.msg_export',
'msgs.msg_failed',
'msgs.msg_filter',
'msgs.msg_flow',
'msgs.msg_inbox',
'msgs.msg_label',
'msgs.msg_outbox',
'msgs.msg_sent',
'msgs.msg_update',
'triggers.trigger.*',
),
"Editors": (
'api.apitoken_refresh',
'api.resthook_api',
'api.resthook_list',
'api.resthooksubscriber_api',
'api.webhookevent_api',
'api.webhookevent_list',
'api.webhookevent_read',
'airtime.airtimetransfer_list',
'airtime.airtimetransfer_read',
'campaigns.campaign.*',
'campaigns.campaignevent.*',
'contacts.contact_api',
'contacts.contact_block',
'contacts.contact_blocked',
'contacts.contact_create',
'contacts.contact_customize',
'contacts.contact_delete',
'contacts.contact_export',
'contacts.contact_filter',
'contacts.contact_history',
'contacts.contact_import',
'contacts.contact_list',
'contacts.contact_omnibox',
'contacts.contact_read',
'contacts.contact_stopped',
'contacts.contact_unblock',
'contacts.contact_unstop',
'contacts.contact_update',
'contacts.contact_update_fields',
'contacts.contact_update_fields_input',
'contacts.contactfield.*',
'contacts.contactgroup.*',
'csv_imports.importtask.*',
'ivr.ivrcall.*',
'ussd.ussdsession.*',
'locations.adminboundary_alias',
'locations.adminboundary_api',
'locations.adminboundary_boundaries',
'locations.adminboundary_geometry',
'orgs.org_api',
'orgs.org_download',
'orgs.org_export',
'orgs.org_home',
'orgs.org_import',
'orgs.org_profile',
'orgs.org_resthooks',
'orgs.org_webhook',
'orgs.topup_list',
'orgs.topup_read',
'orgs.usersettings_phone',
'orgs.usersettings_update',
'channels.channel_api',
'channels.channel_bulk_sender_options',
'channels.channel_claim',
'channels.channel_configuration',
'channels.channel_create',
'channels.channel_create_bulk_sender',
'channels.channel_create_caller',
'channels.channel_delete',
'channels.channel_list',
'channels.channel_read',
'channels.channel_search_numbers',
'channels.channel_update',
'channels.channelevent.*',
'reports.report.*',
'flows.flow.*',
'flows.flowstart_api',
'flows.flowlabel.*',
'flows.ruleset.*',
'schedules.schedule.*',
'msgs.broadcast.*',
'msgs.broadcastschedule.*',
'msgs.label.*',
'msgs.msg_api',
'msgs.msg_archive',
'msgs.msg_archived',
'msgs.msg_delete',
'msgs.msg_export',
'msgs.msg_failed',
'msgs.msg_filter',
'msgs.msg_flow',
'msgs.msg_inbox',
'msgs.msg_label',
'msgs.msg_outbox',
'msgs.msg_sent',
'msgs.msg_update',
'triggers.trigger.*',
),
"Viewers": (
'api.resthook_list',
'campaigns.campaign_archived',
'campaigns.campaign_list',
'campaigns.campaign_read',
'campaigns.campaignevent_read',
'contacts.contact_blocked',
'contacts.contact_export',
'contacts.contact_filter',
'contacts.contact_history',
'contacts.contact_list',
'contacts.contact_read',
'contacts.contact_stopped',
'locations.adminboundary_boundaries',
'locations.adminboundary_geometry',
'locations.adminboundary_alias',
'orgs.org_download',
'orgs.org_export',
'orgs.org_home',
'orgs.org_profile',
'orgs.topup_list',
'orgs.topup_read',
'channels.channel_list',
'channels.channel_read',
'channels.channelevent_calls',
'flows.flow_activity',
'flows.flow_activity_chart',
'flows.flow_archived',
'flows.flow_campaign',
'flows.flow_completion',
'flows.flow_export',
'flows.flow_export_results',
'flows.flow_filter',
'flows.flow_list',
'flows.flow_read',
'flows.flow_editor',
'flows.flow_json',
'flows.flow_recent_messages',
'flows.flow_results',
'flows.flow_run_table',
'flows.flow_simulate',
'flows.ruleset_analytics',
'flows.ruleset_results',
'flows.ruleset_choropleth',
'msgs.broadcast_schedule_list',
'msgs.broadcast_schedule_read',
'msgs.msg_archived',
'msgs.msg_export',
'msgs.msg_failed',
'msgs.msg_filter',
'msgs.msg_flow',
'msgs.msg_inbox',
'msgs.msg_outbox',
'msgs.msg_sent',
'triggers.trigger_archived',
'triggers.trigger_list',
)
}
# -----------------------------------------------------------------------------------
# Login / Logout
# -----------------------------------------------------------------------------------
LOGIN_URL = "/users/login/"
LOGOUT_URL = "/users/logout/"
LOGIN_REDIRECT_URL = "/org/choose/"
LOGOUT_REDIRECT_URL = "/"
AUTHENTICATION_BACKENDS = (
'smartmin.backends.CaseInsensitiveBackend',
)
ANONYMOUS_USER_NAME = 'AnonymousUser'
# -----------------------------------------------------------------------------------
# Our test runner includes a mocked HTTP server and the ability to exclude apps
# -----------------------------------------------------------------------------------
TEST_RUNNER = 'temba.tests.TembaTestRunner'
TEST_EXCLUDE = ('smartmin',)
# -----------------------------------------------------------------------------------
# Debug Toolbar
# -----------------------------------------------------------------------------------
INTERNAL_IPS = iptools.IpRangeList(
'127.0.0.1',
'192.168.0.10',
'192.168.0.0/24', # network block
'0.0.0.0'
)
DEBUG_TOOLBAR_CONFIG = {
'INTERCEPT_REDIRECTS': False, # disable redirect traps
}
# -----------------------------------------------------------------------------------
# Crontab Settings ..
# -----------------------------------------------------------------------------------
CELERYBEAT_SCHEDULE = {
"retry-webhook-events": {
'task': 'retry_events_task',
'schedule': timedelta(seconds=300),
},
"check-channels": {
'task': 'check_channels_task',
'schedule': timedelta(seconds=300),
},
"schedules": {
'task': 'check_schedule_task',
'schedule': timedelta(seconds=60),
},
"campaigns": {
'task': 'check_campaigns_task',
'schedule': timedelta(seconds=60),
},
"check-flows": {
'task': 'check_flows_task',
'schedule': timedelta(seconds=60),
},
"check-flow-timeouts": {
'task': 'check_flow_timeouts_task',
'schedule': timedelta(seconds=20),
},
"check-credits": {
'task': 'check_credits_task',
'schedule': timedelta(seconds=900)
},
"check-messages-task": {
'task': 'check_messages_task',
'schedule': timedelta(seconds=300)
},
"fail-old-messages": {
'task': 'fail_old_messages',
'schedule': crontab(hour=0, minute=0),
},
"purge-broadcasts": {
'task': 'purge_broadcasts_task',
'schedule': crontab(hour=1, minute=0),
},
"clear-old-msg-external-ids": {
'task': 'clear_old_msg_external_ids',
'schedule': crontab(hour=2, minute=0),
},
"trim-channel-log": {
'task': 'trim_channel_log_task',
'schedule': crontab(hour=3, minute=0),
},
"trim-webhook-event": {
'task': 'trim_webhook_event_task',
'schedule': crontab(hour=3, minute=0),
},
"calculate-credit-caches": {
'task': 'calculate_credit_caches',
'schedule': timedelta(days=3),
},
"squash-flowruncounts": {
'task': 'squash_flowruncounts',
'schedule': timedelta(seconds=300),
},
"squash-flowpathcounts": {
'task': 'squash_flowpathcounts',
'schedule': timedelta(seconds=300),
},
"prune-recentmessages": {
'task': 'prune_recentmessages',
'schedule': timedelta(seconds=300),
},
"squash-channelcounts": {
'task': 'squash_channelcounts',
'schedule': timedelta(seconds=300),
},
"squash-systemlabels": {
'task': 'squash_systemlabels',
'schedule': timedelta(seconds=300),
},
"squash-topupcredits": {
'task': 'squash_topupcredits',
'schedule': timedelta(seconds=300),
},
"squash-contactgroupcounts": {
'task': 'squash_contactgroupcounts',
'schedule': timedelta(seconds=300),
},
"refresh-jiochat-access-tokens": {
'task': 'refresh_jiochat_access_tokens',
'schedule': timedelta(seconds=3600),
},
}
# Mapping of task name to task function path, used when CELERY_ALWAYS_EAGER is set to True
CELERY_TASK_MAP = {
'send_msg_task': 'temba.channels.tasks.send_msg_task',
'start_msg_flow_batch': 'temba.flows.tasks.start_msg_flow_batch_task',
'handle_event_task': 'temba.msgs.tasks.handle_event_task',
}
# -----------------------------------------------------------------------------------
# Async tasks with celery
# -----------------------------------------------------------------------------------
REDIS_HOST = 'localhost'
REDIS_PORT = 6379
# we use a redis db of 10 for testing so that we maintain caches for dev
REDIS_DB = 10 if TESTING else 15
BROKER_URL = 'redis://%s:%d/%d' % (REDIS_HOST, REDIS_PORT, REDIS_DB)
# by default, celery doesn't have any timeout on our redis connections, this fixes that
BROKER_TRANSPORT_OPTIONS = {'socket_timeout': 5}
CELERY_RESULT_BACKEND = None
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_SERIALIZER = 'json'
IS_PROD = False
HOSTNAME = "localhost"
# The URL and port of the proxy server to use when needed (if any, in requests format)
OUTGOING_PROXIES = {}
# -----------------------------------------------------------------------------------
# Cache to Redis
# -----------------------------------------------------------------------------------
CACHES = {
"default": {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": "redis://%s:%s/%s" % (REDIS_HOST, REDIS_PORT, REDIS_DB),
"OPTIONS": {
"CLIENT_CLASS": "django_redis.client.DefaultClient",
}
}
}
# -----------------------------------------------------------------------------------
# Django-rest-framework configuration
# -----------------------------------------------------------------------------------
REST_FRAMEWORK = {
'DEFAULT_PERMISSION_CLASSES': (
'rest_framework.permissions.IsAuthenticated',
),
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.SessionAuthentication',
'temba.api.support.APITokenAuthentication',
),
'DEFAULT_THROTTLE_CLASSES': (
'temba.api.support.OrgRateThrottle',
),
'DEFAULT_THROTTLE_RATES': {
'v2': '2500/hour',
'v2.contacts': '2500/hour',
'v2.messages': '2500/hour',
'v2.runs': '2500/hour',
'v2.api': '2500/hour',
},
'PAGE_SIZE': 250,
'DEFAULT_RENDERER_CLASSES': (
'temba.api.support.DocumentationRenderer',
'rest_framework.renderers.JSONRenderer'
),
'EXCEPTION_HANDLER': 'temba.api.support.temba_exception_handler',
'UNICODE_JSON': False
}
REST_HANDLE_EXCEPTIONS = not TESTING
# -----------------------------------------------------------------------------------
# Django Compressor configuration
# -----------------------------------------------------------------------------------
if TESTING:
# if only testing, disable coffeescript and less compilation
COMPRESS_PRECOMPILERS = ()
else:
COMPRESS_PRECOMPILERS = (
('text/less', 'lessc --include-path="%s" {infile} {outfile}' % os.path.join(PROJECT_DIR, '../static', 'less')),
('text/coffeescript', 'coffee --compile --stdio')
)
COMPRESS_ENABLED = False
COMPRESS_OFFLINE = False
# build up our offline compression context based on available brands
COMPRESS_OFFLINE_CONTEXT = []
for brand in BRANDING.values():
context = dict(STATIC_URL=STATIC_URL, base_template='frame.html', debug=False, testing=False)
context['brand'] = dict(slug=brand['slug'], styles=brand['styles'])
COMPRESS_OFFLINE_CONTEXT.append(context)
MAGE_API_URL = 'http://localhost:8026/api/v1'
MAGE_AUTH_TOKEN = '___MAGE_TOKEN_YOU_PICK__'
# -----------------------------------------------------------------------------------
# RapidPro configuration settings
# -----------------------------------------------------------------------------------
######
# DANGER: only turn this on if you know what you are doing!
# could cause messages to be sent to live customer aggregators
SEND_MESSAGES = False
######
# DANGER: only turn this on if you know what you are doing!
# could cause external APIs to be called in test environment
SEND_WEBHOOKS = False
######
# DANGER: only turn this on if you know what you are doing!
# could cause emails to be sent in test environment
SEND_EMAILS = False
######
# DANGER: only turn this on if you know what you are doing!
# could cause airtime transfers in test environment
SEND_AIRTIME = False
######
# DANGER: only turn this on if you know what you are doing!
# could cause data to be sent to Chatbase in test environment
SEND_CHATBASE = False
######
# DANGER: only turn this on if you know what you are doing!
# could cause calls in test environments
SEND_CALLS = False
MESSAGE_HANDLERS = [
'temba.triggers.handlers.TriggerHandler',
'temba.flows.handlers.FlowHandler',
'temba.triggers.handlers.CatchAllHandler'
]
CHANNEL_TYPES = [
'temba.channels.types.external.ExternalType',
'temba.channels.types.facebook.FacebookType',
'temba.channels.types.firebase.FirebaseCloudMessagingType',
'temba.channels.types.infobip.InfobipType',
'temba.channels.types.jiochat.JioChatType',
'temba.channels.types.line.LineType',
'temba.channels.types.telegram.TelegramType',
'temba.channels.types.twitter.TwitterType',
'temba.channels.types.twitter_activity.TwitterActivityType',
'temba.channels.types.viber_public.ViberPublicType',
]
# -----------------------------------------------------------------------------------
# Store sessions in our cache
# -----------------------------------------------------------------------------------
SESSION_ENGINE = "django.contrib.sessions.backends.cached_db"
SESSION_CACHE_ALIAS = "default"
# -----------------------------------------------------------------------------------
# 3rd Party Integration Keys
# -----------------------------------------------------------------------------------
TWITTER_API_KEY = os.environ.get('TWITTER_API_KEY', 'MISSING_TWITTER_API_KEY')
TWITTER_API_SECRET = os.environ.get('TWITTER_API_SECRET', 'MISSING_TWITTER_API_SECRET')
SEGMENT_IO_KEY = os.environ.get('SEGMENT_IO_KEY', '')
LIBRATO_USER = os.environ.get('LIBRATO_USER', '')
LIBRATO_TOKEN = os.environ.get('LIBRATO_TOKEN', '')
# -----------------------------------------------------------------------------------
# IP Addresses
# These are the externally accessible IP addresses of the servers running RapidPro.
# Needed for channel types that authenticate by whitelisting public IPs.
#
# You need to change these to real addresses for those channel types to work.
# -----------------------------------------------------------------------------------
IP_ADDRESSES = ('172.16.10.10', '162.16.10.20')
# -----------------------------------------------------------------------------------
# Installs may choose how big they want their text messages and contact fields to be.
# -----------------------------------------------------------------------------------
MSG_FIELD_SIZE = 640
VALUE_FIELD_SIZE = 640
# -----------------------------------------------------------------------------------
# Installs may choose how long to keep the channel logs, in hours.
# By default we keep success logs for 48 hours and error logs for 30 days (30 * 24 hours).
# Use falsy values to keep the logs forever.
# -----------------------------------------------------------------------------------
SUCCESS_LOGS_TRIM_TIME = 48
ALL_LOGS_TRIM_TIME = 24 * 30
# -----------------------------------------------------------------------------------
# Which channel types will be sent using Courier instead of RapidPro
# -----------------------------------------------------------------------------------
COURIER_CHANNELS = set()
# -----------------------------------------------------------------------------------
# Chatbase integration
# -----------------------------------------------------------------------------------
CHATBASE_API_URL = 'https://chatbase.com/api/message'
| agpl-3.0 | -3,088,125,174,749,044,000 | 31.231426 | 119 | 0.498953 | false |
jimblandy/plife | setup.py | 1 | 1112 | from distutils.core import setup, Extension
from glob import glob
from os.path import join
version = "0.4.1"
bindir = "bin"
docdir = join ("share", "doc", "plife-" + version)
setup (
name = "plife-python",
version = version,
description = "Pattern construction tool for Conway's Game of Life",
long_description = """\
Python package intended to help in designing complex patterns for
Conway's Game of Life and related cellular automata.
Sample pattern-describing scripts and resulting patterns are included.
You may also want to install the 'plife' package to view them.""",
author = "Eugene Langvagen",
author_email = "[email protected]",
url = "http://plife.sourceforge.net/",
license = "GPL",
packages = ["life"],
package_dir = {"life": "python/life"},
ext_modules = [Extension ("life.lifeint", [
"src/life.cc", "src/life_io.cc",
"src/life_rect.cc", "src/lifeint.cc"
], libraries = ["stdc++"])],
data_files = [
(join (docdir, "samples"), glob (join ("samples", "*.py")) + glob (join ("samples", "*.lif"))),
(join (docdir, "samples", "lib"), glob (join ("samples", "lib", "*.py")))
]
)
| gpl-2.0 | 8,316,089,656,023,166,000 | 32.69697 | 97 | 0.666367 | false |
JNeiger/robocup-software | soccer/gameplay/tests/test_role_assignment.py | 1 | 1914 | import unittest
import role_assignment
import robocup
class TestRoleAssignment(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(TestRoleAssignment, self).__init__(*args, **kwargs)
# Some objects initialized below depend on the config system being setup
self.config = robocup.Configuration.FromRegisteredConfigurables()
self.context = robocup.Context()
def test_pos_cost(self):
"""Ensure that when requirements specify a target position, it is taken
into account in assignment"""
bot1 = robocup.OurRobot(1, self.context)
bot1.set_pos_for_testing(robocup.Point(1, 6))
bot2 = robocup.OurRobot(2, self.context)
bot2.set_pos_for_testing(robocup.Point(2, 3))
req1 = role_assignment.RoleRequirements()
req1.destination_shape = robocup.Point(1, 7)
req2 = role_assignment.RoleRequirements()
req2.destination_shape = robocup.Point(3, 4)
req_tree = {'role1': req1, 'role2': req2}
assignments = role_assignment.assign_roles([bot1, bot2], req_tree)
self.assertEqual(len(assignments), 2)
self.assertEqual(assignments['role1'][1], bot1)
self.assertEqual(assignments['role2'][1], bot2)
def test_not_enough_bots(self):
"""If there's not enough robots to do an assignment, it should raise an error"""
bot1 = robocup.OurRobot(1, self.context)
bot1.set_pos_for_testing(robocup.Point(1, 6))
req1 = role_assignment.RoleRequirements()
req1.pos = robocup.Point(1, 7)
req1.required = True
req2 = role_assignment.RoleRequirements()
req2.pos = robocup.Point(3, 4)
req2.required = True
req_tree = {'role1': req1, 'role2': req2}
self.assertRaises(role_assignment.ImpossibleAssignmentError,
role_assignment.assign_roles, [bot1], req_tree)
| apache-2.0 | -9,196,590,269,994,909,000 | 36.529412 | 88 | 0.646813 | false |
NikNitro/Python-iBeacon-Scan | sympy/core/decorators.py | 1 | 4111 | """
SymPy core decorators.
The purpose of this module is to expose decorators without any other
dependencies, so that they can be easily imported anywhere in sympy/core.
"""
from __future__ import print_function, division
from functools import wraps
from .sympify import SympifyError, sympify
from sympy.core.compatibility import get_function_code
def deprecated(**decorator_kwargs):
"""This is a decorator which can be used to mark functions
as deprecated. It will result in a warning being emitted
when the function is used."""
def deprecated_decorator(func):
@wraps(func)
def new_func(*args, **kwargs):
from sympy.utilities.exceptions import SymPyDeprecationWarning
decorator_kwargs.setdefault('feature', func.__name__)
SymPyDeprecationWarning(**decorator_kwargs).warn(stacklevel=3)
return func(*args, **kwargs)
new_func._sympy_deprecated_func = func
return new_func
return deprecated_decorator
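# Illustrative sketch (hypothetical names, not from this module): applying the
# decorator above to a helper, forwarding SymPyDeprecationWarning keyword
# arguments such as useinstead/issue/deprecated_since_version.
#
#     @deprecated(useinstead="new_helper", issue=12345,
#                 deprecated_since_version="1.1")
#     def old_helper(expr):
#         return new_helper(expr)
#
# Calling old_helper() then emits a SymPyDeprecationWarning (with 'feature'
# defaulting to the function name) before delegating to the wrapped function.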
def _sympifyit(arg, retval=None):
"""decorator to smartly _sympify function arguments
@_sympifyit('other', NotImplemented)
def add(self, other):
...
In add, other can be thought of as already being a SymPy object.
If it is not, the code is likely to catch an exception, then other will
be explicitly _sympified, and the whole code restarted.
if _sympify(arg) fails, NotImplemented will be returned
see: __sympifyit
"""
def deco(func):
return __sympifyit(func, arg, retval)
return deco
def __sympifyit(func, arg, retval=None):
"""decorator to _sympify `arg` argument for function `func`
don't use directly -- use _sympifyit instead
"""
# we support f(a,b) only
if not get_function_code(func).co_argcount:
raise LookupError("func not found")
# only b is _sympified
assert get_function_code(func).co_varnames[1] == arg
if retval is None:
@wraps(func)
def __sympifyit_wrapper(a, b):
return func(a, sympify(b, strict=True))
else:
@wraps(func)
def __sympifyit_wrapper(a, b):
try:
# If an external class has _op_priority, it knows how to deal
# with sympy objects. Otherwise, it must be converted.
if not hasattr(b, '_op_priority'):
b = sympify(b, strict=True)
return func(a, b)
except SympifyError:
return retval
return __sympifyit_wrapper
def call_highest_priority(method_name):
"""A decorator for binary special methods to handle _op_priority.
Binary special methods in Expr and its subclasses use a special attribute
'_op_priority' to determine whose special method will be called to
handle the operation. In general, the object having the highest value of
'_op_priority' will handle the operation. Expr and subclasses that define
custom binary special methods (__mul__, etc.) should decorate those
methods with this decorator to add the priority logic.
The ``method_name`` argument is the name of the method of the other class
that will be called. Use this decorator in the following manner::
# Call other.__rmul__ if other._op_priority > self._op_priority
@call_highest_priority('__rmul__')
def __mul__(self, other):
...
# Call other.__mul__ if other._op_priority > self._op_priority
@call_highest_priority('__mul__')
def __rmul__(self, other):
...
"""
def priority_decorator(func):
@wraps(func)
def binary_op_wrapper(self, other):
if hasattr(other, '_op_priority'):
if other._op_priority > self._op_priority:
try:
f = getattr(other, method_name)
except AttributeError:
pass
else:
return f(self)
return func(self, other)
return binary_op_wrapper
return priority_decorator
| gpl-3.0 | -6,863,624,794,790,311,000 | 32.975207 | 78 | 0.620044 | false |
impactlab/jps-handoff | model/postgres_dump_recent.py | 1 | 1839 | #!/usr/bin/env python
import psycopg2
import csv, sys, os, datetime
datadir = '/data/extract/'
with open('meter_list.csv', 'r') as f:
fcsv = csv.reader(f)
meters = fcsv.next()
stop_date = datetime.datetime.now()
start_date = stop_date - datetime.timedelta(days=365)
start_date_evt = stop_date - datetime.timedelta(days=30)
conn = psycopg2.connect(service='jps')
cur = conn.cursor()
for meter in meters:
out_filename = meter + '__' + \
datetime.datetime.strftime(start_date,'%Y-%m-%dT%H%M%S') + '__' + \
datetime.datetime.strftime(stop_date,'%Y-%m-%dT%H%M%S') + '.csv'
out_filename_evt = 'evt__' + meter + '__' + \
datetime.datetime.strftime(start_date_evt,'%Y-%m-%dT%H%M%S') + '__' + \
datetime.datetime.strftime(stop_date,'%Y-%m-%dT%H%M%S') + '.csv'
cur.execute('SELECT m.meter_id, p.ts, p.kw, p.kva FROM viewer_meter m JOIN viewer_profiledatapoint p ON '+
'm.id=p.meter_id WHERE m.meter_id=%s AND p.ts > %s AND p.ts <= %s',(meter,start_date,stop_date))
with open(datadir+out_filename,'w') as f:
fcsv = csv.writer(f)
for line in cur:
if len(line) != 4: continue
ts = datetime.datetime.strftime(line[1], '%Y/%m/%d %H:%M')
fcsv.writerow([line[0], ts, line[2], line[3]])
cur.execute('SELECT m.meter_id, p.ts, p.event FROM viewer_meter m JOIN viewer_eventdatapoint p ON '+
'm.id=p.meter_id WHERE m.meter_id=%s AND p.ts > %s AND p.ts <= %s',(meter,start_date,stop_date))
with open(datadir+out_filename_evt,'w') as f:
fcsv = csv.writer(f)
fcsv.writerow(['Device Id','Time','Event'])
for line in cur:
if len(line) != 3: continue
ts = datetime.datetime.strftime(line[1], '%Y-%m-%d %H:%M:%S')
fcsv.writerow([line[0], ts, line[2]])
| mit | 6,000,819,424,975,845,000 | 40.795455 | 112 | 0.586188 | false |
iceflow/aws-demo | es/cf_logs_to_es/cf_logs_to_es.py | 1 | 4234 | #!/usr/bin/python
# -*- coding: utf8 -*-
import os
import sys
import json
from log import *
from es_put import *
from ip2geo import *
### Constant definitions
DUMP_PROCESS_NUM = 2000 # once this many log lines have accumulated, flush them to ES
INDICES_PREFIX = "cf-logs-"
DEFAULT_TYPE = "log"
CF_LOGS_WEB_FORMAT_SIZE = 24
CF_LOGS_RTMP_FORMAT_SIZE = 13
################ Global variables - start ########################################
#### Logging
log = Log('CF_LOGS_TO_ES', '/var/log/cf_logs_to_es.log')
#### Application data
es_server = None # Elasticsearch server address
g_value_body = "" # buffer of documents waiting to be bulk-inserted
################ Global variables - end ########################################
# // Full network packet format: total length(4) + protocol ID(2) + protobuf data length(4) + protobuf data payload
def process_line(s):
if CF_LOGS_WEB_FORMAT_SIZE != len(s):
        log.info('log field count mismatch, expected %d: got %d (%s)'%(CF_LOGS_WEB_FORMAT_SIZE, len(s), ' '.join(s)))
return
    # split the record into named fields
data = {}
data["@timestamp"] = "%s:%s"%(s[0], s[1]);
data["x-edge-location"] = s[2];
data["sc-bytes"] = int(s[3]);
data["c-ip"] = s[4];
data["location"] = get_geo_location(s[4]);
data["cs-method"] = s[5];
data["cs-host"] = s[6];
data["cs-uri-stem"] = s[7];
data["sc-status"] = s[8];
data["cs-feferer"] = s[9];
data["cs-user-agent"] = s[10];
data["cs-uri-query"] = s[11];
data["cs-cookie"] = s[12];
data["x-edge-result-type"] = s[13];
data["x-edge-request-id"] = s[14];
data["x-host-header"] = s[15];
data["cs-protocol"] = s[16];
data["cs-bytes"] = s[17];
data["time-taken"] = s[18];
data["x-forwarded-for"] = s[19];
data["ssl-protocol"] = s[20];
data["ssl-cipher"] = s[21];
data["x-edge-response-result-type"] = s[22];
data["cs-protocol-version"] = s[23];
#print data
#put_data_to_es(es_server, '%s%s'%(INDICES_PREFIX, s[0]), DEFAULT_TYPE, data)
#put_data_to_es(es_server, 'cf-logs-2017-02-25', 'log', data)
global g_value_body
g_value_body += '{"index":{"_index":"%s%s","_type":"%s"}}\n%s\n'%(INDICES_PREFIX, s[0], DEFAULT_TYPE, json.dumps(data))
def put_data_to_es(filename):
    ''' Flush the buffered documents to Elasticsearch in one bulk request.
        The filename argument is only used for error reporting.
    '''
global g_value_body
#print "put_data_to_es: ", filename
if len(g_value_body) > 0:
try:
bulk_data_to_es(es_server, g_value_body)
            #log.debug('bulk insert succeeded')
print "+",
except Exception,data:
            log.debug('data file: %s bulk insert failed: Data[%s]'%(filename, g_value_body))
print(data)
    # clear the buffer
g_value_body = ""
def parse_file(es_server, filename):
    log.debug('start parsing file: %s'%(filename))
if not os.path.exists(filename):
        log.debug('file %s does not exist'%(filename))
return
    total_num = 0 # total number of records processed
    process_num = 0 # number of records not yet flushed
with open(filename) as f:
for line in f.readlines():
line = line.strip()
if not len(line) or line.startswith('#'):
continue
sections = line.split('\t')
if len(sections) > 1:
#print ("sections[%d]"%len(sections))
                process_line(sections)  # appends the parsed record to g_value_body
if ( process_num > DUMP_PROCESS_NUM ):
put_data_to_es(filename)
process_num = 0
total_num += 1
process_num += 1
    ## after parsing, flush any remaining buffered data
if process_num > 0:
put_data_to_es(filename)
    log.debug('finished parsing file: %s count: %d'%(filename, total_num))
def usage(prog):
print( "%s usage:"%(prog))
print(" %s es_server log_file [log_file] [log_file] ... : 分析日志文件列表"%(prog))
if __name__ == '__main__':
    # argument check
argc = len(sys.argv)
if argc < 2:
usage(sys.argv[0])
sys.exit(1)
es_server = sys.argv[1]
    log.info('start batch parsing of log files into %s: %s'%(es_server, ' '.join(sys.argv[1:])))
    # parse the data files
for pos in xrange(argc-2):
parse_file(es_server, sys.argv[pos+2])
sys.exit(0)
| gpl-3.0 | 3,759,973,895,854,942,700 | 25.575342 | 123 | 0.515722 | false |
rhdedgar/openshift-tools | openshift/installer/vendored/openshift-ansible-3.6.173/roles/openshift_health_checker/openshift_checks/docker_image_availability.py | 1 | 9598 | """Check that required Docker images are available."""
from openshift_checks import OpenShiftCheck
from openshift_checks.mixins import DockerHostMixin
NODE_IMAGE_SUFFIXES = ["haproxy-router", "docker-registry", "deployer", "pod"]
DEPLOYMENT_IMAGE_INFO = {
"origin": {
"namespace": "openshift",
"name": "origin",
"registry_console_image": "cockpit/kubernetes",
},
"openshift-enterprise": {
"namespace": "openshift3",
"name": "ose",
"registry_console_image": "registry.access.redhat.com/openshift3/registry-console",
},
}
class DockerImageAvailability(DockerHostMixin, OpenShiftCheck):
"""Check that required Docker images are available.
Determine docker images that an install would require and check that they
are either present in the host's docker index, or available for the host to pull
with known registries as defined in our inventory file (or defaults).
"""
name = "docker_image_availability"
tags = ["preflight"]
# we use python-docker-py to check local docker for images, and skopeo
# to look for images available remotely without waiting to pull them.
dependencies = ["python-docker-py", "skopeo"]
skopeo_img_check_command = "timeout 10 skopeo inspect --tls-verify=false docker://{registry}/{image}"
def __init__(self, *args, **kwargs):
super(DockerImageAvailability, self).__init__(*args, **kwargs)
# record whether we could reach a registry or not (and remember results)
self.reachable_registries = {}
def is_active(self):
"""Skip hosts with unsupported deployment types."""
deployment_type = self.get_var("openshift_deployment_type")
has_valid_deployment_type = deployment_type in DEPLOYMENT_IMAGE_INFO
return super(DockerImageAvailability, self).is_active() and has_valid_deployment_type
def run(self):
msg, failed = self.ensure_dependencies()
if failed:
return {
"failed": True,
"msg": "Some dependencies are required in order to check Docker image availability.\n" + msg
}
required_images = self.required_images()
missing_images = set(required_images) - set(self.local_images(required_images))
# exit early if all images were found locally
if not missing_images:
return {}
registries = self.known_docker_registries()
if not registries:
return {"failed": True, "msg": "Unable to retrieve any docker registries."}
available_images = self.available_images(missing_images, registries)
unavailable_images = set(missing_images) - set(available_images)
if unavailable_images:
registries = [
reg if self.reachable_registries.get(reg, True) else reg + " (unreachable)"
for reg in registries
]
msg = (
"One or more required Docker images are not available:\n {}\n"
"Configured registries: {}\n"
"Checked by: {}"
).format(
",\n ".join(sorted(unavailable_images)),
", ".join(registries),
self.skopeo_img_check_command
)
return dict(failed=True, msg=msg)
return {}
def required_images(self):
"""
Determine which images we expect to need for this host.
Returns: a set of required images like 'openshift/origin:v3.6'
The thorny issue of determining the image names from the variables is under consideration
via https://github.com/openshift/openshift-ansible/issues/4415
For now we operate as follows:
* For containerized components (master, node, ...) we look at the deployment type and
use openshift/origin or openshift3/ose as the base for those component images. The
version is openshift_image_tag as determined by the openshift_version role.
* For OpenShift-managed infrastructure (router, registry...) we use oreg_url if
it is defined; otherwise we again use the base that depends on the deployment type.
Registry is not included in constructed images. It may be in oreg_url or etcd image.
"""
required = set()
deployment_type = self.get_var("openshift_deployment_type")
host_groups = self.get_var("group_names")
# containerized etcd may not have openshift_image_tag, see bz 1466622
image_tag = self.get_var("openshift_image_tag", default="latest")
image_info = DEPLOYMENT_IMAGE_INFO[deployment_type]
if not image_info:
return required
# template for images that run on top of OpenShift
image_url = "{}/{}-{}:{}".format(image_info["namespace"], image_info["name"], "${component}", "${version}")
image_url = self.get_var("oreg_url", default="") or image_url
if 'nodes' in host_groups:
for suffix in NODE_IMAGE_SUFFIXES:
required.add(image_url.replace("${component}", suffix).replace("${version}", image_tag))
# The registry-console is for some reason not prefixed with ose- like the other components.
# Nor is it versioned the same, so just look for latest.
# Also a completely different name is used for Origin.
required.add(image_info["registry_console_image"])
# images for containerized components
if self.get_var("openshift", "common", "is_containerized"):
components = set()
if 'nodes' in host_groups:
components.update(["node", "openvswitch"])
if 'masters' in host_groups: # name is "origin" or "ose"
components.add(image_info["name"])
for component in components:
required.add("{}/{}:{}".format(image_info["namespace"], component, image_tag))
if 'etcd' in host_groups: # special case, note it is the same for origin/enterprise
required.add("registry.access.redhat.com/rhel7/etcd") # and no image tag
return required
def local_images(self, images):
"""Filter a list of images and return those available locally."""
registries = self.known_docker_registries()
found_images = []
for image in images:
# docker could have the image name as-is or prefixed with any registry
imglist = [image] + [reg + "/" + image for reg in registries]
if self.is_image_local(imglist):
found_images.append(image)
return found_images
def is_image_local(self, image):
"""Check if image is already in local docker index."""
result = self.execute_module("docker_image_facts", {"name": image})
return bool(result.get("images")) and not result.get("failed")
def known_docker_registries(self):
"""Build a list of docker registries available according to inventory vars."""
regs = list(self.get_var("openshift.docker.additional_registries", default=[]))
deployment_type = self.get_var("openshift_deployment_type")
if deployment_type == "origin" and "docker.io" not in regs:
regs.append("docker.io")
elif "enterprise" in deployment_type and "registry.access.redhat.com" not in regs:
regs.append("registry.access.redhat.com")
return regs
def available_images(self, images, default_registries):
"""Search remotely for images. Returns: list of images found."""
return [
image for image in images
if self.is_available_skopeo_image(image, default_registries)
]
def is_available_skopeo_image(self, image, default_registries):
"""Use Skopeo to determine if required image exists in known registry(s)."""
registries = default_registries
# If image already includes a registry, only use that.
# NOTE: This logic would incorrectly identify images that do not use a namespace, e.g.
# registry.access.redhat.com/rhel7 as if the registry were a namespace.
# It's not clear that there's any way to distinguish them, but fortunately
# the current set of images all look like [registry/]namespace/name[:version].
if image.count("/") > 1:
registry, image = image.split("/", 1)
registries = [registry]
for registry in registries:
if registry not in self.reachable_registries:
self.reachable_registries[registry] = self.connect_to_registry(registry)
if not self.reachable_registries[registry]:
continue
args = {"_raw_params": self.skopeo_img_check_command.format(registry=registry, image=image)}
result = self.execute_module_with_retries("command", args)
if result.get("rc", 0) == 0 and not result.get("failed"):
return True
if result.get("rc") == 124: # RC 124 == timed out; mark unreachable
self.reachable_registries[registry] = False
return False
def connect_to_registry(self, registry):
"""Use ansible wait_for module to test connectivity from host to registry. Returns bool."""
# test a simple TCP connection
host, _, port = registry.partition(":")
port = port or 443
args = dict(host=host, port=port, state="started", timeout=30)
result = self.execute_module("wait_for", args)
return result.get("rc", 0) == 0 and not result.get("failed")
| apache-2.0 | 5,285,902,716,826,715,000 | 44.704762 | 115 | 0.627735 | false |
grumpycoin/grumpycoin-v.1.2 | contrib/spendfrom/spendfrom.py | 1 | 10177 | #!/usr/bin/env python
#
# Use the raw transactions API to spend grumpycoins received on particular addresses,
# and send any change back to that same address.
#
# Example usage:
# spendfrom.py # Lists available funds
# spendfrom.py --from=ADDRESS --to=ADDRESS --amount=11.00
#
# Assumes it will talk to a grumpycoind or GrumpyCoin-Qt running
# on localhost.
#
# Depends on jsonrpc
#
from decimal import *
import getpass
import math
import os
import os.path
import platform
import sys
import time
from jsonrpc import ServiceProxy, json
BASE_FEE=Decimal("0.001")
def check_json_precision():
"""Make sure json library being used does not lose precision converting MEC values"""
n = Decimal("20000000.00000003")
satoshis = int(json.loads(json.dumps(float(n)))*1.0e8)
if satoshis != 2000000000000003:
raise RuntimeError("JSON encode/decode loses precision")
def determine_db_dir():
"""Return the default location of the grumpycoin data directory"""
if platform.system() == "Darwin":
return os.path.expanduser("~/Library/Application Support/GrumpyCoin/")
elif platform.system() == "Windows":
return os.path.join(os.environ['APPDATA'], "GrumpyCoin")
return os.path.expanduser("~/.grumpycoin")
def read_grumpycoin_config(dbdir):
"""Read the grumpycoin.conf file from dbdir, returns dictionary of settings"""
from ConfigParser import SafeConfigParser
class FakeSecHead(object):
def __init__(self, fp):
self.fp = fp
self.sechead = '[all]\n'
def readline(self):
if self.sechead:
try: return self.sechead
finally: self.sechead = None
else:
s = self.fp.readline()
if s.find('#') != -1:
s = s[0:s.find('#')].strip() +"\n"
return s
config_parser = SafeConfigParser()
config_parser.readfp(FakeSecHead(open(os.path.join(dbdir, "grumpycoin.conf"))))
return dict(config_parser.items("all"))
def connect_JSON(config):
"""Connect to a grumpycoin JSON-RPC server"""
testnet = config.get('testnet', '0')
testnet = (int(testnet) > 0) # 0/1 in config file, convert to True/False
if not 'rpcport' in config:
config['rpcport'] = 45998 if testnet else 55903
connect = "http://%s:%[email protected]:%s"%(config['rpcuser'], config['rpcpassword'], config['rpcport'])
try:
result = ServiceProxy(connect)
# ServiceProxy is lazy-connect, so send an RPC command mostly to catch connection errors,
# but also make sure the grumpycoind we're talking to is/isn't testnet:
if result.getmininginfo()['testnet'] != testnet:
sys.stderr.write("RPC server at "+connect+" testnet setting mismatch\n")
sys.exit(1)
return result
except:
sys.stderr.write("Error connecting to RPC server at "+connect+"\n")
sys.exit(1)
def unlock_wallet(grumpycoind):
info = grumpycoind.getinfo()
if 'unlocked_until' not in info:
return True # wallet is not encrypted
t = int(info['unlocked_until'])
if t <= time.time():
try:
passphrase = getpass.getpass("Wallet is locked; enter passphrase: ")
grumpycoind.walletpassphrase(passphrase, 5)
except:
sys.stderr.write("Wrong passphrase\n")
info = grumpycoind.getinfo()
return int(info['unlocked_until']) > time.time()
def list_available(grumpycoind):
address_summary = dict()
address_to_account = dict()
for info in grumpycoind.listreceivedbyaddress(0):
address_to_account[info["address"]] = info["account"]
unspent = grumpycoind.listunspent(0)
for output in unspent:
# listunspent doesn't give addresses, so:
rawtx = grumpycoind.getrawtransaction(output['txid'], 1)
vout = rawtx["vout"][output['vout']]
pk = vout["scriptPubKey"]
# This code only deals with ordinary pay-to-grumpycoin-address
# or pay-to-script-hash outputs right now; anything exotic is ignored.
if pk["type"] != "pubkeyhash" and pk["type"] != "scripthash":
continue
address = pk["addresses"][0]
if address in address_summary:
address_summary[address]["total"] += vout["value"]
address_summary[address]["outputs"].append(output)
else:
address_summary[address] = {
"total" : vout["value"],
"outputs" : [output],
"account" : address_to_account.get(address, "")
}
return address_summary
def select_coins(needed, inputs):
# Feel free to improve this, this is good enough for my simple needs:
outputs = []
have = Decimal("0.0")
n = 0
while have < needed and n < len(inputs):
outputs.append({ "txid":inputs[n]["txid"], "vout":inputs[n]["vout"]})
have += inputs[n]["amount"]
n += 1
return (outputs, have-needed)
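# Worked example (hypothetical values): if needed is 1.5 and the available
# inputs are worth 1.0 and 0.7, the greedy loop selects both and returns
# (those two outputs, 0.2): the inputs to spend plus the amount left over,
# which create_tx sends back to the last from-address as change.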
def create_tx(grumpycoind, fromaddresses, toaddress, amount, fee):
all_coins = list_available(grumpycoind)
total_available = Decimal("0.0")
needed = amount+fee
potential_inputs = []
for addr in fromaddresses:
if addr not in all_coins:
continue
potential_inputs.extend(all_coins[addr]["outputs"])
total_available += all_coins[addr]["total"]
if total_available < needed:
sys.stderr.write("Error, only %f MEC available, need %f\n"%(total_available, needed));
sys.exit(1)
#
# Note:
# Python's json/jsonrpc modules have inconsistent support for Decimal numbers.
# Instead of wrestling with getting json.dumps() (used by jsonrpc) to encode
# Decimals, I'm casting amounts to float before sending them to grumpycoind.
#
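    # For illustration (added, not in the original): the standard json module
    # cannot serialize Decimal directly, e.g. json.dumps(Decimal("0.1")) raises
    # TypeError, while json.dumps(float(Decimal("0.1"))) works, hence the
    # float() casts below.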
outputs = { toaddress : float(amount) }
(inputs, change_amount) = select_coins(needed, potential_inputs)
if change_amount > BASE_FEE: # don't bother with zero or tiny change
change_address = fromaddresses[-1]
if change_address in outputs:
outputs[change_address] += float(change_amount)
else:
outputs[change_address] = float(change_amount)
rawtx = grumpycoind.createrawtransaction(inputs, outputs)
signed_rawtx = grumpycoind.signrawtransaction(rawtx)
if not signed_rawtx["complete"]:
sys.stderr.write("signrawtransaction failed\n")
sys.exit(1)
txdata = signed_rawtx["hex"]
return txdata
def compute_amount_in(grumpycoind, txinfo):
result = Decimal("0.0")
for vin in txinfo['vin']:
in_info = grumpycoind.getrawtransaction(vin['txid'], 1)
vout = in_info['vout'][vin['vout']]
result = result + vout['value']
return result
def compute_amount_out(txinfo):
result = Decimal("0.0")
for vout in txinfo['vout']:
result = result + vout['value']
return result
def sanity_test_fee(grumpycoind, txdata_hex, max_fee):
class FeeError(RuntimeError):
pass
try:
txinfo = grumpycoind.decoderawtransaction(txdata_hex)
total_in = compute_amount_in(grumpycoind, txinfo)
total_out = compute_amount_out(txinfo)
if total_in-total_out > max_fee:
raise FeeError("Rejecting transaction, unreasonable fee of "+str(total_in-total_out))
tx_size = len(txdata_hex)/2
kb = tx_size/1000 # integer division rounds down
        # a large or tiny-amount transaction must pay at least the base fee
        if kb > 1 and (total_in - total_out) < BASE_FEE:
            raise FeeError("Rejecting no-fee transaction, larger than 1000 bytes")
        if total_in < 0.01 and (total_in - total_out) < BASE_FEE:
            raise FeeError("Rejecting no-fee, tiny-amount transaction")
# Exercise for the reader: compute transaction priority, and
# warn if this is a very-low-priority transaction
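        # One possible sketch (illustrative assumption, not implemented here;
        # input confirmation counts would have to be fetched separately):
        #   priority = sum(input_value_in_base_units * input_confirmations) / tx_size
        #   warn if priority falls below the network's "free transaction" threshold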
except FeeError as err:
sys.stderr.write((str(err)+"\n"))
sys.exit(1)
def main():
import optparse
parser = optparse.OptionParser(usage="%prog [options]")
parser.add_option("--from", dest="fromaddresses", default=None,
help="addresses to get grumpycoins from")
parser.add_option("--to", dest="to", default=None,
help="address to get send grumpycoins to")
parser.add_option("--amount", dest="amount", default=None,
help="amount to send")
parser.add_option("--fee", dest="fee", default="0.0",
help="fee to include")
parser.add_option("--datadir", dest="datadir", default=determine_db_dir(),
help="location of grumpycoin.conf file with RPC username/password (default: %default)")
parser.add_option("--testnet", dest="testnet", default=False, action="store_true",
help="Use the test network")
parser.add_option("--dry_run", dest="dry_run", default=False, action="store_true",
help="Don't broadcast the transaction, just create and print the transaction data")
(options, args) = parser.parse_args()
check_json_precision()
config = read_grumpycoin_config(options.datadir)
if options.testnet: config['testnet'] = True
grumpycoind = connect_JSON(config)
if options.amount is None:
address_summary = list_available(grumpycoind)
for address,info in address_summary.iteritems():
n_transactions = len(info['outputs'])
if n_transactions > 1:
print("%s %.8f %s (%d transactions)"%(address, info['total'], info['account'], n_transactions))
else:
print("%s %.8f %s"%(address, info['total'], info['account']))
else:
fee = Decimal(options.fee)
amount = Decimal(options.amount)
while unlock_wallet(grumpycoind) == False:
pass # Keep asking for passphrase until they get it right
txdata = create_tx(grumpycoind, options.fromaddresses.split(","), options.to, amount, fee)
sanity_test_fee(grumpycoind, txdata, amount*Decimal("0.01"))
if options.dry_run:
print(txdata)
else:
txid = grumpycoind.sendrawtransaction(txdata)
print(txid)
if __name__ == '__main__':
main()
| mit | -3,352,800,166,498,725,000 | 37.116105 | 111 | 0.624447 | false |
named-data/ndn-atmos | lib/ndn_cmmap_translators/atmos2ndn_parser/conf_file_parser.py | 1 | 4328 | #!/usr/bin/env python3
# -*- Mode:python; c-file-style:"gnu"; indent-tabs-mode:nil -*- */
#
# Copyright (c) 2015, Colorado State University.
#
# This file is part of ndn-atmos.
#
# ndn-atmos is free software: you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free Software
# Foundation, either version 3 of the License, or (at your option) any later version.
#
# ndn-atmos is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
# PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
#
# You should have received copies of the GNU General Public License and GNU Lesser
# General Public License along with ndn-atmos, e.g., in COPYING.md file. If not, see
# <http://www.gnu.org/licenses/>.
#
# See AUTHORS.md for complete list of ndn-atmos authors and contributors.
'''This is the config file parser module.
Input = object with command line parameters.
Output = list of components for different config sections'''
import configparser
import sys, traceback
class ParseConf(object):
'''parses the name schema file and returns name mappings for translated output'''
def __init__(self, confName):
self.confName = confName
if __debug__:
print("Config file name: %s" %(self.confName))
self.filenameMap = []
self.ndnNameMap = []
self.seperatorsMap = []
self.userDefinedConfDir = {}
self.translator = []
#initialize the parser
self.parser = configparser.SafeConfigParser()
self.parser.optionxform=str
self.parser.read(self.confName)
self.fullConf = {}
#do the mapping
res = self.getMappings(confName)
if res is False:
print("Error getting values from config file")
            raise ValueError("Error getting values from config file")
def _parseConf(self):
#iterate over them and store the name components in fullConf
try:
for sectionName in self.parser.sections():
self.conf = {}
for name, value in self.parser.items(sectionName):
self.conf[name] = value
self.fullConf[sectionName] = self.conf
if __debug__:
print(self.fullConf)
        except KeyError as key:
            print("Key %s is not found in config file" %(key))
print(sys.exc_info()[2])
except TypeError:
print("TypeError while parsing config file")
print(sys.exc_info()[2])
return self.fullConf
def _doParsing(self):
#parser now contain a dictionary with the sections in conf
# first elements are section and second ones are variables defined in config file
try:
self.filenameMap = self.fullConf['Name']['filenameMapping'].replace(" ", "").split(',')
self.ndnNameMap = self.fullConf['Name']['ndnMapping'].replace(" ", "").split(',')
# user defined components look like this
#activity:cmip5, subactivity:atmos, organization:csu, ensemble:r3i1p1
userDefinedConf = self.fullConf['Name']['userDefinedComps'].replace(" ", "").split(',')
for item in userDefinedConf:
key, value = item.split(":")
self.userDefinedConfDir[key] = [value]
self.seperatorsMap = self.fullConf['Name']['seperators'].replace(" ", "").split(',')
#reads which translator to use
self.translator = self.fullConf['Translator']['translator'].replace(" ", "")
        except KeyError as key:
            print("Key %s is not found in config file" %(key))
print(sys.exc_info()[2])
except TypeError:
print("TypeError while parsing config file")
print(sys.exc_info()[2])
def getMappings(self, confName):
'''parses the schema file and provides name mappings'''
fullConf = self._parseConf()
#if dict is not empty
if fullConf:
res = self._doParsing()
if len(self.filenameMap) == 0 or len(self.ndnNameMap) == 0 or len(self.translator) == 0:
return False
else:
return True
else:
return False
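
# Minimal usage sketch (illustrative; the file name and printed fields are
# assumptions, not part of ndn-atmos itself):
#
#   conf = ParseConf("translator.conf")
#   print(conf.filenameMap)   # components parsed from 'filenameMapping'
#   print(conf.ndnNameMap)    # components parsed from 'ndnMapping'
#   print(conf.translator)    # translator module named in the [Translator] section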
| gpl-3.0 | -796,081,374,918,111,900 | 39.074074 | 99 | 0.620379 | false |
tim-clifford/py-cipher | src/core.py | 1 | 3094 | '''
A few common functions for cipher cracking
Functionality:
- Return a list of letter frequency from a given string
- Sort a string with a given linear function into a list of inputs based on letter frequency
- Shift a given string based on a linear function and inputs
Sample Usage:
>>> from cipher import core
>>> letterFrequency = core.frequencyList(<encrypted bytearray>)
>>> core.sortLinear(lambda x, a, b: a*x + b, <encrypted bytearray>, range(1,5), range(26), letterFrequency)
[(<a1>,<b1>),(<a2>,<b2>)...(<a104>,<b104>)]
>>> core.shiftLinear(lambda x, a, b: a*x + b,<encrypted bytearray>,<a1>,<b1>)
<decrypted string>
'''
def frequencyList(input1,utf8=False):
'''
Returns a list of the frequency of characters in a string as fractions of the total
>>> frequencyList("abcde",utf8=True)
[0.2, 0.2, 0.2, 0.2, 0.2, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
>>> frequencyList(bytearray("abcde","ascii"))
[0.2, 0.2, 0.2, 0.2, 0.2, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
'''
cipherLetterFrequency = []
for letter in range(97,123):
tempFrequency = 0
for i in input1.lower():
if utf8:
if ord(i) == letter: tempFrequency += 1
elif i == letter:
tempFrequency += 1
        cipherLetterFrequency.append(tempFrequency / len(input1))
return cipherLetterFrequency
def sortLinear(function, list1, a, b, cipherLetterFrequency):
'''
Returns a list of possible values for a given function
sorted by similarity of the letter frequency to english
    >>> core.sortLinear(lambda x, a, b: a*x + b, <encrypted string>, range(1,5), range(26), <letter frequency list>)
[(<a1>,<b1>),(<a2>,<b2>)...(<a104>,<b104>)]
'''
letterFrequency = [0.0817, 0.0149, 0.0278, 0.0425, 0.127, 0.0223, 0.0202, 0.0609, 0.0697, 0.0015, 0.0077, 0.0402, 0.0241, 0.0675, 0.0751, 0.0193, 0.0009, 0.0599, 0.0633, 0.0906, 0.0276, 0.0098, 0.0236, 0.0015, 0.0197, 0.0007]
shiftPossibility = []
paramList = []
for param1 in a:
for param2 in b:
tempPossibility = 0
for letter in list1:
if 65 <= letter <= 90:
newLetter = (function(letter-65,param1,param2))%26
tempPossibility += letterFrequency[newLetter]
shiftPossibility.append(tempPossibility)
paramList.append((param1,param2))
return [(a,b) for _,(a,b) in sorted(zip(shiftPossibility, paramList))][::-1]
def shiftLinear(function, list1, a, b, utf8=False):
'''
Shifts a given string by the function and two input values `a` and `b`
>>> core.shiftLinear(lambda x, a, b: a*(x - b),"NGGNPX NG QNJA",1,13,utf8=True)
'attack at dawn'
>>> core.shiftLinear(lambda x, a, b: a*(x - b),bytearray("NGGNPX NG QNJA","ascii"),1,13)
bytearray(b'attack at dawn')
'''
if utf8:
newInput=""
for i in list1.lower():
if ord(i) < 97 or ord(i) > 122:
newInput += i
else:
newInput += chr((function(ord(i)-97,a,b) % 26 + 97))
return newInput
else:
newInput = bytearray("","ascii")
for i in list1.lower():
if i < 97 or i > 122:
newInput += bytes([i])
else:
newInput += bytes([(function(i-97,a,b)) % 26 + 97])
return newInput
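
# Illustrative end-to-end sketch (added; not part of the original module): rank
# candidate (a, b) parameters for a Caesar-style shift, then decode with the
# best-ranked pair. Assumes an ASCII bytearray ciphertext as in the doctests.
#
#   ciphertext = bytearray("NGGNPX NG QNJA", "ascii")
#   freqs = frequencyList(ciphertext)
#   candidates = sortLinear(lambda x, a, b: a * (x - b), ciphertext,
#                           range(1, 2), range(26), freqs)
#   best_a, best_b = candidates[0]
#   print(shiftLinear(lambda x, a, b: a * (x - b), ciphertext, best_a, best_b))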
| mit | 1,851,057,241,019,089,200 | 35.4 | 227 | 0.641241 | false |
mrtumnus/scrape-tedtalks | download_tedtalk.py | 1 | 1586 | # File: download_tedtalk.py
# Author: E. Partridge
# Date: 8 August 2012
# Description:
# This script parses the TED Talk audio feed and proceeds to
# download all audio files into the same directory that
# this script is located in. Files are prepended with the publication
# date for convenience.
#
# Note: This has only been tested on Windows 7 64-bit, with Python 2.7.2.5
# Note2: TED Talk audio files contain ID3v2.4 tags, which are not supported
# natively by Windows. I used foobar2000 to convert the tags to ID3v2.3,
# which Windows does support. To do this, open the MP3 files in
# foobar2000, right click and select Tagging > MP3 Tag Types... Check
# "Override ID3v2 revision:" and select the ID3v2.3 radio button.
# After that, I was able to view metadata in Windows Explorer and
# Windows Media Player.
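#
# An alternative to the manual foobar2000 step above (illustrative; assumes the
# third-party "mutagen" package is available): rewrite each downloaded file's
# tags as ID3v2.3 from Python, e.g.
#
#   from mutagen.id3 import ID3
#   tags = ID3(file_name)
#   tags.save(v2_version=3)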
import urllib
import feedparser
import time
tedtalk_rss_url = 'http://feeds.feedburner.com/TEDTalks_audio'
tedtalk_feed = feedparser.parse(tedtalk_rss_url)
def GetFeedContent(entry):
content_url = entry.enclosures[0].href
file_name = content_url.split('/')[-1]
file_date = time.strptime(entry.published[5:16], '%d %b %Y')
date_str = '{:04}-{:02}-{:02}'.format(file_date.tm_year, file_date.tm_mon, file_date.tm_mday)
file_name = date_str + ' ' + file_name
try:
with open(file_name) as f:
print('File exists: ' + file_name)
except IOError as e:
print('Downloading: ' + file_name)
urllib.urlretrieve(content_url, file_name)
return
for entry in tedtalk_feed.entries:
GetFeedContent(entry) | gpl-3.0 | -5,137,724,765,449,738,000 | 37.7 | 94 | 0.703026 | false |
ChristosChristofidis/django-debug-toolbar | tests/panels/test_cache.py | 1 | 1678 | # coding: utf-8
from __future__ import absolute_import, unicode_literals
import django
from django.core import cache
from ..base import BaseTestCase
from debug_toolbar.compat import unittest
class CachePanelTestCase(BaseTestCase):
def setUp(self):
super(CachePanelTestCase, self).setUp()
self.panel = self.toolbar.get_panel_by_id('CachePanel')
self.panel.enable_instrumentation()
def tearDown(self):
self.panel.disable_instrumentation()
super(CachePanelTestCase, self).tearDown()
def test_recording(self):
self.assertEqual(len(self.panel.calls), 0)
cache.cache.set('foo', 'bar')
cache.cache.get('foo')
cache.cache.delete('foo')
# Verify that the cache has a valid clear method.
cache.cache.clear()
self.assertEqual(len(self.panel.calls), 4)
@unittest.skipIf(django.VERSION < (1, 7), "Caches was added in Django 1.7")
def test_recording_caches(self):
self.assertEqual(len(self.panel.calls), 0)
default_cache = cache.caches[cache.DEFAULT_CACHE_ALIAS]
second_cache = cache.caches['second']
default_cache.set('foo', 'bar')
second_cache.get('foo')
self.assertEqual(len(self.panel.calls), 2)
@unittest.skipIf(django.VERSION > (1, 6), "get_cache was deprecated in Django 1.7")
def test_recording_get_cache(self):
self.assertEqual(len(self.panel.calls), 0)
default_cache = cache.get_cache(cache.DEFAULT_CACHE_ALIAS)
second_cache = cache.get_cache('second')
default_cache.set('foo', 'bar')
second_cache.get('foo')
self.assertEqual(len(self.panel.calls), 2)
| bsd-3-clause | -979,569,211,508,806,300 | 33.958333 | 87 | 0.657926 | false |
carlgao/lenga | images/lenny64-peon/usr/share/python-support/mercurial-common/mercurial/context.py | 1 | 21456 | # context.py - changeset and file context objects for mercurial
#
# Copyright 2006, 2007 Matt Mackall <[email protected]>
#
# This software may be used and distributed according to the terms
# of the GNU General Public License, incorporated herein by reference.
from node import nullid, nullrev, short
from i18n import _
import ancestor, bdiff, revlog, util, os, errno
class changectx(object):
"""A changecontext object makes access to data related to a particular
changeset convenient."""
def __init__(self, repo, changeid=None):
"""changeid is a revision number, node, or tag"""
self._repo = repo
if not changeid and changeid != 0:
p1, p2 = self._repo.dirstate.parents()
self._rev = self._repo.changelog.rev(p1)
if self._rev == -1:
changeid = 'tip'
else:
self._node = p1
return
self._node = self._repo.lookup(changeid)
self._rev = self._repo.changelog.rev(self._node)
def __str__(self):
return short(self.node())
def __repr__(self):
return "<changectx %s>" % str(self)
def __eq__(self, other):
try:
return self._rev == other._rev
except AttributeError:
return False
def __ne__(self, other):
return not (self == other)
def __nonzero__(self):
return self._rev != nullrev
def __getattr__(self, name):
if name == '_changeset':
self._changeset = self._repo.changelog.read(self.node())
return self._changeset
elif name == '_manifest':
self._manifest = self._repo.manifest.read(self._changeset[0])
return self._manifest
elif name == '_manifestdelta':
md = self._repo.manifest.readdelta(self._changeset[0])
self._manifestdelta = md
return self._manifestdelta
else:
raise AttributeError, name
def __contains__(self, key):
return key in self._manifest
def __getitem__(self, key):
return self.filectx(key)
def __iter__(self):
a = self._manifest.keys()
a.sort()
for f in a:
yield f
def changeset(self): return self._changeset
def manifest(self): return self._manifest
def rev(self): return self._rev
def node(self): return self._node
def user(self): return self._changeset[1]
def date(self): return self._changeset[2]
def files(self): return self._changeset[3]
def description(self): return self._changeset[4]
def branch(self): return self._changeset[5].get("branch")
def extra(self): return self._changeset[5]
def tags(self): return self._repo.nodetags(self._node)
def parents(self):
"""return contexts for each parent changeset"""
p = self._repo.changelog.parents(self._node)
return [changectx(self._repo, x) for x in p]
def children(self):
"""return contexts for each child changeset"""
c = self._repo.changelog.children(self._node)
return [changectx(self._repo, x) for x in c]
def _fileinfo(self, path):
if '_manifest' in self.__dict__:
try:
return self._manifest[path], self._manifest.flags(path)
except KeyError:
raise revlog.LookupError(self._node, path,
_('not found in manifest'))
if '_manifestdelta' in self.__dict__ or path in self.files():
if path in self._manifestdelta:
return self._manifestdelta[path], self._manifestdelta.flags(path)
node, flag = self._repo.manifest.find(self._changeset[0], path)
if not node:
raise revlog.LookupError(self._node, path,
_('not found in manifest'))
return node, flag
def filenode(self, path):
return self._fileinfo(path)[0]
def fileflags(self, path):
try:
return self._fileinfo(path)[1]
except revlog.LookupError:
return ''
def filectx(self, path, fileid=None, filelog=None):
"""get a file context from this changeset"""
if fileid is None:
fileid = self.filenode(path)
return filectx(self._repo, path, fileid=fileid,
changectx=self, filelog=filelog)
def filectxs(self):
"""generate a file context for each file in this changeset's
manifest"""
mf = self.manifest()
m = mf.keys()
m.sort()
for f in m:
yield self.filectx(f, fileid=mf[f])
def ancestor(self, c2):
"""
return the ancestor context of self and c2
"""
n = self._repo.changelog.ancestor(self._node, c2._node)
return changectx(self._repo, n)
class filectx(object):
"""A filecontext object makes access to data related to a particular
filerevision convenient."""
def __init__(self, repo, path, changeid=None, fileid=None,
filelog=None, changectx=None):
"""changeid can be a changeset revision, node, or tag.
fileid can be a file revision or node."""
self._repo = repo
self._path = path
assert (changeid is not None
or fileid is not None
or changectx is not None)
if filelog:
self._filelog = filelog
if changeid is not None:
self._changeid = changeid
if changectx is not None:
self._changectx = changectx
if fileid is not None:
self._fileid = fileid
def __getattr__(self, name):
if name == '_changectx':
self._changectx = changectx(self._repo, self._changeid)
return self._changectx
elif name == '_filelog':
self._filelog = self._repo.file(self._path)
return self._filelog
elif name == '_changeid':
if '_changectx' in self.__dict__:
self._changeid = self._changectx.rev()
else:
self._changeid = self._filelog.linkrev(self._filenode)
return self._changeid
elif name == '_filenode':
if '_fileid' in self.__dict__:
self._filenode = self._filelog.lookup(self._fileid)
else:
self._filenode = self._changectx.filenode(self._path)
return self._filenode
elif name == '_filerev':
self._filerev = self._filelog.rev(self._filenode)
return self._filerev
elif name == '_repopath':
self._repopath = self._path
return self._repopath
else:
raise AttributeError, name
def __nonzero__(self):
try:
n = self._filenode
return True
except revlog.LookupError:
# file is missing
return False
def __str__(self):
return "%s@%s" % (self.path(), short(self.node()))
def __repr__(self):
return "<filectx %s>" % str(self)
def __eq__(self, other):
try:
return (self._path == other._path
and self._fileid == other._fileid)
except AttributeError:
return False
def __ne__(self, other):
return not (self == other)
def filectx(self, fileid):
'''opens an arbitrary revision of the file without
opening a new filelog'''
return filectx(self._repo, self._path, fileid=fileid,
filelog=self._filelog)
def filerev(self): return self._filerev
def filenode(self): return self._filenode
def fileflags(self): return self._changectx.fileflags(self._path)
def isexec(self): return 'x' in self.fileflags()
def islink(self): return 'l' in self.fileflags()
def filelog(self): return self._filelog
def rev(self):
if '_changectx' in self.__dict__:
return self._changectx.rev()
if '_changeid' in self.__dict__:
return self._changectx.rev()
return self._filelog.linkrev(self._filenode)
def linkrev(self): return self._filelog.linkrev(self._filenode)
def node(self): return self._changectx.node()
def user(self): return self._changectx.user()
def date(self): return self._changectx.date()
def files(self): return self._changectx.files()
def description(self): return self._changectx.description()
def branch(self): return self._changectx.branch()
def manifest(self): return self._changectx.manifest()
def changectx(self): return self._changectx
def data(self): return self._filelog.read(self._filenode)
def path(self): return self._path
def size(self): return self._filelog.size(self._filerev)
def cmp(self, text): return self._filelog.cmp(self._filenode, text)
def renamed(self):
"""check if file was actually renamed in this changeset revision
If rename logged in file revision, we report copy for changeset only
if file revisions linkrev points back to the changeset in question
or both changeset parents contain different file revisions.
"""
renamed = self._filelog.renamed(self._filenode)
if not renamed:
return renamed
if self.rev() == self.linkrev():
return renamed
name = self.path()
fnode = self._filenode
for p in self._changectx.parents():
try:
if fnode == p.filenode(name):
return None
except revlog.LookupError:
pass
return renamed
def parents(self):
p = self._path
fl = self._filelog
pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)]
r = self._filelog.renamed(self._filenode)
if r:
pl[0] = (r[0], r[1], None)
return [filectx(self._repo, p, fileid=n, filelog=l)
for p,n,l in pl if n != nullid]
def children(self):
# hard for renames
c = self._filelog.children(self._filenode)
return [filectx(self._repo, self._path, fileid=x,
filelog=self._filelog) for x in c]
def annotate(self, follow=False, linenumber=None):
        '''returns a list of tuples of (ctx, line) for each line
        in the file, where ctx is the filectx of the node where
        that line was last changed.

        If the "linenumber" parameter is not None, this instead returns tuples
        of ((ctx, linenumber), line) for each line, where linenumber is the
        line's number at its first appearance in the managed file.

        To reduce annotation cost, a fixed value (False) is used as the
        linenumber when the "linenumber" parameter is False.'''
def decorate_compat(text, rev):
return ([rev] * len(text.splitlines()), text)
def without_linenumber(text, rev):
return ([(rev, False)] * len(text.splitlines()), text)
def with_linenumber(text, rev):
size = len(text.splitlines())
return ([(rev, i) for i in xrange(1, size + 1)], text)
decorate = (((linenumber is None) and decorate_compat) or
(linenumber and with_linenumber) or
without_linenumber)
def pair(parent, child):
for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
child[0][b1:b2] = parent[0][a1:a2]
return child
getlog = util.cachefunc(lambda x: self._repo.file(x))
def getctx(path, fileid):
log = path == self._path and self._filelog or getlog(path)
return filectx(self._repo, path, fileid=fileid, filelog=log)
getctx = util.cachefunc(getctx)
def parents(f):
# we want to reuse filectx objects as much as possible
p = f._path
if f._filerev is None: # working dir
pl = [(n.path(), n.filerev()) for n in f.parents()]
else:
pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)]
if follow:
r = f.renamed()
if r:
pl[0] = (r[0], getlog(r[0]).rev(r[1]))
return [getctx(p, n) for p, n in pl if n != nullrev]
# use linkrev to find the first changeset where self appeared
if self.rev() != self.linkrev():
base = self.filectx(self.filerev())
else:
base = self
# find all ancestors
needed = {base: 1}
visit = [base]
files = [base._path]
while visit:
f = visit.pop(0)
for p in parents(f):
if p not in needed:
needed[p] = 1
visit.append(p)
if p._path not in files:
files.append(p._path)
else:
# count how many times we'll use this
needed[p] += 1
# sort by revision (per file) which is a topological order
visit = []
for f in files:
fn = [(n.rev(), n) for n in needed.keys() if n._path == f]
visit.extend(fn)
visit.sort()
hist = {}
for r, f in visit:
curr = decorate(f.data(), f)
for p in parents(f):
if p != nullid:
curr = pair(hist[p], curr)
# trim the history of unneeded revs
needed[p] -= 1
if not needed[p]:
del hist[p]
hist[f] = curr
return zip(hist[f][0], hist[f][1].splitlines(1))
def ancestor(self, fc2):
"""
find the common ancestor file context, if any, of self, and fc2
"""
acache = {}
# prime the ancestor cache for the working directory
for c in (self, fc2):
if c._filerev == None:
pl = [(n.path(), n.filenode()) for n in c.parents()]
acache[(c._path, None)] = pl
flcache = {self._repopath:self._filelog, fc2._repopath:fc2._filelog}
def parents(vertex):
if vertex in acache:
return acache[vertex]
f, n = vertex
if f not in flcache:
flcache[f] = self._repo.file(f)
fl = flcache[f]
pl = [(f, p) for p in fl.parents(n) if p != nullid]
re = fl.renamed(n)
if re:
pl.append(re)
acache[vertex] = pl
return pl
a, b = (self._path, self._filenode), (fc2._path, fc2._filenode)
v = ancestor.ancestor(a, b, parents)
if v:
f, n = v
return filectx(self._repo, f, fileid=n, filelog=flcache[f])
return None
class workingctx(changectx):
"""A workingctx object makes access to data related to
the current working directory convenient."""
def __init__(self, repo):
self._repo = repo
self._rev = None
self._node = None
def __str__(self):
return str(self._parents[0]) + "+"
def __nonzero__(self):
return True
def __getattr__(self, name):
if name == '_parents':
self._parents = self._repo.parents()
return self._parents
if name == '_status':
self._status = self._repo.status()
return self._status
if name == '_manifest':
self._buildmanifest()
return self._manifest
else:
raise AttributeError, name
def _buildmanifest(self):
"""generate a manifest corresponding to the working directory"""
man = self._parents[0].manifest().copy()
copied = self._repo.dirstate.copies()
is_exec = util.execfunc(self._repo.root,
lambda p: man.execf(copied.get(p,p)))
is_link = util.linkfunc(self._repo.root,
lambda p: man.linkf(copied.get(p,p)))
modified, added, removed, deleted, unknown = self._status[:5]
for i, l in (("a", added), ("m", modified), ("u", unknown)):
for f in l:
man[f] = man.get(copied.get(f, f), nullid) + i
try:
man.set(f, is_exec(f), is_link(f))
except OSError:
pass
for f in deleted + removed:
if f in man:
del man[f]
self._manifest = man
def manifest(self): return self._manifest
def user(self): return self._repo.ui.username()
def date(self): return util.makedate()
def description(self): return ""
def files(self):
f = self.modified() + self.added() + self.removed()
f.sort()
return f
def modified(self): return self._status[0]
def added(self): return self._status[1]
def removed(self): return self._status[2]
def deleted(self): return self._status[3]
def unknown(self): return self._status[4]
def clean(self): return self._status[5]
def branch(self): return self._repo.dirstate.branch()
def tags(self):
t = []
[t.extend(p.tags()) for p in self.parents()]
return t
def parents(self):
"""return contexts for each parent changeset"""
return self._parents
def children(self):
return []
def fileflags(self, path):
if '_manifest' in self.__dict__:
try:
return self._manifest.flags(path)
except KeyError:
return ''
pnode = self._parents[0].changeset()[0]
orig = self._repo.dirstate.copies().get(path, path)
node, flag = self._repo.manifest.find(pnode, orig)
is_link = util.linkfunc(self._repo.root,
lambda p: flag and 'l' in flag)
is_exec = util.execfunc(self._repo.root,
lambda p: flag and 'x' in flag)
try:
return (is_link(path) and 'l' or '') + (is_exec(path) and 'e' or '')
except OSError:
pass
if not node or path in self.deleted() or path in self.removed():
return ''
return flag
def filectx(self, path, filelog=None):
"""get a file context from the working directory"""
return workingfilectx(self._repo, path, workingctx=self,
filelog=filelog)
def ancestor(self, c2):
"""return the ancestor context of self and c2"""
return self._parents[0].ancestor(c2) # punt on two parents for now
class workingfilectx(filectx):
"""A workingfilectx object makes access to data related to a particular
file in the working directory convenient."""
def __init__(self, repo, path, filelog=None, workingctx=None):
"""changeid can be a changeset revision, node, or tag.
fileid can be a file revision or node."""
self._repo = repo
self._path = path
self._changeid = None
self._filerev = self._filenode = None
if filelog:
self._filelog = filelog
if workingctx:
self._changectx = workingctx
def __getattr__(self, name):
if name == '_changectx':
self._changectx = workingctx(self._repo)
return self._changectx
elif name == '_repopath':
self._repopath = (self._repo.dirstate.copied(self._path)
or self._path)
return self._repopath
elif name == '_filelog':
self._filelog = self._repo.file(self._repopath)
return self._filelog
else:
raise AttributeError, name
def __nonzero__(self):
return True
def __str__(self):
return "%s@%s" % (self.path(), self._changectx)
def filectx(self, fileid):
'''opens an arbitrary revision of the file without
opening a new filelog'''
return filectx(self._repo, self._repopath, fileid=fileid,
filelog=self._filelog)
def rev(self):
if '_changectx' in self.__dict__:
return self._changectx.rev()
return self._filelog.linkrev(self._filenode)
def data(self): return self._repo.wread(self._path)
def renamed(self):
rp = self._repopath
if rp == self._path:
return None
return rp, self._changectx._parents[0]._manifest.get(rp, nullid)
def parents(self):
'''return parent filectxs, following copies if necessary'''
p = self._path
rp = self._repopath
pcl = self._changectx._parents
fl = self._filelog
pl = [(rp, pcl[0]._manifest.get(rp, nullid), fl)]
if len(pcl) > 1:
if rp != p:
fl = None
pl.append((p, pcl[1]._manifest.get(p, nullid), fl))
return [filectx(self._repo, p, fileid=n, filelog=l)
for p,n,l in pl if n != nullid]
def children(self):
return []
def size(self): return os.stat(self._repo.wjoin(self._path)).st_size
def date(self):
t, tz = self._changectx.date()
try:
return (int(os.lstat(self._repo.wjoin(self._path)).st_mtime), tz)
except OSError, err:
if err.errno != errno.ENOENT: raise
return (t, tz)
def cmp(self, text): return self._repo.wread(self._path) == text
| mit | -2,856,619,026,517,189,000 | 33.220096 | 81 | 0.546001 | false |
dimtruck/magnum | magnum/api/controllers/v1/service.py | 1 | 13306 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_utils import timeutils
import pecan
from pecan import rest
import wsme
from wsme import types as wtypes
from magnum.api.controllers import link
from magnum.api.controllers.v1 import base as v1_base
from magnum.api.controllers.v1 import collection
from magnum.api.controllers.v1 import types
from magnum.api.controllers.v1 import utils as api_utils
from magnum.api import expose
from magnum.api import validation
from magnum.common import exception
from magnum.common import k8s_manifest
from magnum.common import policy
from magnum import objects
# NOTE(dims): We don't depend on oslo*i18n yet
_ = _LI = _LW = _LE = _LC = lambda x: x
class ServicePatchType(v1_base.K8sPatchType):
@staticmethod
def internal_attrs():
defaults = v1_base.K8sPatchType.internal_attrs()
return defaults + ['/selector', '/ports', '/ip']
class Service(v1_base.K8sResourceBase):
uuid = types.uuid
"""Unique UUID for this service"""
selector = wsme.wsattr({wtypes.text: wtypes.text}, readonly=True)
"""Selector of this service"""
ip = wtypes.text
"""IP of this service"""
ports = wsme.wsattr([{wtypes.text: wtypes.IntegerType()}], readonly=True)
"""Port of this service"""
links = wsme.wsattr([link.Link], readonly=True)
"""A list containing a self link and associated service links"""
def __init__(self, **kwargs):
super(Service, self).__init__()
self.fields = []
for field in objects.Service.fields:
# Skip fields we do not expose.
if not hasattr(self, field):
continue
self.fields.append(field)
setattr(self, field, kwargs.get(field, wtypes.Unset))
@staticmethod
def _convert_with_links(service, url, expand=True):
if not expand:
service.unset_fields_except(['uuid', 'name', 'bay_uuid', 'labels',
'selector', 'ip', 'ports'])
service.links = [link.Link.make_link('self', url,
'services', service.uuid),
link.Link.make_link('bookmark', url,
'services', service.uuid,
bookmark=True)
]
return service
@classmethod
def convert_with_links(cls, rpc_service, expand=True):
service = Service(**rpc_service.as_dict())
return cls._convert_with_links(service, pecan.request.host_url, expand)
@classmethod
def sample(cls, expand=True):
sample = cls(uuid='fe78db47-9a37-4e9f-8572-804a10abc0aa',
name='MyService',
bay_uuid='7ae81bb3-dec3-4289-8d6c-da80bd8001ae',
labels={'label1': 'foo'},
selector={'label1': 'foo'},
ip='172.17.2.2',
ports=[{"port": 88,
"targetPort": 6379,
"protocol": "TCP"}],
manifest_url='file:///tmp/rc.yaml',
manifest='''{
"metadata": {
"name": "test",
"labels": {
"key": "value"
}
},
"spec": {
"ports": [
{
"port": 88,
"targetPort": 6379,
"protocol": "TCP"
}
],
"selector": {
"bar": "foo"
}
}
}''',
created_at=timeutils.utcnow(),
updated_at=timeutils.utcnow())
return cls._convert_with_links(sample, 'http://localhost:9511', expand)
def parse_manifest(self):
try:
manifest = k8s_manifest.parse(self._get_manifest())
except ValueError as e:
raise exception.InvalidParameterValue(message=str(e))
try:
self.name = manifest["metadata"]["name"]
except (KeyError, TypeError):
raise exception.InvalidParameterValue(
"Field metadata['name'] can't be empty in manifest.")
try:
self.ports = manifest["spec"]["ports"][:]
except (KeyError, TypeError):
raise exception.InvalidParameterValue(
"Field spec['ports'] can't be empty in manifest.")
if "selector" in manifest["spec"]:
self.selector = manifest["spec"]["selector"]
if "labels" in manifest["metadata"]:
self.labels = manifest["metadata"]["labels"]
class ServiceCollection(collection.Collection):
"""API representation of a collection of services."""
services = [Service]
"""A list containing services objects"""
def __init__(self, **kwargs):
self._type = 'services'
@staticmethod
def convert_with_links(rpc_services, limit, url=None,
expand=False, **kwargs):
collection = ServiceCollection()
collection.services = [Service.convert_with_links(p, expand)
for p in rpc_services]
collection.next = collection.get_next(limit, url=url, **kwargs)
return collection
@classmethod
def sample(cls):
sample = cls()
sample.services = [Service.sample(expand=False)]
return sample
class ServicesController(rest.RestController):
"""REST controller for Services."""
def __init__(self):
super(ServicesController, self).__init__()
_custom_actions = {
'detail': ['GET'],
}
def _get_services_collection(self, marker, limit,
sort_key, sort_dir,
bay_ident, expand=False,
resource_url=None):
limit = api_utils.validate_limit(limit)
sort_dir = api_utils.validate_sort_dir(sort_dir)
marker_obj = None
if marker:
marker_obj = objects.Service.get_by_uuid(pecan.request.context,
marker)
services = pecan.request.rpcapi.service_list(pecan.request.context,
limit,
marker_obj,
sort_key=sort_key,
sort_dir=sort_dir)
return ServiceCollection.convert_with_links(services, limit,
url=resource_url,
expand=expand,
sort_key=sort_key,
sort_dir=sort_dir)
@policy.enforce_wsgi("service")
@expose.expose(ServiceCollection, types.uuid, int, wtypes.text,
wtypes.text, types.uuid_or_name)
def get_all(self, marker=None, limit=None, sort_key='id',
sort_dir='asc', bay_ident=None):
"""Retrieve a list of services.
:param marker: pagination marker for large data sets.
:param limit: maximum number of resources to return in a single result.
:param sort_key: column to sort results by. Default: id.
:param sort_dir: direction to sort. "asc" or "desc". Default: asc.
:param bay_ident: UUID or logical name of the Bay.
"""
return self._get_services_collection(marker, limit, sort_key,
sort_dir, bay_ident)
@policy.enforce_wsgi("service")
@expose.expose(ServiceCollection, types.uuid, int, wtypes.text,
wtypes.text, types.uuid_or_name)
def detail(self, marker=None, limit=None, sort_key='id',
sort_dir='asc', bay_ident=None):
"""Retrieve a list of services with detail.
:param marker: pagination marker for large data sets.
:param limit: maximum number of resources to return in a single result.
:param sort_key: column to sort results by. Default: id.
:param sort_dir: direction to sort. "asc" or "desc". Default: asc.
:param bay_ident: UUID or logical name of the Bay.
"""
        # NOTE(lucasagomes): /detail should only work against collections
parent = pecan.request.path.split('/')[:-1][-1]
if parent != "services":
raise exception.HTTPNotFound
expand = True
resource_url = '/'.join(['services', 'detail'])
        return self._get_services_collection(marker, limit,
                                             sort_key, sort_dir, bay_ident,
                                             expand, resource_url)
@policy.enforce_wsgi("service", "get")
@expose.expose(Service, types.uuid_or_name,
types.uuid_or_name)
def get_one(self, service_ident, bay_ident):
"""Retrieve information about the given service.
:param service_ident: UUID or logical name of the service.
:param bay_ident: UUID or logical name of the Bay.
"""
rpc_service = api_utils.get_rpc_resource('Service', service_ident)
return Service.convert_with_links(rpc_service)
@policy.enforce_wsgi("service", "create")
@expose.expose(Service, body=Service, status_code=201)
@validation.enforce_bay_types('kubernetes')
def post(self, service):
"""Create a new service.
:param service: a service within the request body.
"""
service.parse_manifest()
service_dict = service.as_dict()
context = pecan.request.context
service_dict['project_id'] = context.project_id
service_dict['user_id'] = context.user_id
service_obj = objects.Service(context, **service_dict)
new_service = pecan.request.rpcapi.service_create(service_obj)
if new_service is None:
raise exception.InvalidState()
# Set the HTTP Location Header
pecan.response.location = link.build_url('services', new_service.uuid)
return Service.convert_with_links(new_service)
@policy.enforce_wsgi("service", "update")
@wsme.validate(types.uuid, [ServicePatchType])
@expose.expose(Service, types.uuid_or_name,
types.uuid_or_name, body=[ServicePatchType])
def patch(self, service_ident, bay_ident, patch):
"""Update an existing service.
:param service_ident: UUID or logical name of a service.
:param bay_ident: UUID or logical name of the Bay.
:param patch: a json PATCH document to apply to this service.
"""
rpc_service = api_utils.get_rpc_resource('Service', service_ident)
# Init manifest and manifest_url field because we don't store them
# in database.
rpc_service['manifest'] = None
rpc_service['manifest_url'] = None
try:
service_dict = rpc_service.as_dict()
service = Service(**api_utils.apply_jsonpatch(service_dict, patch))
if service.manifest or service.manifest_url:
service.parse_manifest()
except api_utils.JSONPATCH_EXCEPTIONS as e:
raise exception.PatchError(patch=patch, reason=e)
# Update only the fields that have changed
for field in objects.Service.fields:
try:
patch_val = getattr(service, field)
except AttributeError:
# Ignore fields that aren't exposed in the API
continue
if patch_val == wtypes.Unset:
patch_val = None
if rpc_service[field] != patch_val:
rpc_service[field] = patch_val
if service.manifest or service.manifest_url:
pecan.request.rpcapi.service_update(rpc_service)
else:
rpc_service.save()
return Service.convert_with_links(rpc_service)
@policy.enforce_wsgi("service")
@expose.expose(None, types.uuid_or_name,
types.uuid_or_name, status_code=204)
def delete(self, service_ident, bay_ident):
"""Delete a service.
:param service_ident: UUID or logical name of a service.
:param bay_ident: UUID or logical name of the Bay.
"""
rpc_service = api_utils.get_rpc_resource('Service', service_ident)
pecan.request.rpcapi.service_delete(rpc_service.uuid)
| apache-2.0 | -2,565,025,987,527,595,000 | 38.48368 | 79 | 0.55118 | false |
Altair3/Tanks | bzagents/PigeonAgent.py | 1 | 2822 | import sys
import math
import time
import random
from bzrc import BZRC, Command
class PigeonAgent(object):
def __init__(self, bzrc, mode, time):
self.bzrc = bzrc
self.mode = mode
self.num_tanks = 1
self.cur_time = time
self.const_velocity = .5
self.time_move = self.cur_time
self.time_turn = self.cur_time
self.move_interval = 2.0
self.turn_interval = 1.0
def behave(self, time):
if self.mode == "sit":
return
elif self.mode == "const":
#self.mytanks = self.bzrc.get_mytanks()
for i in range(self.num_tanks):
self.bzrc.speed(i, self.const_velocity)
elif self.mode == "wild":
for i in range(self.num_tanks):
if (time - self.time_move) > self.move_interval:
for i in range(self.num_tanks):
speed = self.getRandomSpeed()
self.bzrc.speed(i, speed)
self.time_move = time
if (time - self.time_turn) > self.turn_interval:
for i in range(self.num_tanks):
angvel = self.getRandomAngvel()
self.bzrc.angvel(i, angvel)
self.time_turn = time
def getRandomAngvel(self):
rval = random.random()
rval *= self.getDirection()
return rval
def getDirection(self):
threshold = .5
n = random.random()
if n <= threshold:
direction = -1.0
else:
direction = 1.0
return direction
def getRandomSpeed(self):
rval = random.uniform(0.4, 1.0)
return rval
def stop(self):
for tank in self.bzrc.get_mytanks():
self.bzrc.speed(tank.index, 0)
self.bzrc.angvel(tank.index, 0)
def main():
# Process CLI arguments.
try:
execname, host, port, mode = sys.argv
except ValueError:
execname = sys.argv[0]
print >>sys.stderr, '%s: incorrect number of arguments' % execname
print >>sys.stderr, 'usage: %s hostname port [sit|const|wild]' % sys.argv[0]
sys.exit(-1)
bzrc = BZRC(host, int(port))
cur_time = time.time()
agent = PigeonAgent(bzrc, mode, cur_time)
# Run the agent
try:
while True:
cur_time = time.time()
agent.behave(cur_time)
except KeyboardInterrupt:
print "Exiting due to keyboard interrupt."
agent.stop()
bzrc.close()
if __name__ == '__main__':
main()
| gpl-3.0 | 8,886,061,043,608,434,000 | 26.398058 | 84 | 0.488661 | false |
hybrid-storage-dev/cinder-fs-111t-hybrid-cherry | api/v2/views/volumes.py | 1 | 5777 | # Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from cinder.api import common
from cinder.openstack.common import log as logging
LOG = logging.getLogger(__name__)
class ViewBuilder(common.ViewBuilder):
"""Model a server API response as a python dictionary."""
_collection_name = "volumes"
def __init__(self):
"""Initialize view builder."""
super(ViewBuilder, self).__init__()
def summary_list(self, request, volumes):
"""Show a list of volumes without many details."""
return self._list_view(self.summary, request, volumes)
def detail_list(self, request, volumes):
"""Detailed view of a list of volumes."""
return self._list_view(self.detail, request, volumes,
coll_name=self._collection_name + '/detail')
def summary(self, request, volume):
"""Generic, non-detailed view of an volume."""
return {
'volume': {
'id': volume['id'],
'name': volume['display_name'],
'links': self._get_links(request,
volume['id']),
},
}
def detail(self, request, volume):
"""Detailed view of a single volume."""
return {
'volume': {
'id': volume.get('id'),
'status': volume.get('status'),
'size': volume.get('size'),
'availability_zone': volume.get('availability_zone'),
'created_at': volume.get('created_at'),
'attachments': self._get_attachments(volume),
'name': volume.get('display_name'),
'description': volume.get('display_description'),
'volume_type': self._get_volume_type(volume),
'snapshot_id': volume.get('snapshot_id'),
'source_volid': volume.get('source_volid'),
'metadata': self._get_volume_metadata(volume),
'links': self._get_links(request, volume['id']),
'user_id': volume.get('user_id'),
'bootable': str(volume.get('bootable')).lower(),
'encrypted': self._is_volume_encrypted(volume),
'replication_status': volume.get('replication_status'),
'consistencygroup_id': volume.get('consistencygroup_id'),
'shareable': str(volume.get('shareable')).lower(),
'updated_at': volume.get('updated_at')
}
}
def _is_volume_encrypted(self, volume):
"""Determine if volume is encrypted."""
return volume.get('encryption_key_id') is not None
def _get_attachments(self, volume):
"""Retrieve the attachments of the volume object."""
attachments = []
if volume['attach_status'] == 'attached':
attaches = volume.get('volume_attachment', [])
for attachment in attaches:
if attachment.get('attach_status') == 'attached':
a = {'id': attachment.get('volume_id'),
'attachment_id': attachment.get('id'),
'volume_id': attachment.get('volume_id'),
'server_id': attachment.get('instance_uuid'),
'host_name': attachment.get('attached_host'),
'device': attachment.get('mountpoint'),
}
attachments.append(a)
return attachments
def _get_volume_metadata(self, volume):
"""Retrieve the metadata of the volume object."""
if volume.get('volume_metadata'):
metadata = volume.get('volume_metadata')
return dict((item['key'], item['value']) for item in metadata)
# avoid circular ref when vol is a Volume instance
elif volume.get('metadata') and isinstance(volume.get('metadata'),
dict):
return volume['metadata']
return {}
def _get_volume_type(self, volume):
"""Retrieve the type the volume object."""
if volume['volume_type_id'] and volume.get('volume_type'):
return volume['volume_type']['name']
else:
return volume['volume_type_id']
def _list_view(self, func, request, volumes, coll_name=_collection_name):
"""Provide a view for a list of volumes.
:param func: Function used to format the volume data
:param request: API request
        :param volumes: List of volumes in dictionary format
:param coll_name: Name of collection, used to generate the next link
for a pagination query
:returns: Volume data in dictionary format
"""
volumes_list = [func(request, volume)['volume'] for volume in volumes]
volumes_links = self._get_collection_links(request,
volumes,
coll_name)
volumes_dict = dict(volumes=volumes_list)
if volumes_links:
volumes_dict['volumes_links'] = volumes_links
return volumes_dict
| apache-2.0 | 6,775,183,563,880,163,000 | 40.561151 | 78 | 0.556344 | false |
pattisdr/osf.io | api_tests/schemas/views/test_registration_schemas_detail.py | 1 | 3072 | import pytest
from api.base.settings.defaults import API_BASE
from osf.models import RegistrationSchema
from osf_tests.factories import (
AuthUserFactory,
)
from website.project.metadata.schemas import LATEST_SCHEMA_VERSION
@pytest.mark.django_db
class TestMetaSchemaDetail:
@pytest.fixture()
def user(self):
return AuthUserFactory()
@pytest.fixture()
def schema(self):
return RegistrationSchema.objects.filter(
name='Prereg Challenge',
schema_version=LATEST_SCHEMA_VERSION
).first()
def test_deprecated_metaschemas_routes(self, app, user, schema):
# test base /metaschemas/ GET with min version
url = '/{}metaschemas/?version=2.7'.format(API_BASE)
res = app.get(url, auth=user.auth)
assert res.status_code == 200
# test GET with higher version
url = '/{}metaschemas/?version=2.8'.format(API_BASE)
res = app.get(url, auth=user.auth, expect_errors=True)
assert res.status_code == 404
assert res.json['errors'][0]['detail'] == 'This route has been deprecated. It was last available in version 2.7'
# test /metaschemas/registrations/
url = '/{}metaschemas/registrations/{}/?version=2.8'.format(API_BASE, schema._id)
res = app.get(url, auth=user.auth)
assert res.status_code == 200
# test /metaschemas/registrations/ deprecated version
url = '/{}metaschemas/registrations/{}/?version=2.9'.format(API_BASE, schema._id)
res = app.get(url, auth=user.auth, expect_errors=True)
assert res.status_code == 404
assert res.json['errors'][0]['detail'] == 'This route has been deprecated. It was last available in version 2.8'
def test_schemas_detail_visibility(self, app, user, schema):
# test_pass_authenticated_user_can_retrieve_schema
url = '/{}schemas/registrations/{}/'.format(API_BASE, schema._id)
res = app.get(url, auth=user.auth)
assert res.status_code == 200
data = res.json['data']['attributes']
assert data['name'] == 'Prereg Challenge'
assert data['schema_version'] == 2
assert data['active']
assert res.json['data']['id'] == schema._id
# test_pass_unauthenticated_user_can_view_schemas
res = app.get(url)
assert res.status_code == 200
# test_inactive_metaschema_returned
inactive_schema = RegistrationSchema.objects.get(
name='Election Research Preacceptance Competition', active=False)
url = '/{}schemas/registrations/{}/'.format(API_BASE, inactive_schema._id)
res = app.get(url)
assert res.status_code == 200
assert res.json['data']['attributes']['name'] == 'Election Research Preacceptance Competition'
assert res.json['data']['attributes']['active'] is False
# test_invalid_metaschema_not_found
url = '/{}schemas/registrations/garbage/'.format(API_BASE)
res = app.get(url, auth=user.auth, expect_errors=True)
assert res.status_code == 404
| apache-2.0 | -8,240,229,088,860,935,000 | 39.96 | 120 | 0.643555 | false |
vortex-ape/scikit-learn | sklearn/decomposition/tests/test_dict_learning.py | 7 | 14472 | from __future__ import division
import pytest
import numpy as np
import itertools
from sklearn.exceptions import ConvergenceWarning
from sklearn.utils import check_array
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_less
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import ignore_warnings
from sklearn.utils.testing import TempMemmap
from sklearn.decomposition import DictionaryLearning
from sklearn.decomposition import MiniBatchDictionaryLearning
from sklearn.decomposition import SparseCoder
from sklearn.decomposition import dict_learning_online
from sklearn.decomposition import sparse_encode
rng_global = np.random.RandomState(0)
n_samples, n_features = 10, 8
X = rng_global.randn(n_samples, n_features)
def test_sparse_encode_shapes_omp():
rng = np.random.RandomState(0)
algorithms = ['omp', 'lasso_lars', 'lasso_cd', 'lars', 'threshold']
for n_components, n_samples in itertools.product([1, 5], [1, 9]):
X_ = rng.randn(n_samples, n_features)
dictionary = rng.randn(n_components, n_features)
for algorithm, n_jobs in itertools.product(algorithms, [1, 3]):
code = sparse_encode(X_, dictionary, algorithm=algorithm,
n_jobs=n_jobs)
assert_equal(code.shape, (n_samples, n_components))
def test_dict_learning_shapes():
n_components = 5
dico = DictionaryLearning(n_components, random_state=0).fit(X)
assert_equal(dico.components_.shape, (n_components, n_features))
n_components = 1
dico = DictionaryLearning(n_components, random_state=0).fit(X)
assert_equal(dico.components_.shape, (n_components, n_features))
assert_equal(dico.transform(X).shape, (X.shape[0], n_components))
def test_dict_learning_overcomplete():
n_components = 12
dico = DictionaryLearning(n_components, random_state=0).fit(X)
assert_true(dico.components_.shape == (n_components, n_features))
# positive lars deprecated 0.22
@pytest.mark.filterwarnings('ignore::DeprecationWarning')
@pytest.mark.parametrize("transform_algorithm", [
"lasso_lars",
"lasso_cd",
"lars",
"threshold",
])
@pytest.mark.parametrize("positive_code", [
False,
True,
])
@pytest.mark.parametrize("positive_dict", [
False,
True,
])
def test_dict_learning_positivity(transform_algorithm,
positive_code,
positive_dict):
n_components = 5
dico = DictionaryLearning(
n_components, transform_algorithm=transform_algorithm, random_state=0,
positive_code=positive_code, positive_dict=positive_dict).fit(X)
code = dico.transform(X)
if positive_dict:
assert_true((dico.components_ >= 0).all())
else:
assert_true((dico.components_ < 0).any())
if positive_code:
assert_true((code >= 0).all())
else:
assert_true((code < 0).any())
def test_dict_learning_reconstruction():
n_components = 12
dico = DictionaryLearning(n_components, transform_algorithm='omp',
transform_alpha=0.001, random_state=0)
code = dico.fit(X).transform(X)
assert_array_almost_equal(np.dot(code, dico.components_), X)
dico.set_params(transform_algorithm='lasso_lars')
code = dico.transform(X)
assert_array_almost_equal(np.dot(code, dico.components_), X, decimal=2)
# used to test lars here too, but there's no guarantee the number of
# nonzero atoms is right.
def test_dict_learning_reconstruction_parallel():
# regression test that parallel reconstruction works with n_jobs=-1
n_components = 12
dico = DictionaryLearning(n_components, transform_algorithm='omp',
transform_alpha=0.001, random_state=0, n_jobs=-1)
code = dico.fit(X).transform(X)
assert_array_almost_equal(np.dot(code, dico.components_), X)
dico.set_params(transform_algorithm='lasso_lars')
code = dico.transform(X)
assert_array_almost_equal(np.dot(code, dico.components_), X, decimal=2)
def test_dict_learning_lassocd_readonly_data():
n_components = 12
with TempMemmap(X) as X_read_only:
dico = DictionaryLearning(n_components, transform_algorithm='lasso_cd',
transform_alpha=0.001, random_state=0,
n_jobs=-1)
with ignore_warnings(category=ConvergenceWarning):
code = dico.fit(X_read_only).transform(X_read_only)
assert_array_almost_equal(np.dot(code, dico.components_), X_read_only,
decimal=2)
def test_dict_learning_nonzero_coefs():
n_components = 4
dico = DictionaryLearning(n_components, transform_algorithm='lars',
transform_n_nonzero_coefs=3, random_state=0)
code = dico.fit(X).transform(X[np.newaxis, 1])
assert_true(len(np.flatnonzero(code)) == 3)
dico.set_params(transform_algorithm='omp')
code = dico.transform(X[np.newaxis, 1])
assert_equal(len(np.flatnonzero(code)), 3)
def test_dict_learning_unknown_fit_algorithm():
n_components = 5
dico = DictionaryLearning(n_components, fit_algorithm='<unknown>')
assert_raises(ValueError, dico.fit, X)
def test_dict_learning_split():
n_components = 5
dico = DictionaryLearning(n_components, transform_algorithm='threshold',
random_state=0)
code = dico.fit(X).transform(X)
dico.split_sign = True
split_code = dico.transform(X)
assert_array_almost_equal(split_code[:, :n_components] -
split_code[:, n_components:], code)
def test_dict_learning_online_shapes():
rng = np.random.RandomState(0)
n_components = 8
code, dictionary = dict_learning_online(X, n_components=n_components,
alpha=1, random_state=rng)
assert_equal(code.shape, (n_samples, n_components))
assert_equal(dictionary.shape, (n_components, n_features))
assert_equal(np.dot(code, dictionary).shape, X.shape)
# positive lars deprecated 0.22
@pytest.mark.filterwarnings('ignore::DeprecationWarning')
@pytest.mark.parametrize("transform_algorithm", [
"lasso_lars",
"lasso_cd",
"lars",
"threshold",
])
@pytest.mark.parametrize("positive_code", [
False,
True,
])
@pytest.mark.parametrize("positive_dict", [
False,
True,
])
def test_dict_learning_online_positivity(transform_algorithm,
positive_code,
positive_dict):
rng = np.random.RandomState(0)
n_components = 8
dico = MiniBatchDictionaryLearning(
n_components, transform_algorithm=transform_algorithm, random_state=0,
positive_code=positive_code, positive_dict=positive_dict).fit(X)
code = dico.transform(X)
if positive_dict:
assert_true((dico.components_ >= 0).all())
else:
assert_true((dico.components_ < 0).any())
if positive_code:
assert_true((code >= 0).all())
else:
assert_true((code < 0).any())
code, dictionary = dict_learning_online(X, n_components=n_components,
alpha=1, random_state=rng,
positive_dict=positive_dict,
positive_code=positive_code)
if positive_dict:
assert_true((dictionary >= 0).all())
else:
assert_true((dictionary < 0).any())
if positive_code:
assert_true((code >= 0).all())
else:
assert_true((code < 0).any())
def test_dict_learning_online_verbosity():
n_components = 5
# test verbosity
from sklearn.externals.six.moves import cStringIO as StringIO
import sys
old_stdout = sys.stdout
try:
sys.stdout = StringIO()
dico = MiniBatchDictionaryLearning(n_components, n_iter=20, verbose=1,
random_state=0)
dico.fit(X)
dico = MiniBatchDictionaryLearning(n_components, n_iter=20, verbose=2,
random_state=0)
dico.fit(X)
dict_learning_online(X, n_components=n_components, alpha=1, verbose=1,
random_state=0)
dict_learning_online(X, n_components=n_components, alpha=1, verbose=2,
random_state=0)
finally:
sys.stdout = old_stdout
assert_true(dico.components_.shape == (n_components, n_features))
def test_dict_learning_online_estimator_shapes():
n_components = 5
dico = MiniBatchDictionaryLearning(n_components, n_iter=20, random_state=0)
dico.fit(X)
assert_true(dico.components_.shape == (n_components, n_features))
def test_dict_learning_online_overcomplete():
n_components = 12
dico = MiniBatchDictionaryLearning(n_components, n_iter=20,
random_state=0).fit(X)
assert_true(dico.components_.shape == (n_components, n_features))
def test_dict_learning_online_initialization():
n_components = 12
rng = np.random.RandomState(0)
V = rng.randn(n_components, n_features)
dico = MiniBatchDictionaryLearning(n_components, n_iter=0,
dict_init=V, random_state=0).fit(X)
assert_array_equal(dico.components_, V)
def test_dict_learning_online_readonly_initialization():
n_components = 12
rng = np.random.RandomState(0)
V = rng.randn(n_components, n_features)
V.setflags(write=False)
MiniBatchDictionaryLearning(n_components, n_iter=1, dict_init=V,
random_state=0, shuffle=False).fit(X)
def test_dict_learning_online_partial_fit():
n_components = 12
rng = np.random.RandomState(0)
V = rng.randn(n_components, n_features) # random init
V /= np.sum(V ** 2, axis=1)[:, np.newaxis]
dict1 = MiniBatchDictionaryLearning(n_components, n_iter=10 * len(X),
batch_size=1,
alpha=1, shuffle=False, dict_init=V,
random_state=0).fit(X)
dict2 = MiniBatchDictionaryLearning(n_components, alpha=1,
n_iter=1, dict_init=V,
random_state=0)
for i in range(10):
for sample in X:
dict2.partial_fit(sample[np.newaxis, :])
assert_true(not np.all(sparse_encode(X, dict1.components_, alpha=1) ==
0))
assert_array_almost_equal(dict1.components_, dict2.components_,
decimal=2)
def test_sparse_encode_shapes():
n_components = 12
rng = np.random.RandomState(0)
V = rng.randn(n_components, n_features) # random init
V /= np.sum(V ** 2, axis=1)[:, np.newaxis]
for algo in ('lasso_lars', 'lasso_cd', 'lars', 'omp', 'threshold'):
code = sparse_encode(X, V, algorithm=algo)
assert_equal(code.shape, (n_samples, n_components))
# positive lars deprecated 0.22
@pytest.mark.filterwarnings('ignore::DeprecationWarning')
@pytest.mark.parametrize("positive", [
False,
True,
])
def test_sparse_encode_positivity(positive):
n_components = 12
rng = np.random.RandomState(0)
V = rng.randn(n_components, n_features) # random init
V /= np.sum(V ** 2, axis=1)[:, np.newaxis]
for algo in ('lasso_lars', 'lasso_cd', 'lars', 'threshold'):
code = sparse_encode(X, V, algorithm=algo, positive=positive)
if positive:
assert_true((code >= 0).all())
else:
assert_true((code < 0).any())
try:
sparse_encode(X, V, algorithm='omp', positive=positive)
except ValueError:
if not positive:
raise
def test_sparse_encode_input():
n_components = 100
rng = np.random.RandomState(0)
V = rng.randn(n_components, n_features) # random init
V /= np.sum(V ** 2, axis=1)[:, np.newaxis]
Xf = check_array(X, order='F')
for algo in ('lasso_lars', 'lasso_cd', 'lars', 'omp', 'threshold'):
a = sparse_encode(X, V, algorithm=algo)
b = sparse_encode(Xf, V, algorithm=algo)
assert_array_almost_equal(a, b)
def test_sparse_encode_error():
n_components = 12
rng = np.random.RandomState(0)
V = rng.randn(n_components, n_features) # random init
V /= np.sum(V ** 2, axis=1)[:, np.newaxis]
code = sparse_encode(X, V, alpha=0.001)
assert_true(not np.all(code == 0))
assert_less(np.sqrt(np.sum((np.dot(code, V) - X) ** 2)), 0.1)
def test_sparse_encode_error_default_sparsity():
rng = np.random.RandomState(0)
X = rng.randn(100, 64)
D = rng.randn(2, 64)
code = ignore_warnings(sparse_encode)(X, D, algorithm='omp',
n_nonzero_coefs=None)
assert_equal(code.shape, (100, 2))
def test_unknown_method():
n_components = 12
rng = np.random.RandomState(0)
V = rng.randn(n_components, n_features) # random init
assert_raises(ValueError, sparse_encode, X, V, algorithm="<unknown>")
def test_sparse_coder_estimator():
n_components = 12
rng = np.random.RandomState(0)
V = rng.randn(n_components, n_features) # random init
V /= np.sum(V ** 2, axis=1)[:, np.newaxis]
code = SparseCoder(dictionary=V, transform_algorithm='lasso_lars',
transform_alpha=0.001).transform(X)
assert_true(not np.all(code == 0))
assert_less(np.sqrt(np.sum((np.dot(code, V) - X) ** 2)), 0.1)
def test_sparse_coder_parallel_mmap():
# Non-regression test for:
# https://github.com/scikit-learn/scikit-learn/issues/5956
# Test that SparseCoder does not error by passing reading only
# arrays to child processes
rng = np.random.RandomState(777)
n_components, n_features = 40, 64
init_dict = rng.rand(n_components, n_features)
# Ensure that `data` is >2M. Joblib memory maps arrays
# if they are larger than 1MB. The 4 accounts for float32
# data type
n_samples = int(2e6) // (4 * n_features)
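    # With n_features = 64 this gives n_samples = 2e6 // 256 = 7812 rows,
    # i.e. just under 2 MB of float32 data, comfortably above the 1 MB threshold.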
data = np.random.rand(n_samples, n_features).astype(np.float32)
sc = SparseCoder(init_dict, transform_algorithm='omp', n_jobs=2)
sc.fit_transform(data)
| bsd-3-clause | 74,665,017,287,911,260 | 34.91067 | 79 | 0.622996 | false |
ragupta-git/ImcSdk | imcsdk/mometa/bios/BiosVfCDNEnable.py | 1 | 3109 | """This module contains the general information for BiosVfCDNEnable ManagedObject."""
from ...imcmo import ManagedObject
from ...imccoremeta import MoPropertyMeta, MoMeta
from ...imcmeta import VersionMeta
class BiosVfCDNEnableConsts:
VP_CDNENABLE_DISABLED = "Disabled"
VP_CDNENABLE_ENABLED = "Enabled"
_VP_CDNENABLE_DISABLED = "disabled"
_VP_CDNENABLE_ENABLED = "enabled"
VP_CDNENABLE_PLATFORM_DEFAULT = "platform-default"
class BiosVfCDNEnable(ManagedObject):
"""This is BiosVfCDNEnable class."""
consts = BiosVfCDNEnableConsts()
naming_props = set([])
mo_meta = {
"classic": MoMeta("BiosVfCDNEnable", "biosVfCDNEnable", "CDN-Enable", VersionMeta.Version204c, "InputOutput", 0x1f, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"]),
"modular": MoMeta("BiosVfCDNEnable", "biosVfCDNEnable", "CDN-Enable", VersionMeta.Version2013e, "InputOutput", 0x1f, [], ["admin"], [u'biosPlatformDefaults', u'biosSettings'], [], ["Get", "Set"])
}
prop_meta = {
"classic": {
"dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version204c, MoPropertyMeta.READ_WRITE, 0x2, 0, 255, None, [], []),
"rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version204c, MoPropertyMeta.READ_WRITE, 0x4, 0, 255, None, [], []),
"status": MoPropertyMeta("status", "status", "string", VersionMeta.Version204c, MoPropertyMeta.READ_WRITE, 0x8, None, None, None, ["", "created", "deleted", "modified", "removed"], []),
"vp_cdn_enable": MoPropertyMeta("vp_cdn_enable", "vpCDNEnable", "string", VersionMeta.Version204c, MoPropertyMeta.READ_WRITE, 0x10, None, None, None, ["Disabled", "Enabled", "disabled", "enabled", "platform-default"], []),
},
"modular": {
"dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x2, 0, 255, None, [], []),
"rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x4, 0, 255, None, [], []),
"status": MoPropertyMeta("status", "status", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x8, None, None, None, ["", "created", "deleted", "modified", "removed"], []),
"vp_cdn_enable": MoPropertyMeta("vp_cdn_enable", "vpCDNEnable", "string", VersionMeta.Version2013e, MoPropertyMeta.READ_WRITE, 0x10, None, None, None, ["Disabled", "Enabled", "disabled", "enabled", "platform-default"], []),
},
}
prop_map = {
"classic": {
"dn": "dn",
"rn": "rn",
"status": "status",
"vpCDNEnable": "vp_cdn_enable",
},
"modular": {
"dn": "dn",
"rn": "rn",
"status": "status",
"vpCDNEnable": "vp_cdn_enable",
},
}
def __init__(self, parent_mo_or_dn, **kwargs):
self._dirty_mask = 0
self.status = None
self.vp_cdn_enable = None
ManagedObject.__init__(self, "BiosVfCDNEnable", parent_mo_or_dn, **kwargs)
| apache-2.0 | -6,090,074,951,718,473,000 | 43.414286 | 236 | 0.602766 | false |
deljus/predictor | MWUI/API/resources.py | 1 | 37011 | # -*- coding: utf-8 -*-
#
# Copyright 2016, 2017 Ramil Nugmanov <[email protected]>
# This file is part of MWUI.
#
# MWUI is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
from collections import defaultdict
from flask import url_for, request, Response
from flask_login import current_user, login_user
from flask_restful import reqparse, marshal, inputs, Resource
from functools import wraps
from importlib.util import find_spec
from os import path
from pony.orm import db_session, select, left_join
from typing import Dict, Tuple
from uuid import uuid4
from validators import url
from werkzeug import datastructures
from werkzeug.exceptions import HTTPException, Aborter
from .data import get_additives, get_model, get_models_list, format_results
from .redis import RedisCombiner
from .structures import (ModelRegisterFields, TaskPostResponseFields, TaskGetResponseFields, TaskStructureFields,
LogInFields, AdditivesListFields, ModelListFields)
from ..config import (UPLOAD_PATH, REDIS_HOST, REDIS_JOB_TIMEOUT, REDIS_PASSWORD, REDIS_PORT, REDIS_TTL, SWAGGER,
BLOG_POSTS_PER_PAGE)
from ..constants import (StructureStatus, TaskStatus, ModelType, TaskType, StructureType, UserRole, AdditiveType,
ResultType)
from ..logins import UserLogin
from ..models import Task, Structure, Additive, Model, Additiveset, Destination, User, Result
if SWAGGER and find_spec('flask_restful_swagger'):
from flask_restful_swagger import swagger
else:
class Swagger:
@staticmethod
def operation(*args, **kwargs):
def decorator(f):
return f
return decorator
swagger = Swagger()
redis = RedisCombiner(host=REDIS_HOST, port=REDIS_PORT, password=REDIS_PASSWORD, result_ttl=REDIS_TTL,
job_timeout=REDIS_JOB_TIMEOUT)
task_types_desc = ', '.join('{0.value} - {0.name}'.format(x) for x in TaskType)
results_types_desc = ', '.join('{0.value} - {0.name}'.format(x) for x in ResultType)
additives_types_desc = ', '.join('{0.value} - {0.name}'.format(x) for x in AdditiveType)
class Abort512(HTTPException):
code = 512
description = 'task not ready'
original_flask_abort = Aborter(extra={512: Abort512})
def abort(http_status_code, **kwargs):
""" copy-paste from flask-restful
"""
try:
original_flask_abort(http_status_code)
except HTTPException as e:
if len(kwargs):
e.data = kwargs
raise
def fetch_task(task, status):
job = redis.fetch_job(task)
if job is None:
abort(404, message='invalid task id. perhaps this task has already been removed')
if not job:
abort(500, message='modeling server error')
if not job['is_finished']:
abort(512, message='PROCESSING.Task not ready')
if job['result']['status'] != status:
abort(406, message='task status is invalid. task status is [%s]' % job['result']['status'].name)
if job['result']['user'] != current_user.id:
abort(403, message='user access deny. you do not have permission to this task')
return job['result'], job['ended_at']
def dynamic_docstring(*sub):
def decorator(f):
f.__doc__ = f.__doc__.format(*sub)
return f
return decorator
def authenticate(f):
@wraps(f)
def wrapper(*args, **kwargs):
if current_user.is_authenticated:
return f(*args, **kwargs)
abort(401, message=dict(user='not authenticated'))
return wrapper
def auth_admin(f):
@wraps(f)
def wrapper(*args, **kwargs):
auth = request.authorization
if auth:
u = UserLogin.get(auth.username.lower(), auth.password)
if u and u.role_is(UserRole.ADMIN):
return f(*args, **kwargs)
return Response('access deny', 401, {'WWW-Authenticate': 'Basic realm="Login Required"'})
return wrapper
class AuthResource(Resource):
method_decorators = [authenticate]
class AdminResource(Resource):
method_decorators = [auth_admin]
class RegisterModels(AdminResource):
def post(self):
data = marshal(request.get_json(force=True), ModelRegisterFields.resource_fields)
models = data if isinstance(data, list) else [data]
available = {x['name']: [(d['host'], d['port'], d['name']) for d in x['destinations']]
for x in get_models_list(skip_prep=False).values()}
report = []
for m in models:
if m['destinations']:
if m['name'] not in available:
with db_session:
new_m = Model(type=m['type'], name=m['name'], description=m['description'],
example=m['example'])
for d in m['destinations']:
Destination(model=new_m, **d)
report.append(dict(model=new_m.id, name=new_m.name, description=new_m.description,
type=new_m.type.value,
example=new_m.example,
destinations=[dict(host=x.host, port=x.port, name=x.name)
for x in new_m.destinations]))
else:
tmp = []
with db_session:
model = Model.get(name=m['name'])
for d in m['destinations']:
if (d['host'], d['port'], d['name']) not in available[m['name']]:
tmp.append(Destination(model=model, **d))
if tmp:
report.append(dict(model=model.id, name=model.name, description=model.description,
type=model.type.value, example=model.example,
destinations=[dict(host=x.host, port=x.port, name=x.name)
for x in tmp]))
return report, 201
class AvailableModels(AuthResource):
@swagger.operation(
notes='Get available models',
nickname='modellist',
responseClass=ModelListFields.__name__,
responseMessages=[dict(code=200, message="models list"), dict(code=401, message="user not authenticated")])
@dynamic_docstring(ModelType.MOLECULE_MODELING, ModelType.REACTION_MODELING)
def get(self):
"""
Get available models list
response format:
        example - chemical structure in smiles or marvin or cml format
description - description of model. in markdown format.
name - model name
type - model type: {0.value} [{0.name}] or {1.value} [{1.name}]
model - id
"""
out = []
for x in get_models_list(skip_destinations=True, skip_example=False).values():
x['type'] = x['type'].value
out.append(x)
return out, 200
class AvailableAdditives(AuthResource):
@swagger.operation(
notes='Get available additives',
nickname='additives',
responseClass=AdditivesListFields.__name__,
responseMessages=[dict(code=200, message="additives list"), dict(code=401, message="user not authenticated")])
@dynamic_docstring(additives_types_desc)
def get(self):
"""
Get available additives list
response format:
additive - id
name - name of additive
structure - chemical structure in smiles or marvin or cml format
type - additive type: {0}
"""
out = []
for x in get_additives().values():
x['type'] = x['type'].value
out.append(x)
return out, 200
results_fetch = reqparse.RequestParser()
results_fetch.add_argument('page', type=inputs.positive)
class ResultsTask(AuthResource):
@swagger.operation(
notes='Get saved modeled task',
nickname='saved',
responseClass=TaskGetResponseFields.__name__,
parameters=[dict(name='task', description='Task ID', required=True,
allowMultiple=False, dataType='str', paramType='path')],
responseMessages=[dict(code=200, message="modeled task"),
dict(code=401, message="user not authenticated"),
dict(code=403, message='user access deny. you do not have permission to this task'),
dict(code=404, message='invalid task id. perhaps this task has already been removed'),
dict(code=406, message='task status is invalid. only validation tasks acceptable'),
dict(code=500, message="modeling server error"),
dict(code=512, message='task not ready')])
def get(self, task):
"""
Task with modeling results of structures with conditions
all structures include only models with nonempty results lists.
see /task/model get doc.
"""
try:
task = int(task)
except ValueError:
abort(404, message='invalid task id. Use int Luke')
page = results_fetch.parse_args().get('page')
with db_session:
result = Task.get(id=task)
if not result:
abort(404, message='Invalid task id. Perhaps this task has already been removed')
if result.user.id != current_user.id:
abort(403, message='User access deny. You do not have permission to this task')
models = get_models_list(skip_destinations=True)
for v in models.values():
v['type'] = v['type'].value
additives = get_additives()
s = select(s for s in Structure if s.task == result).order_by(Structure.id)
if page:
s = s.page(page, pagesize=BLOG_POSTS_PER_PAGE)
structures = {x.id: dict(structure=x.id, data=x.structure, temperature=x.temperature, pressure=x.pressure,
type=x.structure_type, status=x.structure_status, additives=[], models=[])
for x in s}
r = left_join((s.id, r.model.id, r.key, r.value, r.result_type)
for s in Structure for r in s.results if s.id in structures.keys() and r is not None)
a = left_join((s.id, a.additive.id, a.amount)
for s in Structure for a in s.additives if s.id in structures.keys() and a is not None)
for s, a, aa in a:
tmp = dict(amount=aa)
tmp.update(additives[a])
structures[s]['additives'].append(tmp)
tmp_models = defaultdict(dict)
for s, m, rk, rv, rt in r:
tmp_models[s].setdefault(m, []).append(dict(key=rk, value=rv, type=rt))
for s, mr in tmp_models.items():
for m, r in mr.items():
tmp = dict(results=r)
tmp.update(models[m])
structures[s]['models'].append(tmp)
return dict(task=task, status=TaskStatus.DONE.value, date=result.date.strftime("%Y-%m-%d %H:%M:%S"),
type=result.task_type, user=result.user.id, structures=list(structures.values())), 200
@swagger.operation(
notes='Save modeled task',
nickname='save',
responseClass=TaskPostResponseFields.__name__,
parameters=[dict(name='task', description='Task ID', required=True,
allowMultiple=False, dataType='str', paramType='path')],
responseMessages=[dict(code=201, message="modeled task saved"),
dict(code=401, message="user not authenticated"),
dict(code=403, message='user access deny. you do not have permission to this task'),
dict(code=404, message='invalid task id. perhaps this task has already been removed'),
dict(code=406, message='task status is invalid. only modeled tasks acceptable'),
dict(code=500, message="modeling server error"),
dict(code=512, message='task not ready')])
def post(self, task):
"""
Store in database modeled task
only modeled tasks can be saved.
failed models in structures skipped.
"""
result, ended_at = fetch_task(task, TaskStatus.DONE)
with db_session:
_task = Task(type=result['type'], date=ended_at, user=User[current_user.id])
for s in result['structures']:
_structure = Structure(structure=s['data'], type=s['type'], temperature=s['temperature'],
pressure=s['pressure'], status=s['status'], task=_task)
for a in s['additives']:
Additiveset(additive=Additive[a['additive']], structure=_structure, amount=a['amount'])
for m in s['models']:
for r in m.get('results', []):
Result(model=m['model'], structure=_structure, type=r['type'], key=r['key'], value=r['value'])
return dict(task=_task.id, status=TaskStatus.DONE.value, date=ended_at.strftime("%Y-%m-%d %H:%M:%S"),
type=result['type'].value, user=current_user.id), 201
class ModelTask(AuthResource):
@swagger.operation(
notes='Get modeled task',
nickname='modeled',
responseClass=TaskGetResponseFields.__name__,
parameters=[dict(name='task', description='Task ID', required=True,
allowMultiple=False, dataType='str', paramType='path')],
responseMessages=[dict(code=200, message="modeled task"),
dict(code=401, message="user not authenticated"),
dict(code=403, message='user access deny. you do not have permission to this task'),
dict(code=404, message='invalid task id. perhaps this task has already been removed'),
dict(code=406, message='task status is invalid. only validation tasks acceptable'),
dict(code=500, message="modeling server error"),
dict(code=512, message='task not ready')])
@dynamic_docstring(results_types_desc)
def get(self, task):
"""
Task with results of structures with conditions modeling
all structures include models with results lists.
failed models contain empty results lists.
see also /task/prepare get doc.
available model results response types: {0}
"""
page = results_fetch.parse_args().get('page')
return format_results(task, fetch_task(task, TaskStatus.DONE), page=page), 200
@swagger.operation(
notes='Create modeling task',
nickname='modeling',
responseClass=TaskPostResponseFields.__name__,
parameters=[dict(name='task', description='Task ID', required=True,
allowMultiple=False, dataType='str', paramType='path'),
dict(name='structures', description='Conditions and selected models for structure[s]',
required=True, allowMultiple=False, dataType=TaskStructureFields.__name__, paramType='body')],
responseMessages=[dict(code=201, message="modeling task created"),
dict(code=400, message="invalid structure data"),
dict(code=401, message="user not authenticated"),
dict(code=403, message='user access deny. you do not have permission to this task'),
dict(code=404, message='invalid task id. perhaps this task has already been removed'),
dict(code=406, message='task status is invalid. only validation tasks acceptable'),
dict(code=500, message="modeling server error"),
dict(code=512, message='task not ready')])
def post(self, task):
"""
Modeling task structures and conditions
send only changed conditions or todelete marks. see task/prepare doc.
data, status and type fields unusable.
"""
data = marshal(request.get_json(force=True), TaskStructureFields.resource_fields)
result = fetch_task(task, TaskStatus.PREPARED)[0]
prepared = {s['structure']: s for s in result['structures']}
structures = data if isinstance(data, list) else [data]
tmp = {x['structure']: x for x in structures if x['structure'] in prepared}
if 0 in tmp:
abort(400, message='invalid structure data')
additives = get_additives()
models = get_models_list()
for s, d in tmp.items():
if d['todelete']:
prepared.pop(s)
else:
ps = prepared[s]
if d['additives'] is not None:
alist = []
for a in d['additives']:
if a['additive'] in additives and (0 < a['amount'] <= 1
if additives[a['additive']]['type'] == AdditiveType.SOLVENT
else a['amount'] > 0):
a.update(additives[a['additive']])
alist.append(a)
ps['additives'] = alist
if result['type'] != TaskType.MODELING: # for search tasks assign compatible models
ps['models'] = [get_model(ModelType.select(ps['type'], result['type']))]
elif d['models'] is not None and ps['status'] == StructureStatus.CLEAR:
ps['models'] = [models[m['model']].copy() for m in d['models'] if m['model'] in models and
models[m['model']]['type'].compatible(ps['type'], TaskType.MODELING)]
if d['temperature']:
ps['temperature'] = d['temperature']
if d['pressure']:
ps['pressure'] = d['pressure']
result['structures'] = list(prepared.values())
result['status'] = TaskStatus.MODELING
new_job = redis.new_job(result)
if new_job is None:
abort(500, message='modeling server error')
return dict(task=new_job['id'], status=result['status'].value, type=result['type'].value,
date=new_job['created_at'].strftime("%Y-%m-%d %H:%M:%S"), user=result['user']), 201
class PrepareTask(AuthResource):
@swagger.operation(
notes='Get validated task',
nickname='prepared',
responseClass=TaskGetResponseFields.__name__,
parameters=[dict(name='task', description='Task ID', required=True,
allowMultiple=False, dataType='str', paramType='path')],
responseMessages=[dict(code=200, message="validated task"),
dict(code=401, message="user not authenticated"),
dict(code=403, message='user access deny. you do not have permission to this task'),
dict(code=404, message='invalid task id. perhaps this task has already been removed'),
dict(code=406, message='task status is invalid. only validation tasks acceptable'),
dict(code=500, message="modeling server error"),
dict(code=512, message='task not ready')])
@dynamic_docstring(ModelType.PREPARER, StructureStatus.CLEAR, StructureStatus.RAW, StructureStatus.HAS_ERROR,
ResultType.TEXT, StructureType.REACTION, StructureType.MOLECULE)
def get(self, task):
"""
Task with validated structure and conditions data
        all structures have check status = {1.value} [{1.name}] - all checks passed. {3.value} [{3.name}] - structure \
has errors. {2.value} [{2.name}] - validation failed.
structure type also autoassigned: {5.value} [{5.name}] or {6.value} [{6.name}].
all newly validated structures include model with type = {0.value} [{0.name}] with results containing \
errors or warning information.
        if the task was not newly created by the upload file or create task api it can contain models with types different from \
        {0.value} [{0.name}] which were previously selected on revalidation for structures with status = {1.value} [{1.name}].
        these models contain empty results lists.
        if the preparer model failed [due to server lag etc] structures are returned with status = {2.value} [{2.name}] and\
        {0.name} model with an empty results list. In this case it is possible to resend this task to revalidation as is.
        for an upload task that failed validation an empty structure list is returned and resending is impossible.
model results response structure:
key: string - header
type: data type = {4.value} [{4.name}] - plain text information
value: string - body
"""
page = results_fetch.parse_args().get('page')
return format_results(task, fetch_task(task, TaskStatus.PREPARED), page=page), 200
@swagger.operation(
notes='Create revalidation task',
nickname='prepare',
responseClass=TaskPostResponseFields.__name__,
parameters=[dict(name='task', description='Task ID', required=True,
allowMultiple=False, dataType='str', paramType='path'),
dict(name='structures', description='Structure[s] of molecule or reaction with optional conditions',
required=True, allowMultiple=False, dataType=TaskStructureFields.__name__, paramType='body')],
responseMessages=[dict(code=201, message="revalidation task created"),
dict(code=400, message="invalid structure data"),
dict(code=401, message="user not authenticated"),
dict(code=403, message='user access deny. you do not have permission to this task'),
dict(code=404, message='invalid task id. perhaps this task has already been removed'),
dict(code=406, message='task status is invalid. only validation tasks acceptable'),
dict(code=500, message="modeling server error"),
dict(code=512, message='task not ready')])
@dynamic_docstring(StructureStatus.CLEAR, StructureType.REACTION, ModelType.REACTION_MODELING,
StructureType.MOLECULE, ModelType.MOLECULE_MODELING)
def post(self, task):
"""
Revalidate task structures and conditions
possible to send list of TaskStructureFields.
send only changed data and structure id's. e.g. if user changed only temperature in structure 4 json should be
{{"temperature": new_value, "structure": 4}} or in list [{{"temperature": new_value, "structure": 4}}]
        unchanged data is kept by the server as is.
structures status and type fields not usable
todelete field marks structure for delete.
        example json: [{{"structure": 5, "todelete": true}}]
structure with id 5 in task will be removed from list.
data field should be a string containing marvin document or cml or smiles/smirks.
        models field is usable if structure has status = {0.value} [{0.name}] and its data was not changed.
for structure type = {1.value} [{1.name}] acceptable only model types = {2.value} [{2.name}]
and vice versa for type = {3.value} [{3.name}] only model types = {4.value} [{4.name}].
only model id field needed. e.g. [{{"models": [{{model: 1}}], "structure": 3}}]
for SEARCH type tasks models field unusable.
see also task/create doc.
"""
data = marshal(request.get_json(force=True), TaskStructureFields.resource_fields)
result = fetch_task(task, TaskStatus.PREPARED)[0]
preparer = get_model(ModelType.PREPARER)
prepared = {s['structure']: s for s in result['structures']}
structures = data if isinstance(data, list) else [data]
tmp = {x['structure']: x for x in structures if x['structure'] in prepared}
if 0 in tmp:
abort(400, message='invalid structure data')
additives = get_additives()
models = get_models_list()
for s, d in tmp.items():
if d['todelete']:
prepared.pop(s)
else:
ps = prepared[s]
if d['additives'] is not None:
alist = []
for a in d['additives']:
if a['additive'] in additives and (0 < a['amount'] <= 1
if additives[a['additive']]['type'] == AdditiveType.SOLVENT
else a['amount'] > 0):
a.update(additives[a['additive']])
alist.append(a)
ps['additives'] = alist
if d['data']:
ps['data'] = d['data']
ps['status'] = StructureStatus.RAW
ps['models'] = [preparer.copy()]
                elif ps['status'] == StructureStatus.RAW:  # renew preparer model.
ps['models'] = [preparer.copy()]
elif ps['status'] == StructureStatus.CLEAR:
if d['models'] is not None:
ps['models'] = [models[m['model']].copy() for m in d['models'] if m['model'] in models and
models[m['model']]['type'].compatible(ps['type'], TaskType.MODELING)]
else: # recheck models for existing
ps['models'] = [m.copy() for m in ps['models'] if m['model'] in models]
if d['temperature']:
ps['temperature'] = d['temperature']
if d['pressure']:
ps['pressure'] = d['pressure']
result['structures'] = list(prepared.values())
result['status'] = TaskStatus.PREPARING
new_job = redis.new_job(result)
if new_job is None:
abort(500, message='modeling server error')
return dict(task=new_job['id'], status=result['status'].value, type=result['type'].value,
date=new_job['created_at'].strftime("%Y-%m-%d %H:%M:%S"), user=result['user']), 201
class CreateTask(AuthResource):
@swagger.operation(
notes='Create validation task',
nickname='create',
responseClass=TaskPostResponseFields.__name__,
parameters=[dict(name='_type', description='Task type ID: %s' % task_types_desc, required=True,
allowMultiple=False, dataType='int', paramType='path'),
dict(name='structures', description='Structure[s] of molecule or reaction with optional conditions',
required=True, allowMultiple=False, dataType=TaskStructureFields.__name__, paramType='body')],
responseMessages=[dict(code=201, message="validation task created"),
dict(code=400, message="invalid structure data"),
dict(code=401, message="user not authenticated"),
dict(code=403, message="invalid task type"),
dict(code=500, message="modeling server error")])
@dynamic_docstring(AdditiveType.SOLVENT, TaskStatus.PREPARING,
TaskType.MODELING, TaskType.SIMILARITY, TaskType.SUBSTRUCTURE)
def post(self, _type):
"""
Create new task
possible to send list of TaskStructureFields.
e.g. [TaskStructureFields1, TaskStructureFields2,...]
todelete, status, type and models fields not usable
data field is required. field should be a string containing marvin document or cml or smiles/smirks
additive should be in list of available additives.
        amount should be in range 0 to 1 for additives type = {0.value} [{0.name}], and positive for others.
temperature in Kelvin and pressure in Bar also should be positive.
response include next information:
date: creation date time
status: {1.value} [{1.name}]
task: task id
type: {2.value} [{2.name}] or {3.value} [{3.name}] or {4.value} [{4.name}]
user: user id
"""
try:
_type = TaskType(_type)
except ValueError:
abort(403, message='invalid task type [%s]. valid values are %s' % (_type, task_types_desc))
data = marshal(request.get_json(force=True), TaskStructureFields.resource_fields)
additives = get_additives()
preparer = get_model(ModelType.PREPARER)
structures = data if isinstance(data, list) else [data]
data = []
for s, d in enumerate(structures, start=1):
if d['data']:
alist = []
for a in d['additives'] or []:
if a['additive'] in additives and (0 < a['amount'] <= 1
if additives[a['additive']]['type'] == AdditiveType.SOLVENT
else a['amount'] > 0):
a.update(additives[a['additive']])
alist.append(a)
data.append(dict(structure=s, data=d['data'], status=StructureStatus.RAW, type=StructureType.UNDEFINED,
pressure=d['pressure'], temperature=d['temperature'],
additives=alist, models=[preparer.copy()]))
if not data:
abort(400, message='invalid structure data')
new_job = redis.new_job(dict(status=TaskStatus.NEW, type=_type, user=current_user.id, structures=data))
if new_job is None:
abort(500, message='modeling server error')
return dict(task=new_job['id'], status=TaskStatus.PREPARING.value, type=_type.value,
date=new_job['created_at'].strftime("%Y-%m-%d %H:%M:%S"), user=current_user.id), 201
uf_post = reqparse.RequestParser()
uf_post.add_argument('file.url', type=str)
uf_post.add_argument('file.path', type=str)
uf_post.add_argument('structures', type=datastructures.FileStorage, location='files')
class UploadTask(AuthResource):
@swagger.operation(
notes='Create validation task from uploaded structures file',
nickname='upload',
responseClass=TaskPostResponseFields.__name__,
parameters=[dict(name='_type', description='Task type ID: %s' % task_types_desc, required=True,
allowMultiple=False, dataType='int', paramType='path'),
dict(name='structures', description='RDF SDF MRV SMILES file', required=True,
allowMultiple=False, dataType='file', paramType='body')],
responseMessages=[dict(code=201, message="validation task created"),
dict(code=401, message="user not authenticated"),
dict(code=400, message="structure file required"),
dict(code=403, message="invalid task type"),
dict(code=500, message="modeling server error")])
def post(self, _type: int) -> Tuple[Dict, int]:
"""
Structures file upload
Need for batch mode.
        Any chemical structure format convertible with Chemaxon JChem can be passed.
        conditions in files should be present in the following key-value format:
additive.amount.1 --> string = float [possible delimiters: :, :=, =]
temperature --> float
pressure --> float
additive.2 --> string
amount.2 --> float
        where .1[.2] is the index of the additive. it is possible to set multiple additives.
example [RDF]:
$DTYPE additive.amount.1
$DATUM water = .4
$DTYPE temperature
$DATUM 298
$DTYPE pressure
$DATUM 0.9
$DTYPE additive.2
$DATUM DMSO
$DTYPE amount.2
$DATUM 0.6
parsed as:
temperature = 298
pressure = 0.9
additives = [{"name": "water", "amount": 0.4, "type": x, "additive": y1}, \
{"name": "DMSO", "amount": 0.6, "type": x, "additive": y2}]
where "type" and "additive" obtained from DataBase by name
see task/create doc about acceptable conditions values and additives types and response structure.
"""
try:
_type = TaskType(_type)
except ValueError:
abort(403, message='invalid task type [%s]. valid values are %s' % (_type, task_types_desc))
args = uf_post.parse_args()
file_url = None
if args['file.url']: # smart frontend
if url(args['file.url']):
file_url = args['file.url']
elif args['file.path']: # NGINX upload
file_name = path.basename(args['file.path'])
if path.exists(path.join(UPLOAD_PATH, file_name)):
file_url = url_for('.batch_file', file=file_name, _external=True)
elif args['structures']: # flask
file_name = str(uuid4())
args['structures'].save(path.join(UPLOAD_PATH, file_name))
file_url = url_for('.batch_file', file=file_name, _external=True)
if file_url is None:
abort(400, message='structure file required')
new_job = redis.new_job(dict(status=TaskStatus.NEW, type=_type, user=current_user.id,
structures=[dict(data=dict(url=file_url), status=StructureStatus.RAW,
type=StructureType.UNDEFINED,
models=[get_model(ModelType.PREPARER)])]))
if new_job is None:
abort(500, message='modeling server error')
return dict(task=new_job['id'], status=TaskStatus.PREPARING.value, type=_type.value,
date=new_job['created_at'].strftime("%Y-%m-%d %H:%M:%S"), user=current_user.id), 201
class LogIn(Resource):
@swagger.operation(
notes='App login',
nickname='login',
parameters=[dict(name='credentials', description='User credentials', required=True,
allowMultiple=False, dataType=LogInFields.__name__, paramType='body')],
responseMessages=[dict(code=200, message="logged in"),
dict(code=400, message="invalid data"),
dict(code=403, message="bad credentials")])
def post(self):
"""
Get auth token
Token returned in headers as remember_token.
for use task api send in requests headers Cookie: 'remember_token=_token_'
"""
data = request.get_json(force=True)
if data:
username = data.get('user')
password = data.get('password')
if username and password:
user = UserLogin.get(username.lower(), password)
if user:
login_user(user, remember=True)
return dict(message='logged in'), 200
return dict(message='bad credentials'), 403
class MagicNumbers(AuthResource):
@swagger.operation(
notes='Magic Numbers',
nickname='magic',
parameters=[],
responseMessages=[dict(code=200, message="magic numbers"),
dict(code=401, message="user not authenticated")])
def get(self):
"""
Get Magic numbers
Dict of all magic numbers with values.
"""
data = {x.__name__: self.__to_dict(x) for x in [TaskType, TaskStatus, StructureType, StructureStatus,
AdditiveType, ResultType]}
data['ModelType'] = {ModelType.MOLECULE_MODELING.name: ModelType.MOLECULE_MODELING.value,
ModelType.REACTION_MODELING.name: ModelType.REACTION_MODELING.value}
return data, 200
@staticmethod
def __to_dict(enum):
return {x.name: x.value for x in enum}
| agpl-3.0 | -434,087,189,328,953,200 | 44.523985 | 120 | 0.574559 | false |
Zottel/Startzeile | startzeile/test/web.py | 1 | 1337 | import unittest
from startzeile.web import create_app
class WebTestCase(unittest.TestCase):
def setUp(self):
app = create_app(None, None)
app.config['TESTING'] = True
self.client = app.test_client()
def tearDown(self):
pass
class BasicDefaultTestCase(WebTestCase):
def runTest(self):
resp = self.client.get('/')
self.assertEqual(resp.status_code, 200)
#print(resp.status_code)
#print(resp.headers)
#print(resp.data)
class BasicLinkTestCase(WebTestCase):
def runTest(self):
resp = self.client.get('/link/%d.html' % 0)
self.assertEqual(resp.status_code, 200)
#print(resp.status_code)
#print(resp.headers)
#print(resp.data)
class BasicQueryTestCase(WebTestCase):
def runTest(self):
resp = self.client.get('/query/all.html')
self.assertEqual(resp.status_code, 200)
#print(resp.status_code)
#print(resp.headers)
#print(resp.data)
class BasicTagsTestCase(WebTestCase):
def runTest(self):
resp = self.client.get('/tags/test.html')
self.assertEqual(resp.status_code, 200)
#print(resp.status_code)
#print(resp.headers)
#print(resp.data)
def createSuite():
suite = unittest.TestSuite()
tests = []
tests.append(BasicDefaultTestCase())
tests.append(BasicLinkTestCase())
tests.append(BasicQueryTestCase())
tests.append(BasicTagsTestCase())
suite.addTests(tests)
return suite
| mit | 5,846,090,764,233,196,000 | 21.661017 | 45 | 0.727001 | false |
emanuele-f/python-pesci | pesci/environment.py | 1 | 3727 | #!/bin/env python2
# -*- coding: utf-8 -*-
#
# Emanuele Faranda <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
import ast
import pesci.code
from pesci.errors import *
class ExecutionEnvironment:
def __init__(self):
self.reset()
def reset(self):
self.code = None
self.ip = -1
self._contexts = []
self._stack = []
self.iterator = None
# create global context
self.push_context()
def setup(self, code):
self.code = code
self.ip = 0
self.iterator = None
def setvar(self, vid, val):
if vid and vid[0] == "_":
raise EnvBadSymbolName(self, vid)
self._set_in_context(self._get_context(vid), vid, val)
def getvar(self, vid):
# Try to get defined function or name
try:
var = self._get_from_contexts(vid)
except KeyError:
raise EnvSymbolNotFound(self, vid)
return var
"""set multiple k->v at one """
def loadvars(self, vdict):
for k,v in vdict.items():
self.setvar(k, v)
def _set_in_context(self, context, key, val):
context[key] = val
def _get_from_contexts(self, key):
# search a key in the context stack
for context in reversed(self._contexts):
if context.has_key(key):
return context[key]
raise KeyError(key)
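    # Note on lookup order: contexts are searched innermost-first (reversed stack),
    # so a name defined in the current context shadows the same name in outer ones.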
"""push a value into the call stack"""
def push(self, val):
self._stack.append(val)
def pop(self):
return self._stack.pop()
def popall(self):
s = self._stack
self._stack = []
return s
"""context: additional names pool"""
def push_context(self):
# a trick to remember global variables
context = {'__globals__':[]}
self._contexts.append(context)
def pop_context(self):
if len(self._contexts) <= 1:
# NB: cannot pop the global context
raise EnvContextsEmpty(self)
return self._contexts.pop()
def _get_context(self, var):
cur = self.get_current_context()
if var in cur['__globals__']:
return self.get_global_context()
return cur
def get_global_context(self):
return self._contexts[0]
def get_current_context(self):
return self._contexts[-1]
def get_visible_context(self):
# determine the currently visible context variables
ctx = {}
for env in self._contexts:
for key,val in env.items():
if key[0] != "_":
ctx[key] = val
return ctx
def add_global(self, name):
gls = self.get_current_context()['__globals__']
if not name in gls:
gls.append(name)
def get_description(self):
env = self.get_visible_context()
return "ENV :%d:\n%s\n%s\n%s" % (self.ip, "-" * 10,
"\n".join(["%s: %s" % (key, env[key]) for key in sorted(env.keys())]),
"-" * 10)
| gpl-3.0 | 7,393,122,344,925,203,000 | 28.346457 | 82 | 0.579823 | false |
malkavi/Flexget | flexget/components/ftp/ftp_download.py | 1 | 8489 | import ftplib
import os
import time
from urllib.parse import unquote, urlparse
from loguru import logger
from flexget import plugin
from flexget.event import event
from flexget.utils.pathscrub import pathscrub
from flexget.utils.template import RenderError
logger = logger.bind(name='ftp')
class OutputFtp:
"""
Ftp Download plugin
input-url: ftp://<user>:<password>@<host>:<port>/<path to file>
Example: ftp://anonymous:[email protected]:21/torrent-files-dir
config:
ftp_download:
use-ssl: <True/False>
ftp_tmp_path: <path>
delete_origin: <True/False>
download_empty_dirs: <True/False>
TODO:
- Resume downloads
- create banlists files
- validate connection parameters
"""
schema = {
'type': 'object',
'properties': {
'use-ssl': {'type': 'boolean', 'default': False},
'ftp_tmp_path': {'type': 'string', 'format': 'path'},
'delete_origin': {'type': 'boolean', 'default': False},
'download_empty_dirs': {'type': 'boolean', 'default': False},
},
'additionalProperties': False,
}
def prepare_config(self, config, task):
config.setdefault('use-ssl', False)
config.setdefault('delete_origin', False)
config.setdefault('ftp_tmp_path', os.path.join(task.manager.config_base, 'temp'))
config.setdefault('download_empty_dirs', False)
return config
def ftp_connect(self, config, ftp_url, current_path):
if config['use-ssl']:
ftp = ftplib.FTP_TLS()
else:
ftp = ftplib.FTP()
# ftp.set_debuglevel(2)
logger.debug('Connecting to ' + ftp_url.hostname)
ftp.connect(ftp_url.hostname, ftp_url.port)
ftp.login(ftp_url.username, ftp_url.password)
if config['use-ssl']:
ftp.prot_p()
ftp.sendcmd('TYPE I')
ftp.set_pasv(True)
logger.debug('Changing directory to: ' + current_path)
ftp.cwd(current_path)
return ftp
def check_connection(self, ftp, config, ftp_url, current_path):
try:
ftp.voidcmd("NOOP")
except (IOError, ftplib.Error):
ftp = self.ftp_connect(config, ftp_url, current_path)
return ftp
def on_task_download(self, task, config):
config = self.prepare_config(config, task)
for entry in task.accepted:
ftp_url = urlparse(entry.get('url'))
ftp_url = ftp_url._replace(path=unquote(ftp_url.path))
current_path = os.path.dirname(ftp_url.path)
try:
ftp = self.ftp_connect(config, ftp_url, current_path)
except ftplib.all_errors as e:
entry.fail("Unable to connect to server : %s" % e)
break
to_path = config['ftp_tmp_path']
try:
to_path = entry.render(to_path)
except RenderError as err:
raise plugin.PluginError(
"Path value replacement `%s` failed: %s" % (to_path, err.args[0])
)
# Clean invalid characters with pathscrub plugin
to_path = pathscrub(to_path)
if not os.path.exists(to_path):
logger.debug('Creating base path: {}', to_path)
os.makedirs(to_path)
if not os.path.isdir(to_path):
raise plugin.PluginWarning("Destination `%s` is not a directory." % to_path)
file_name = os.path.basename(ftp_url.path)
try:
# Directory
ftp = self.check_connection(ftp, config, ftp_url, current_path)
ftp.cwd(file_name)
self.ftp_walk(ftp, os.path.join(to_path, file_name), config, ftp_url, ftp_url.path)
ftp = self.check_connection(ftp, config, ftp_url, current_path)
ftp.cwd('..')
if config['delete_origin']:
ftp.rmd(file_name)
except ftplib.error_perm:
# File
self.ftp_down(ftp, file_name, to_path, config, ftp_url, current_path)
ftp.close()
def on_task_output(self, task, config):
"""Count this as an output plugin."""
def ftp_walk(self, ftp, tmp_path, config, ftp_url, current_path):
logger.debug('DIR->' + ftp.pwd())
logger.debug('FTP tmp_path : ' + tmp_path)
try:
ftp = self.check_connection(ftp, config, ftp_url, current_path)
dirs = ftp.nlst(ftp.pwd())
except ftplib.error_perm as ex:
logger.info('Error {}', ex)
return ftp
if not dirs:
if config['download_empty_dirs']:
os.mkdir(tmp_path)
else:
logger.debug("Empty directory, skipping.")
return ftp
for file_name in (path for path in dirs if path not in ('.', '..')):
file_name = os.path.basename(file_name)
try:
ftp = self.check_connection(ftp, config, ftp_url, current_path)
ftp.cwd(file_name)
if not os.path.isdir(tmp_path):
os.mkdir(tmp_path)
logger.debug('Directory {} created', tmp_path)
ftp = self.ftp_walk(
ftp,
os.path.join(tmp_path, os.path.basename(file_name)),
config,
ftp_url,
os.path.join(current_path, os.path.basename(file_name)),
)
ftp = self.check_connection(ftp, config, ftp_url, current_path)
ftp.cwd('..')
if config['delete_origin']:
ftp.rmd(os.path.basename(file_name))
except ftplib.error_perm:
ftp = self.ftp_down(
ftp, os.path.basename(file_name), tmp_path, config, ftp_url, current_path
)
ftp = self.check_connection(ftp, config, ftp_url, current_path)
return ftp
def ftp_down(self, ftp, file_name, tmp_path, config, ftp_url, current_path):
logger.debug('Downloading {} into {}', file_name, tmp_path)
if not os.path.exists(tmp_path):
os.makedirs(tmp_path)
local_file = open(os.path.join(tmp_path, file_name), 'a+b')
ftp = self.check_connection(ftp, config, ftp_url, current_path)
try:
ftp.sendcmd("TYPE I")
file_size = ftp.size(file_name)
except Exception:
file_size = 1
max_attempts = 5
size_at_last_err = 0
logger.info('Starting download of {} into {}', file_name, tmp_path)
while file_size > local_file.tell():
try:
if local_file.tell() != 0:
ftp = self.check_connection(ftp, config, ftp_url, current_path)
ftp.retrbinary('RETR %s' % file_name, local_file.write, local_file.tell())
else:
ftp = self.check_connection(ftp, config, ftp_url, current_path)
ftp.retrbinary('RETR %s' % file_name, local_file.write)
except Exception as error:
if max_attempts != 0:
if size_at_last_err == local_file.tell():
# Nothing new was downloaded so the error is most likely connected to the resume functionality.
# Delete the downloaded file and try again from the beginning.
local_file.close()
os.remove(os.path.join(tmp_path, file_name))
local_file = open(os.path.join(tmp_path, file_name), 'a+b')
max_attempts -= 1
size_at_last_err = local_file.tell()
logger.debug('Retrying download after error {}', error.args[0])
# Short timeout before retry.
time.sleep(1)
else:
logger.error('Too many errors downloading {}. Aborting.', file_name)
break
local_file.close()
if config['delete_origin']:
ftp = self.check_connection(ftp, config, ftp_url, current_path)
ftp.delete(file_name)
return ftp
@event('plugin.register')
def register_plugin():
plugin.register(OutputFtp, 'ftp_download', api_ver=2)
| mit | 1,172,875,676,045,034,500 | 36.396476 | 119 | 0.533161 | false |
pudo-attic/docstash | docstash/util.py | 1 | 2772 | from os import path
from hashlib import sha1
from httplib import HTTPResponse
from urllib2 import urlopen
from StringIO import StringIO
from urlparse import urlparse
from werkzeug import secure_filename
MANIFEST_FILE = 'manifest.yaml'
def fullpath(filename):
# a happy tour through stdlib
filename = path.expanduser(filename)
filename = path.expandvars(filename)
filename = path.normpath(filename)
return path.abspath(filename)
def filename(filename, default='data'):
if filename is None:
return filename
basename = path.basename(filename)
return secure_filename(basename) or default
def checksum(filename):
hash = sha1()
with open(filename, 'rb') as fh:
while True:
block = fh.read(2 ** 10)
if not block:
break
hash.update(block)
return hash.hexdigest()
def clean_headers(headers):
result = {}
for k, v in dict(headers).items():
k = k.lower().replace('-', '_')
result[k] = v
return result
def ingest_misc(coll, obj, **kwargs):
if isinstance(obj, basestring):
# Treat strings as paths or URLs
url = urlparse(obj)
if url.scheme.lower() in ['http', 'https']:
try:
import requests
obj = requests.get(obj)
except ImportError:
obj = urlopen(obj)
elif url.scheme.lower() in ['file', '']:
if path.isdir(url.path):
return coll.ingest_dir(url.path)
return coll.ingest_file(url.path)
# Python requests
try:
from requests import Response
if isinstance(obj, Response):
kwargs['source_status'] = obj.status_code
kwargs['headers'] = clean_headers(obj.headers)
kwargs['source_url'] = obj.url
kwargs['file'] = obj.url
fd = StringIO(obj.content)
return coll.ingest_fileobj(fd, **kwargs)
except ImportError:
pass
if isinstance(obj, HTTPResponse):
# Can't tell the URL for HTTPResponses
kwargs['source_status'] = obj.status
# TODO handle lists:
kwargs['headers'] = clean_headers(obj.getheaders())
return coll.ingest_fileobj(obj, **kwargs)
elif hasattr(obj, 'geturl') and hasattr(obj, 'info'):
# assume urllib or urllib2
kwargs['source_url'] = obj.url
kwargs['file'] = obj.url
kwargs['source_status'] = obj.getcode()
kwargs['headers'] = clean_headers(obj.headers)
return coll.ingest_fileobj(obj, **kwargs)
elif hasattr(obj, 'read'):
# Fileobj will be a bit bland
return coll.ingest_fileobj(obj, **kwargs)
raise ValueError("Can't ingest: %r" % obj)
| mit | -3,031,235,047,868,030,000 | 26.445545 | 59 | 0.599206 | false |
Vrekrer/PycuBLAS | codeUtils.py | 1 | 5234 | #Published symbols @7.0
#readelf -Ds /usr/lib/x86_64-linux-gnu/libcublas.so.7.0
from subprocess import Popen, PIPE
import pyperclip
libDir = '/usr/lib/x86_64-linux-gnu/'
c_types_reps = {'int' :'c_int',
'size_t' :'c_size_t',
'char' :'c_char',
'unsigned int' :'c_uint',
'void' :'',
'char*' :'c_char_p',
'void*' :'c_void_p'
}
class XX():
pass
def getSymbolTable(libname):
(stdout, stderr) = Popen(["readelf", "-Ds",
libDir + libname], stdout=PIPE).communicate()
lines = stdout.splitlines()[3:]
return [l.split()[8] for l in lines]
def getNotDefined(fileName, base, symbolTable):
with open(fileName,'r') as pyfile:
fileText = pyfile.read()
return [s for s in symbolTable if not(base+'.'+s in fileText)]
# Function to help construct the headers
def header(funct):
fS = 'cublasS' + funct
fD = 'cublasD' + funct
fC = 'cublasC' + funct
fZ = 'cublasZ' + funct
for f in [fS, fD, fC, fZ]:
print '%s = libcublas.%s_v2' % (f, f)
print 'for funct in [%s, %s, %s, %s]:' % (fS, fD, fC, fZ)
print ' funct.restype = cublasStatus_t'
print ' #funct.argtypes = [cublasHandle_t,'
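# Illustrative usage (hypothetical routine name): header('gemm') prints the ctypes
# wiring for cublasSgemm_v2 / cublasDgemm_v2 / cublasCgemm_v2 / cublasZgemm_v2.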
def pharseFunct(doc):
FunctData = XX()
#remove unicode chars
doc = doc.decode('unicode_escape').encode('ascii', 'ignore')
#split at "("
data = doc.rsplit('(')
#get retType and function Name
FunctData.retType, FunctData.Name = data[0].strip().split()[-2:]
    #get parameter list
pars = data[1].rsplit(')')[0].strip().split(',')
FunctData.pars = [p.rsplit() for p in pars]
return FunctData
def codeFunct(FunctData, libname):
code = ''
c_header = '# ' + FunctData.retType + ' ' + FunctData.Name + ' ( '
lenH = len(c_header) - 1
for i, p in enumerate(FunctData.pars):
c_header += ' '.join(p)
if (i+1) != len(FunctData.pars):
c_header += ( ',\n#' + lenH*' ' )
else:
c_header += ' )'
code += c_header + '\n'
code += FunctData.Name + ' = ' + libname + '.' + FunctData.Name + '\n'
code += FunctData.Name + '.restype = ' + FunctData.retType + '\n'
args = FunctData.Name + '.argtypes = ['
lenA = len(args)
argtypes = []
argNames = []
for pars in FunctData.pars:
if len(pars) == 1:
argtypes.append(pars[0])
argNames.append('')
elif len(pars) == 2:
argtypes.append(pars[0])
argNames.append(pars[1])
elif len(pars) == 3:
if pars[0] == 'const':
argtypes.append(pars[1])
else:
argtypes.append(' '.join(pars[:2]))
argNames.append(pars[2])
elif '=' in pars:
argtypes.append(' '.join(pars[:-3]))
argNames.append(' '.join(pars[-3:]))
for i, t in enumerate(argtypes):
if t in c_types_reps.keys():
argtypes[i] = c_types_reps[t]
elif (t[:-1] in c_types_reps.keys()) & (t[-1]=='*'):
argtypes[i] = 'POINTER(' + c_types_reps[t[:-1]] + ')'
elif t[-1]=='*':
argtypes[i] = 'POINTER(' + t[:-1] + ')'
else:
argtypes[i] = t
maxArgTypeName = max([len(t) for t in argtypes])+1
for i, argT in enumerate(argtypes):
args += argT
if (i+1) != len(argtypes):
args += ','
else:
args += ' '
if argNames[i] != '':
args += ' '*(maxArgTypeName-len(argT))
args += '# ' + argNames[i]
args += ( '\n' + lenA*' ' )
args += ']\n'
code += args
pyperclip.copy(code)
return code
def codeClipBoardFunct(libname):
source = pyperclip.paste().splitlines()
out = '\n'.join([codeFunct(pharseFunct(l), libname) for l in source])
pyperclip.copy(out)
def pharseStructFields(c_code):
S = XX()
lines = c_code.splitlines()
lines = [line.rsplit(';')[0].strip() for line in lines]
S.datatypes = [' '.join(l.split()[:-1]) for l in lines]
S.dataNames = [l.split()[-1].rsplit('[')[0] for l in lines]
S.arraySize = [(l.split()[-1]+'[').rsplit('[')[1].rsplit(']')[0] for l in lines]
S.size = len(S.datatypes)
S.maxDataNameSize = max([len(a) for a in S.dataNames])
return S
def codeStruct(sData):
code = ' _fields_ = ['
lenH = len(code)
for i in range(sData.size):
name_spaces = (sData.maxDataNameSize - len(sData.dataNames[i]) + 1)*' '
code += "('" + sData.dataNames[i] + "'," +name_spaces
if sData.datatypes[i] in c_types_reps.keys():
code += c_types_reps[sData.datatypes[i]]
else:
code += sData.datatypes[i]
if sData.arraySize[i] != '':
code += '*'+sData.arraySize[i]+')'
else:
code += ')'
if (i+1) != sData.size:
code += ',\n' + lenH*' '
else:
code += ']'
pyperclip.copy(code)
return code
def codeClipBoardStruct():
source = pyperclip.paste()
out = codeStruct(pharseStructFields(source))
pyperclip.copy(out)
| bsd-3-clause | 1,705,460,581,328,154,400 | 31.7125 | 84 | 0.509171 | false |
Zashel/pastel | pari_model.py | 1 | 45739 | from zrest.basedatamodel import RestfulBaseInterface
import gc
import shelve
import os
from definitions import *
import datetime
from zashel.utils import log
from math import ceil
import glob
import re
from utils import *
import json
import pprint
#TODO: Fix imports order
class Pari(RestfulBaseInterface):
def __init__(self, filepath):
super().__init__()
self._filepath = filepath
path, filename = os.path.split(self.filepath)
if not os.path.exists(path):
os.makedirs(path)
self.set_shelve("c")
self.shelf.close()
self.set_shelve()
try:
self._loaded_file = self.shelf["file"]
except KeyError:
self._loaded_file = None
self.name = None
self.list_data = dict()
self.page = 1
self.items_per_page = 50
try:
self.all = self.shelf["all"]
except KeyError:
self.all = set()
self.ids_facturas = None
self.total_query = int()
self.filter = None
def set_shelve(self, flag="r"): # To implement metadata
self._shelf = shelve.open(self.filepath, flag)
@property
def filepath(self):
return self._filepath
@property
def shelf(self):
return self._shelf
@property
def loaded_file(self):
return self._loaded_file
def headers(self):
return PARI_FIELDS
def read_pari(self, pari_file):
assert os.path.exists(pari_file)
begin = datetime.datetime.now()
total_bytes = os.stat(pari_file).st_size
read_bytes = int()
last = 0.0000
info = False
with open(pari_file, "r") as pari:
headers = pari.readline().strip("\n").split("|")
for line in pari:
read_bytes += len(bytearray(line, "utf-8"))+1
percent = read_bytes/total_bytes
if percent >= last:
last += 0.0001
info = True
row = line.strip("\n").split("|")
final = dict()
for index, key in enumerate(headers):
final[key.lower()] = row[index]
if info is True:
time = datetime.datetime.now() - begin
yield {"percent": round(percent, 4),
"time": time,
"eta": (time/percent)-time,
"data": final}
info = False
else:
yield {"data": final}
def set_pari(self, pari_file, *, do_export=True, do_report=True):
API_id_factura = {"_heads": ["fecha_factura",
"importe_adeudado",
"estado_recibo",
"id_cuenta"],
"data": dict()}
API_id_cuenta = {"_heads": ["segmento",
"facturas",
"id_cliente"],
"data": dict()}
API_id_cliente = {"_heads": ["numdoc",
"id_cuenta"],
"data": dict()}
PARI_FIELDS = admin_config.PARI_FILE_FIELDS
API_segmentos = list()
index_segmentos = dict()
API_estados = list()
index_estados = dict()
index_facturas = dict()
API_numdocs = {"_heads": ["id_cuenta"],
"data": dict()}
limit_date = datetime.datetime.strptime(
(datetime.datetime.now() - datetime.timedelta(days=92)).strftime("%d%m%Y"),
"%d%m%Y").date()
total = int()
self.all = set()
reports = {"importes por fechas y estados": dict(),
"facturas por fechas y estados": dict(),
"devoluciones por fechas y estados": dict(),
"diario": dict()}
ife = reports["importes por fechas y estados"]
ffe = reports["facturas por fechas y estados"]
dfe = reports["devoluciones por fechas y estados"]
diario = dict()
dni_en_gestion = set()
dni_no_en_gestion = set()
ctes_pendientes = dict()
for row in self.read_pari(pari_file):
id_factura = int(row["data"]["id_factura"])
id_cuenta = int(row["data"]["id_cuenta"])
id_cliente = int(row["data"]["id_cliente"])
numdoc = row["data"]["numdoc"]
final = {"id_cliente": API_id_cliente,
"id_cuenta": API_id_cuenta,
"id_factura": API_id_factura,
"numdoc": API_numdocs,
"estados": API_estados,
"segmentos": API_segmentos,
"index":{"estados": index_estados,
"segmentos": index_segmentos,
"fecha_factura": index_facturas},
"reports": reports
}
data = row["data"]
#Exporting daily reports of certain invoices:
if do_export is True:
fecha_puesta_cobro = datetime.datetime.strptime(data["fecha_puesta_cobro"], "%d/%m/%y")
if (fecha_puesta_cobro + datetime.timedelta(days=61) >= datetime.datetime.today().replace(hour=0,
minute=0,
second=0,
microsecond=0) and
(data["primera_factura"] == "1" or data["primera_factura"] != "1")):
str_fecha_factura = datetime.datetime.strptime(data["fecha_factura"], "%d/%m/%y")
if data["fecha_factura"] not in diario:
diario[data["fecha_factura"]] = list()
diario[data["fecha_factura"]].append(";".join(PARI_FIELDS))
final_list = list()
for head in PARI_FIELDS:
if "fecha" in head:
item = datetime.datetime.strptime(data[head], "%d/%m/%y").strftime("%d/%m/%Y")
else:
item = data[head]
final_list.append(item)
diario[data["fecha_factura"]].append(";".join(final_list))
if (fecha_puesta_cobro + datetime.timedelta(days=60) >= datetime.datetime.today().replace(hour=0,
minute=0,
second=0,
microsecond=0) and
data["estado_recibo"] == "IMPAGADO"):
if data["id_cuenta"] not in ctes_pendientes:
ctes_pendientes[data["id_cuenta"]] = list()
ctes_pendientes[data["id_cuenta"]].append(data)
dni_en_gestion.add(data["id_cuenta"])
elif data["estado_recibo"] == "IMPAGADO":
dni_no_en_gestion.add(data["id_cuenta"])
for report in (ife, ffe, dfe):
if data["segmento"] not in report:
report[data["segmento"]] = dict()
for segmento in report:
if data["fecha_factura"] not in report[segmento]:
report[segmento][data["fecha_factura"]] = dict()
for fecha_factura in report[segmento]:
if data["estado_recibo"] not in report[segmento][fecha_factura]:
report[segmento][fecha_factura][data["estado_recibo"]] = int()
ife[data["segmento"]][data["fecha_factura"]][data["estado_recibo"]] += int(
data["importe_adeudado"].replace(",", ""))
dfe[data["segmento"]][data["fecha_factura"]][data["estado_recibo"]] += int(
data["importe_devolucion"].replace(",", ""))
ffe[data["segmento"]][data["fecha_factura"]][data["estado_recibo"]] += 1
#if (row["data"]["estado_recibo"] == "IMPAGADO" or row["data"]["estado_recibo"] == "PAGO PARCIAL" or
# datetime.datetime.strptime(row["data"]["fecha_factura"], "%d/%m/%y").date() >= limit_date):
if row["data"]["estado_recibo"] == "IMPAGADO": #Let's play mini, then
for name, item, api in (("id_factura", id_factura, API_id_factura),
("id_cuenta", id_cuenta, API_id_cuenta),
("id_cliente", id_cliente, API_id_cliente)):
heads = api["_heads"]
if item not in api["data"]:
api["data"][item] = [None for item in heads]
for index, head in enumerate(heads):
if head in ("id_factura",
"id_cliente",
"id_cuenta"):
if head == "id_cliente":
API_numdocs["data"].update({numdoc: [id_cliente]})
if name == "id_cliente" and head == "id_cuenta":
if api["data"][item][index] is None:
api["data"][item][index] = list()
if id_cuenta not in api["data"][item][index]:
api["data"][item][index].append(id_cuenta)
else:
api["data"][item][index] = {"id_factura": id_factura,
"id_cliente": id_cliente,
"id_cuenta": id_cuenta}[head]
elif head == "facturas":
if api["data"][item][index] is None:
api["data"][item][index] = list()
api["data"][item][index].append(id_factura)
elif head == "importe_adeudado":
importe = float(row["data"][head].replace(",", "."))
importe = int(importe*100)
api["data"][item][index] = importe
elif head == "segmento":
if row["data"][head] not in API_segmentos:
API_segmentos.append(row["data"][head])
if row["data"][head] not in index_segmentos:
index_segmentos[row["data"][head]] = set() #id_cliente
index_segmentos[row["data"][head]] |= {id_cliente}
segmento = API_segmentos.index(row["data"][head])
api["data"][item][index] = segmento.to_bytes(ceil(segmento.bit_length() / 8), "big")
elif head == "estado_recibo":
if row["data"][head] not in API_estados:
API_estados.append(row["data"][head])
if row["data"][head] not in index_estados:
index_estados[row["data"][head]] = set() #id_factura
index_estados[row["data"][head]] |= {id_factura}
estado = API_estados.index(row["data"][head])
api["data"][item][index] = estado.to_bytes(ceil(estado.bit_length() / 8), "big")
elif head == "fecha_factura":
fecha = datetime.datetime.strptime(row["data"][head], "%d/%m/%y")
fecha = int(fecha.strftime("%d%m%y"))
fecha = fecha.to_bytes(ceil(fecha.bit_length() / 8), "big")
api["data"][id_factura][index] = fecha
if row["data"][head] not in index_facturas:
index_facturas[row["data"][head]] = set() #id_factura
index_facturas[row["data"][head]] |= {id_factura}
else:
api["data"][item][index] = row["data"][head]
self.all |= {id_factura}
total += 1
if "eta" in row:
yield row
self.shelf.close()
self.set_shelve("c")
self.shelf.update(final)
path, name = os.path.split(pari_file)
self.shelf["file"] = name
self.shelf["total"] = total
self.shelf["all"] = self.all
self._loaded_file = name
self.shelf.close()
self.set_shelve()
if do_export is True:
for fecha_factura in diario:
str_fecha_factura = datetime.datetime.strptime(fecha_factura, "%d/%m/%y")
trying = 0
while True:
if trying > 0:
ache = " ({})".format(str(trying))
else:
ache = ""
try:
with open(os.path.join(admin_config.DAILY_EXPORT_PATH,
"jazztel_ciclo_"+str_fecha_factura.strftime("%Y-%m-%d")+ache+".csv"),
"w") as f:
f.write("\n".join(diario[fecha_factura]))
except PermissionError:
trying += 1
else:
break
final_list = [";".join(PARI_FIELDS)]
final_dni = dni_en_gestion - dni_no_en_gestion
for dni in final_dni:
if dni in ctes_pendientes:
for item in ctes_pendientes[dni]:
final_list.append(";".join([item[key] for key in PARI_FIELDS]))
with open(os.path.join(admin_config.DAILY_EXPORT_PATH,
"en_gestion.csv"),
"w") as f:
f.write("\n".join(final_list))
#ife = data["importes por fechas y estados"]
#ffe = data["facturas por fechas y estados"]
#dfe = data["devoluciones por fechas y estados"]
segmentos = list(ife.keys())
segmentos.sort()
assert len(segmentos) > 0
fechas = list(ife[segmentos[0]].keys())
fechas.sort()
assert len(fechas) > 0
estados = list(ife[segmentos[0]][fechas[0]].keys())
estados.sort()
assert len(estados) > 0
heads = "segmento;fecha_factura;estado;facturas;importe_devuelto;importe_impagado\n"
name = os.path.split(pari_file)[1].strip("BI_131_FICHERO_PARI_DIARIO")
name = "report_pari_{}".format(name)
if do_report is True: # Set in server
with open(os.path.join(admin_config.REPORT_PATH, "Pari", name), "w") as f:
f.write(heads)
for segmento in segmentos:
for fecha in fechas:
for estado in estados:
fecha_str = datetime.datetime.strptime(fecha, "%d/%m/%y").strftime("%d/%m/%Y")
facturas = str(ffe[segmento][fecha][estado])
importe_devuelto = str(dfe[segmento][fecha][estado])
importe_devuelto = "{},{}".format(importe_devuelto[:-2], importe_devuelto[-2:])
importe_impagado = str(ife[segmento][fecha][estado])
importe_impagado = "{},{}".format(importe_impagado[:-2], importe_impagado[-2:])
f.write(";".join((segmento,
fecha_str,
estado,
facturas,
importe_devuelto,
importe_impagado
)) + "\n")
def read_n43(self, filepath):
if os.path.exists(filepath):
begin = datetime.datetime.now()
total_bytes = os.stat(filepath).st_size
read_bytes = int()
last = 0.0000
total = int()
info = False
re_nif = re.compile(r"[DNI]?[ ]?([XYZ]?[0-9]{5,8}[TRWAGMYFPDXBNJZSQVHLCKE]{1})")
re_cif = re.compile(r"[ABCDEFGHJNPQRUVW]{1}[0-9]{8}")
re_tels = re.compile(r"\+34[6-9]{1}[0-9]{8}|[6-9]{1}[0-9]{8}")
with open(filepath, "r") as file_:
f_oper = None
f_valor = None
oficina_orig = str()
importe = str()
observaciones = str()
account = str()
for row in file_:
read_bytes += len(bytearray(row, "utf-8")) + 1
percent = read_bytes / total_bytes
if percent >= last:
last += 0.0001
info = True
row = row.strip("\n")
if row.startswith("11"):
account = row[2:20]
if row.startswith("22") or row.startswith("33"):
if not f_oper is None and not observaciones.startswith("TRASP. AGRUPADO") and not observaciones.startswith("TRASPASO A CTA"):
total += 1
observaciones = observaciones.strip()
telefonos = list()
nif = None
if observaciones.startswith("TRANSFER"):
observaciones = observaciones[:-8]
elif observaciones.startswith("81856015"):
nif = calcular_letra_dni(observaciones[15:23])
telefonos.append(observaciones[53:62])
nifs = set()
tels = set()
if nif is None:
for ind, restring in enumerate((observaciones,
observaciones.replace(".", ""),
observaciones.replace("-", ""),
observaciones.replace(" ", ""))):
for nif in re_nif.findall(restring.upper()):
nifs.add(nif)
for cif in re_cif.findall(restring.upper()):
if cif[0] in "XYZ":
cif = calcular_letra_dni(cif)
nifs.add(cif)
for tel in re_tels.findall(restring.upper()):
tels.add(tel)
if ind == 0 and len(nifs) > 0:
break
telefonos = list(tels)
nifs = list(nifs)
if len(nifs) > 0:
nif = nifs[0]
for nifid in nifs:
if nif[-1] in "TRWAGMYFPDXBNJZSQVHLCKE":
nif = nifid
break
else:
nif = ""
nif = formatear_letra_dni(nif)
final = {"cuenta": account,
"fecha_operacion": f_oper,
"fecha_valor": f_valor,
"oficina_origen": oficina_orig,
"importe": importe,
"observaciones": observaciones,
"nif": nif,
"telefonos": telefonos}
f_oper = None
f_valor = None
oficina_orig = str()
importe = str()
observaciones = str()
if info is True:
time = datetime.datetime.now() - begin
yield {"percent": round(percent, 4),
"time": time,
"eta": (time / percent) - time,
"data": final}
info = False
else:
yield {"data": final}
if row.startswith("22"):
row = row.strip()
f_oper = datetime.datetime.strptime(row[10:16], "%y%m%d")
f_valor = datetime.datetime.strptime(row[16:22], "%y%m%d")
importe = int(row[28:42])
observaciones = row[52:].strip()
oficina_orig = row[6:10]
elif row.startswith("23"):
observaciones += row[4:].strip()
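    # read_n43 parses what appears to be an AEB "Norma 43" (cuaderno 43) bank
    # statement: records starting with "11" carry the account number, "22" a
    # movement (value/operation dates, originating office, amount and free
    # text) and "23" continuation lines of that free text. NIFs/CIFs and phone
    # numbers are then scraped from the free text with the regular expressions
    # above; the fixed-width offsets used here are assumptions based on that
    # layout.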
def get_codes(self):
fechas_facturas = list(self.shelf["reports"]["importes por fechas y estados"]["RESIDENCIAL"].keys())
fechas_facturas = [datetime.datetime.strptime(fecha, "%d/%m/%y") for fecha in fechas_facturas]
fechas_facturas.sort()
final = dict(admin_config.FACTURAS)
fechas_final = list(admin_config.FACTURAS.keys())
fechas_final.sort()
fecha_inicio = fechas_final[-1]
codigo_inicio = final[fecha_inicio]
len_facturas = len(final)
if fecha_inicio in fechas_facturas:
index_inicio = fechas_facturas.index(fecha_inicio)
for index, fecha in enumerate(fechas_facturas):
if fecha not in final:
final[fecha] = codigo_inicio+index-index_inicio
len_final = len(final)
if len_final > len_facturas+6:
            keys = list(final.keys())
keys.sort()
sfinal = [";".join((final[key], key.strftime("%d/%m/%Y"))) for key in keys]
with open(admin_config.FACTURAS_FILE, "w") as f:
f.write("\n".join(sfinal))
admin_config.FACTURAS = final
return final
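    # Illustrative result of get_codes(): a mapping of billing-cycle dates to
    # sequential integer codes, extending admin_config.FACTURAS with any newer
    # dates found in the loaded PARI report, e.g. (assuming the last configured
    # code was 41): {datetime(2017, 1, 1): 41, datetime(2017, 2, 1): 42, ...}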
def set_n43(self, filepath):
if os.path.exists(filepath):
apply_date = datetime.datetime.today().strftime("%d/%m/%Y") #TODO: To config
print("Loading Codes")
codes = self.get_codes()
print("Loading PARI")
shelf = dict(self.shelf)
print("Cleaning")
gc.collect()
account_number = ["018239990014690035"] #TODO: set in shitty config
account_ventanillas = ["018239990202055004"]
if not "aplicados" in self.shelf:
self.shelf["aplicados"] = dict()
applied = dict(self.shelf["aplicados"])
print("LEN_APPLIED {}".format(len(applied)))
print("LEN_NUMDOC {}".format(len(shelf["numdoc"]["data"])))
print("LEN_CODES {}".format(len(codes)))
final = list()
manuals = list()
anulaciones = dict()
informe = {"total": {"operaciones": int(),
"importe": int()},
"aplicado": {"operaciones": int(),
"importe": int()},
"pendiente": {"operaciones": int(),
"importe": int()},
"anulaciones": {"operaciones": int(),
"importe": int()},
"ventanillas": {"operaciones": int(),
"importe": int()}
}
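            # The application below is a two-pass match: the first pass over the
            # file collects ANULACIONES (reversals) per account and tallies the
            # report totals; the second pass pairs every remaining payment with
            # the customer's unpaid invoices (oldest first), emitting one
            # payment-application line per invoice and sending anything that
            # cannot be fully matched to the manual-review list.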
for row in self.read_n43(filepath):
data = row["data"]
if data["cuenta"] in account_number:
if data["observaciones"].startswith("ANULACIONES"):
if data["cuenta"] not in anulaciones:
anulaciones[data["cuenta"]] = dict()
anulaciones[data["cuenta"]][data["importe"]] = data["oficina_origen"]
informe["anulaciones"]["operaciones"] += 1
informe["anulaciones"]["importe"] += data["importe"]
informe["total"]["operaciones"] += 1
informe["total"]["importe"] += data["importe"]
elif data["cuenta"] in account_ventanillas:
informe["ventanillas"]["operaciones"] += 1
informe["ventanillas"]["importe"] += data["importe"]
for row in self.read_n43(filepath):
data = row["data"]
total = int()
possibles = dict()
go_on = True
payments_list = list()
if data["cuenta"] in account_number and not data["observaciones"].startswith("ANULACIONES"):
if (data["importe"] in anulaciones[data["cuenta"]] and
anulaciones[data["cuenta"]][data["importe"]] == data["oficina_origen"]):
del(anulaciones[data["cuenta"]][data["importe"]])
continue
id_cliente = str()
id_cuentas = list()
if data["nif"] in shelf["numdoc"]["data"]:
#print("{} en numdoc".format(data["nif"]))
id_cliente = shelf["numdoc"]["data"][data["nif"]][0] #TODO: Get index of field by header position
id_cuentas = shelf["id_cliente"]["data"][id_cliente][1]
for id_cuenta in id_cuentas:
#print("id_cuenta {}".format(id_cuentas))
if shelf["id_cuenta"]["data"][id_cuenta][0] != "GRAN CUENTA":
facturas = shelf["id_cuenta"]["data"][id_cuenta][1]
facturas.sort()
for id_factura in facturas:
total += 1
estado = (shelf["estados"][int.from_bytes(
shelf["id_factura"]["data"][id_factura][2], "big")])
fecha_factura = int.from_bytes(shelf["id_factura"]["data"][id_factura][0],
"big")
fecha_factura = str(fecha_factura)
fecha_factura = "0"*(6-len(fecha_factura))+fecha_factura
fecha_factura = datetime.datetime.strptime(fecha_factura, "%d%m%y")
possibles[id_factura] = {"importe": shelf["id_factura"]["data"][id_factura][1],
"id_cuenta": id_cuenta,
"fecha_factura": fecha_factura,
"estado": estado}
election = None
if total >= 1:
ids_factura = list(possibles.keys())
ids_factura.sort()
pdte = data["importe"]
applied_flag = False
for id_factura in ids_factura:
#print("Posibles :{}".format(pprint.pprint(possibles[id_factura])))
#input("id_factura in applied {}".format(id_factura in applied))
if (possibles[id_factura]["estado"] in ("IMPAGADO", "PAGO PARCIAL") and
possibles[id_factura]["importe"] > 0):
if (not id_factura in applied or
(id_factura in applied and
applied[id_factura]["importe_aplicado"] < applied[id_factura]["importe"]) and
pdte > 0):
if not id_factura in applied:
applied[id_factura] = {"importe_aplicado": 0,
"importe": possibles[id_factura]["importe"]}
unpaid = applied[id_factura]["importe"] - applied[id_factura]["importe_aplicado"]
to_apply = pdte < unpaid and pdte or unpaid
pdte -= to_apply
if pdte < 0:
pdte = 0
try:
code = codes[possibles[id_factura]["fecha_factura"]]
except KeyError:
#print(possibles)
#print("Orig: {}".format(int.from_bytes(
# shelf["id_factura"]["data"][id_factura][0],
# "big")))
code = 1
subdata = [str(apply_date),
str(code),
str(admin_config.PM_CUSTOMER),
str(data["nif"]),
str(id_factura),
str(data["fecha_operacion"].strftime("%d/%m/%y")),
str(round(to_apply/100, 2)).replace(".", ","),
str(id_cuenta),
str(get_billing_period(possibles[id_factura]["fecha_factura"])),
str(admin_config.PM_PAYMENT_METHOD),
str(admin_config.PM_PAYMENT_WAY)
]
payments_list.append(";".join(subdata))
applied[id_factura]["importe_aplicado"] += to_apply
applied_flag = True
if pdte == 0:
informe["aplicado"]["operaciones"] += 1
informe["aplicado"]["importe"] += data["importe"]
final.extend(payments_list)
go_on = False
break
if pdte > 0 and applied_flag is True:
go_on = True
if pdte > 0 and applied_flag is False:
go_on = True
else:
go_on = True
if go_on is True:
go_on_final = row["data"].copy()
poss = payments_list.copy()
#for id in poss:
# for field in poss[id]:
# if isinstance(poss[id][field], datetime.datetime):
# poss[id][field] = poss[id][field].strftime("%d/%m/%Y")
for item in go_on_final:
if isinstance(go_on_final[item], datetime.datetime):
go_on_final[item] = go_on_final[item].strftime("%d/%m/%Y")
go_on_final.update({"id_cliente": id_cliente,
"posibles": poss})
manuals.append(go_on_final)
self.shelf["aplicados"].update(applied)
if "eta" in row:
yield row
with open(os.path.join(local_config.EXPORT_PATH,
"localizacion_automatica_{}.csv".format(apply_date.replace("/", "-"))),
"w") as f:
f.write("\n".join(final))
os.makedirs(os.path.join(admin_config.REPORT_PATH, "ISM"), exist_ok=True)
informe["pendiente"]["operaciones"] = informe["total"]["operaciones"] - informe["aplicado"]["operaciones"]
informe["pendiente"]["importe"] = informe["total"]["importe"] - informe["aplicado"]["importe"]
final_informe = "estado;operaciones;importe\n"
final_informe += "\n".join([";".join((estado,
str(informe[estado]["operaciones"]),
str(informe[estado]["importe"]/100).replace(".", ",")))
for estado in informe])
with open(os.path.join(admin_config.REPORT_PATH, "ISM",
"informe_ism_{}.csv".format(apply_date.replace("/", "-"))),
"w") as f:
f.write(final_informe)
yield {"manuals": manuals, "anulaciones": anulaciones}
def replace(self, filter, data, **kwargs):
if "file" in data:
if data["file"] == str():
files = glob.glob(
"{}*.csv".format(os.path.join(admin_config.N43_PATH_INCOMING, "BI_131_FICHERO_PARI_DIARIO")))
files.sort()
files.reverse()
data["file"] = files[0]
path, name = os.path.split(data["file"])
if ("file" in self.shelf and name > self.shelf["file"]) or "file" not in self.shelf:
self.drop(filter, **kwargs)
return self.insert(data, filter=filter)
else:
return self.fetch({}, reportes=True)
        #TODO: Resend something if there is nothing
def drop(self, filter, **kwargs):
if self.loaded_file is not None:
self._loaded_file = None
self.shelf.close()
for file in glob.glob("{}.*".format(self.filepath)):
os.remove(file)
self.set_shelve("c")
self.shelf.close()
self.set_shelve()
return {"data": {"pari": {"data": [],
"total": 0,
"page": 1,
"items_per_page": self.items_per_page}
},
"total": 1,
"page": 1,
"items_per_page": self.items_per_page}
def load_n43(self, data, **kwargs): #TODO: Move to Server
if isinstance(data, str): #Direct call, transform to json
data = json.loads(data)
        final = None
        try:
if self.loaded_file is not None and "file" in data and os.path.exists(data["file"]):
final = None
for item in self.set_n43(data["file"]):
if "eta" in item:
print("\r{0:{w}}".format(str(item["eta"]), w=79, fill=" "), end="")
else:
final = item
print()
return json.dumps({"data": final,
"headers": {"Content-Type": "text/csv"}})
else:
print(data)
except:
print("Final: {}".format(final))
raise
def insert(self, data, **kwargs):
do_export = False
do_report = False
if "filter" in kwargs:
filter = kwargs["filter"]
if "do_export" in filter:
do_export = filter["do_export"] == "1"
if "do_report" in kwargs["filter"]:
do_report = filter["do_report"] == "1"
if "file" in data:
for item in self.set_pari(data["file"], do_export=do_export, do_report=do_report):
print("\r{0:{w}}".format(str(item["eta"]), w=79, fill=" "), end="")
print()
return self.fetch({}, reportes=True)
def fetch(self, filter, **kwargs):
if not self.loaded_file:
return {"filepath": "",
"data": {"pari": {"data": [],
"total": 0,
"page": 1,
"items_per_page": self.items_per_page}
},
"total": 1,
"page": 1,
"items_per_page": self.items_per_page }
else:
if not kwargs:
main_indexes = ("id_factura", "id_cuenta", "id_cliente", "numdoc")
if self.list_data == dict() or self.filter != filter:
shelf = dict(self.shelf)
self.list_data = dict()
self.filter = filter
if "fecha_factura" in filter:
fechas_factura = filter["fecha_factura"].split(",")
filters = [filter.copy() for fecha_factura in fechas_factura]
[filters[index].update({"fecha_factura": fechas_factura[index]}) for index in range(len(filters))]
else:
filters = [filter]
template = {"numdoc": None,
"id_cliente": None,
"id_cuenta": None,
"segmento": None,
"id_factura": None,
"fecha_factura": None,
"importe_adeudado": None,
"estado_recibo": None
}
self.total_query = int()
self.ids_facturas = None
gc.collect()
if any(index in filter for index in main_indexes): #TODO: Redo
for index, id in enumerate(main_indexes):
if id in filter:
                                data = template.copy()
try:
data.update(dict(zip(shelf[id]["_heads"],
shelf[id]["data"][filter[id]])))
except ValueError:
pass
else:
while any(data[key] is None for key in template):
for subfilter in main_indexes:
if subfilter in data and data[subfilter] is not None:
data.update(dict(zip(shelf[subfilter]["_heads"],
shelf[subfilter]["data"][data[subfilter]])))
if "facturas" in data and "id_factura" not in filter:
subdata = data.copy()
del(subdata["facturas"])
for id_factura in data["facturas"]:
subdata.update(dict(zip(shelf[subfilter]["_heads"],
shelf["id_factura"]["data"][id_factura])))
subdata = self.friend_fetch(subdata)
if any([all([filter[field] == data[field] for field in data if field in filter])
for filter in filters]):
del (subdata["facturas"])
self.list_data[self.total_query] = subdata.copy()
self.total_query += 1
else:
subdata = self.friend_fetch(data.copy())
if any([all([filter[field] == data[field] for field in data if field in filter])
for filter in filters]):
self.list_data[self.total_query] = subdata
self.total_query += 1
break
elif self.ids_facturas is None:
self.ids_facturas = set()
for filter in filters:
ids = self.all.copy()
if any(field in filter for field in ("estado_recibo", "fecha_factura", "segmentos")):
if "estado_recibo" in filter and filter["estado_recibo"] in shelf["estados"]:
ids &= shelf["index"]["estados"][filter["estado_recibo"]]
elif "estado_recibo" in filter:
ids = set()
if "fecha_factura" in filter and filter["fecha_factura"] in shelf["index"]["fecha_factura"]:
ids &= shelf["index"]["fecha_factura"][filter["fecha_factura"]]
elif "fecha_factura" in filter:
ids = set()
#if "segmento" in filter and filter["segmento"] in shelf["segmentos"]:
# ids &= shelf["index"]["segmentos"][filter["segmentos"]]
else:
ids = set()
self.ids_facturas |= ids
self.ids_facturas = list(self.ids_facturas)
self.ids_facturas.reverse() #From newer to older
self.total_query = len(self.ids_facturas)
else:
pass
if "page" in filter:
self.page = int(filter["page"])
else:
self.page = 1
if "items_per_page" in filter:
self.items_per_page = int(filter["items_per_page"])
else:
self.items_per_page = 50
len_data = len(self.list_data)
ini = (self.page - 1) * self.items_per_page
end = self.page * self.items_per_page
if self.ids_facturas is not None and self.total_query > len_data:
if end > len(self.ids_facturas):
end = len(self.ids_facturas)
for index, id_factura in enumerate(self.ids_facturas[ini:end]):
if ini+index not in self.list_data:
data = template.copy()
data["id_factura"] = id_factura
while any(data[key] is None for key in template):
for subfilter in main_indexes:
if subfilter in data and data[subfilter] is not None:
data.update(dict(zip(shelf[subfilter]["_heads"],
shelf[subfilter]["data"][data[subfilter]])))
self.list_data[ini+index] = self.friend_fetch(data.copy())
try:
del(shelf)
except UnboundLocalError:
pass
gc.collect()
if len(self.list_data) == 0:
data = []
else:
data = [self.list_data[index] for index in range(ini, end)]
return {"data": data,
"total": self.total_query,
"page": self.page,
"items_per_page": self.items_per_page}
elif "reportes" in kwargs and kwargs["reportes"] is True:
return {"data": self.shelf["reports"]}
def friend_fetch(self, data):
try:
del(data["facturas"])
except KeyError:
pass
data["fecha_factura"] = str(int.from_bytes(data["fecha_factura"], "big"))
while len(data["fecha_factura"]) < 6:
data["fecha_factura"] = "0" + data["fecha_factura"]
fecha = data["fecha_factura"]
data["fecha_factura"] = datetime.datetime.strptime(fecha, "%d%m%y").strftime("%d/%m/%y")
data["segmento"] = self.shelf["segmentos"][int.from_bytes(data["segmento"], "big")]
data["estado_recibo"] = self.shelf["estados"][int.from_bytes(data["estado_recibo"], "big")]
return data
| gpl-3.0 | -4,408,235,592,341,429,000 | 53.842926 | 149 | 0.405453 | false |
bitemyapp/ganeti | lib/objects.py | 1 | 74922 | #
#
# Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014 Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Transportable objects for Ganeti.
This module provides small, mostly data-only objects which are safe to
pass to and from external parties.
"""
# pylint: disable=E0203,W0201,R0902
# E0203: Access to member %r before its definition, since we use
# objects.py which doesn't explicitly initialise its members
# W0201: Attribute '%s' defined outside __init__
# R0902: Allow instances of these objects to have more than 20 attributes
import ConfigParser
import re
import copy
import logging
import time
from cStringIO import StringIO
from ganeti import errors
from ganeti import constants
from ganeti import netutils
from ganeti import outils
from ganeti import utils
from ganeti import serializer
from socket import AF_INET
__all__ = ["ConfigObject", "ConfigData", "NIC", "Disk", "Instance",
"OS", "Node", "NodeGroup", "Cluster", "FillDict", "Network",
"Filter", "Maintenance"]
_TIMESTAMPS = ["ctime", "mtime"]
_UUID = ["uuid"]
def FillDict(defaults_dict, custom_dict, skip_keys=None):
"""Basic function to apply settings on top a default dict.
@type defaults_dict: dict
@param defaults_dict: dictionary holding the default values
@type custom_dict: dict
@param custom_dict: dictionary holding customized value
@type skip_keys: list
@param skip_keys: which keys not to fill
@rtype: dict
@return: dict with the 'full' values
"""
ret_dict = copy.deepcopy(defaults_dict)
ret_dict.update(custom_dict)
if skip_keys:
for k in skip_keys:
if k in ret_dict:
del ret_dict[k]
return ret_dict
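# Example usage (illustrative values):
#
#   >>> FillDict({"a": 1, "b": 2}, {"b": 3, "c": 4}, skip_keys=["a"])
#   {'b': 3, 'c': 4}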
def FillIPolicy(default_ipolicy, custom_ipolicy):
"""Fills an instance policy with defaults.
"""
assert frozenset(default_ipolicy.keys()) == constants.IPOLICY_ALL_KEYS
ret_dict = copy.deepcopy(custom_ipolicy)
for key in default_ipolicy:
if key not in ret_dict:
ret_dict[key] = copy.deepcopy(default_ipolicy[key])
elif key == constants.ISPECS_STD:
ret_dict[key] = FillDict(default_ipolicy[key], ret_dict[key])
return ret_dict
def FillDiskParams(default_dparams, custom_dparams, skip_keys=None):
"""Fills the disk parameter defaults.
@see: L{FillDict} for parameters and return value
"""
return dict((dt, FillDict(default_dparams.get(dt, {}),
custom_dparams.get(dt, {}),
skip_keys=skip_keys))
for dt in constants.DISK_TEMPLATES)
def UpgradeGroupedParams(target, defaults):
"""Update all groups for the target parameter.
@type target: dict of dicts
@param target: {group: {parameter: value}}
@type defaults: dict
@param defaults: default parameter values
"""
if target is None:
target = {constants.PP_DEFAULT: defaults}
else:
for group in target:
target[group] = FillDict(defaults, target[group])
return target
def UpgradeBeParams(target):
"""Update the be parameters dict to the new format.
@type target: dict
@param target: "be" parameters dict
"""
if constants.BE_MEMORY in target:
memory = target[constants.BE_MEMORY]
target[constants.BE_MAXMEM] = memory
target[constants.BE_MINMEM] = memory
del target[constants.BE_MEMORY]
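# For example (illustrative), a legacy dict {constants.BE_MEMORY: 1024} is
# rewritten in place so that it maps constants.BE_MAXMEM and constants.BE_MINMEM
# to 1024 and no longer contains constants.BE_MEMORY:
#
#   >>> be = {constants.BE_MEMORY: 1024}
#   >>> UpgradeBeParams(be)
#   >>> sorted(be) == sorted([constants.BE_MAXMEM, constants.BE_MINMEM])
#   True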
def UpgradeDiskParams(diskparams):
"""Upgrade the disk parameters.
@type diskparams: dict
@param diskparams: disk parameters to upgrade
@rtype: dict
@return: the upgraded disk parameters dict
"""
if not diskparams:
result = {}
else:
result = FillDiskParams(constants.DISK_DT_DEFAULTS, diskparams)
return result
def UpgradeNDParams(ndparams):
"""Upgrade ndparams structure.
@type ndparams: dict
@param ndparams: disk parameters to upgrade
@rtype: dict
@return: the upgraded node parameters dict
"""
if ndparams is None:
ndparams = {}
if (constants.ND_OOB_PROGRAM in ndparams and
ndparams[constants.ND_OOB_PROGRAM] is None):
# will be reset by the line below
del ndparams[constants.ND_OOB_PROGRAM]
return FillDict(constants.NDC_DEFAULTS, ndparams)
def MakeEmptyIPolicy():
"""Create empty IPolicy dictionary.
"""
return {}
class ConfigObject(outils.ValidatedSlots):
"""A generic config object.
It has the following properties:
- provides somewhat safe recursive unpickling and pickling for its classes
- unset attributes which are defined in slots are always returned
as None instead of raising an error
Classes derived from this must always declare __slots__ (we use many
config objects and the memory reduction is useful)
"""
__slots__ = []
def __getattr__(self, name):
if name not in self.GetAllSlots():
raise AttributeError("Invalid object attribute %s.%s" %
(type(self).__name__, name))
return None
def __setstate__(self, state):
slots = self.GetAllSlots()
for name in state:
if name in slots:
setattr(self, name, state[name])
def Validate(self):
"""Validates the slots.
This method returns L{None} if the validation succeeds, or raises
an exception otherwise.
This method must be implemented by the child classes.
@rtype: NoneType
@return: L{None}, if the validation succeeds
@raise Exception: validation fails
"""
def ToDict(self, _with_private=False):
"""Convert to a dict holding only standard python types.
The generic routine just dumps all of this object's attributes in
a dict. It does not work if the class has children who are
ConfigObjects themselves (e.g. the nics list in an Instance), in
which case the object should subclass the function in order to
make sure all objects returned are only standard python types.
Private fields can be included or not with the _with_private switch.
    The actual implementation of this switch is left for those subclasses
with private fields to implement.
@type _with_private: bool
@param _with_private: if True, the object will leak its private fields in
the dictionary representation. If False, the values
will be replaced with None.
"""
result = {}
for name in self.GetAllSlots():
value = getattr(self, name, None)
if value is not None:
result[name] = value
return result
__getstate__ = ToDict
@classmethod
def FromDict(cls, val):
"""Create an object from a dictionary.
This generic routine takes a dict, instantiates a new instance of
the given class, and sets attributes based on the dict content.
As for `ToDict`, this does not work if the class has children
who are ConfigObjects themselves (e.g. the nics list in an
Instance), in which case the object should subclass the function
and alter the objects.
"""
if not isinstance(val, dict):
raise errors.ConfigurationError("Invalid object passed to FromDict:"
" expected dict, got %s" % type(val))
val_str = dict([(str(k), v) for k, v in val.iteritems()])
obj = cls(**val_str) # pylint: disable=W0142
return obj
def Copy(self):
"""Makes a deep copy of the current object and its children.
"""
dict_form = self.ToDict()
clone_obj = self.__class__.FromDict(dict_form)
return clone_obj
def __repr__(self):
"""Implement __repr__ for ConfigObjects."""
return repr(self.ToDict())
def __eq__(self, other):
"""Implement __eq__ for ConfigObjects."""
return isinstance(other, self.__class__) and self.ToDict() == other.ToDict()
def UpgradeConfig(self):
"""Fill defaults for missing configuration values.
This method will be called at configuration load time, and its
implementation will be object dependent.
"""
pass
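# ConfigObject subclasses round-trip cleanly through plain dicts, e.g. for a
# NIC (defined below; illustrative values):
#
#   >>> nic = NIC(mac="aa:00:00:35:ac:13", nicparams={})
#   >>> data = nic.ToDict()            # {'mac': 'aa:00:00:35:ac:13', 'nicparams': {}}
#   >>> NIC.FromDict(data) == nic
#   True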
class TaggableObject(ConfigObject):
"""An generic class supporting tags.
"""
__slots__ = ["tags"]
VALID_TAG_RE = re.compile(r"^[\w.+*/:@-]+$")
@classmethod
def ValidateTag(cls, tag):
"""Check if a tag is valid.
If the tag is invalid, an errors.TagError will be raised. The
function has no return value.
"""
if not isinstance(tag, basestring):
raise errors.TagError("Invalid tag type (not a string)")
if len(tag) > constants.MAX_TAG_LEN:
raise errors.TagError("Tag too long (>%d characters)" %
constants.MAX_TAG_LEN)
if not tag:
raise errors.TagError("Tags cannot be empty")
if not cls.VALID_TAG_RE.match(tag):
raise errors.TagError("Tag contains invalid characters")
def GetTags(self):
"""Return the tags list.
"""
tags = getattr(self, "tags", None)
if tags is None:
tags = self.tags = set()
return tags
def AddTag(self, tag):
"""Add a new tag.
"""
self.ValidateTag(tag)
tags = self.GetTags()
if len(tags) >= constants.MAX_TAGS_PER_OBJ:
raise errors.TagError("Too many tags")
self.GetTags().add(tag)
def RemoveTag(self, tag):
"""Remove a tag.
"""
self.ValidateTag(tag)
tags = self.GetTags()
try:
tags.remove(tag)
except KeyError:
raise errors.TagError("Tag not found")
def ToDict(self, _with_private=False):
"""Taggable-object-specific conversion to standard python types.
This replaces the tags set with a list.
"""
bo = super(TaggableObject, self).ToDict(_with_private=_with_private)
tags = bo.get("tags", None)
if isinstance(tags, set):
bo["tags"] = list(tags)
return bo
@classmethod
def FromDict(cls, val):
"""Custom function for instances.
"""
obj = super(TaggableObject, cls).FromDict(val)
if hasattr(obj, "tags") and isinstance(obj.tags, list):
obj.tags = set(obj.tags)
return obj
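# Tag handling example (illustrative; works on any TaggableObject subclass,
# e.g. a Node or an Instance):
#
#   >>> node.AddTag("rack:r1")
#   >>> "rack:r1" in node.GetTags()
#   True
#   >>> node.RemoveTag("rack:r1")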
class MasterNetworkParameters(ConfigObject):
"""Network configuration parameters for the master
  @ivar uuid: master node's UUID
@ivar ip: master IP
@ivar netmask: master netmask
@ivar netdev: master network device
@ivar ip_family: master IP family
"""
__slots__ = [
"uuid",
"ip",
"netmask",
"netdev",
"ip_family",
]
class ConfigData(ConfigObject):
"""Top-level config object."""
__slots__ = [
"version",
"cluster",
"nodes",
"nodegroups",
"instances",
"networks",
"disks",
"filters",
"maintenance",
"serial_no",
] + _TIMESTAMPS
def ToDict(self, _with_private=False):
"""Custom function for top-level config data.
This just replaces the list of nodes, instances, nodegroups,
networks, disks and the cluster with standard python types.
"""
mydict = super(ConfigData, self).ToDict(_with_private=_with_private)
mydict["cluster"] = mydict["cluster"].ToDict()
mydict["maintenance"] = mydict["maintenance"].ToDict()
for key in ("nodes", "instances", "nodegroups", "networks", "disks",
"filters"):
mydict[key] = outils.ContainerToDicts(mydict[key])
return mydict
@classmethod
def FromDict(cls, val):
"""Custom function for top-level config data
"""
obj = super(ConfigData, cls).FromDict(val)
obj.cluster = Cluster.FromDict(obj.cluster)
obj.nodes = outils.ContainerFromDicts(obj.nodes, dict, Node)
obj.instances = \
outils.ContainerFromDicts(obj.instances, dict, Instance)
obj.nodegroups = \
outils.ContainerFromDicts(obj.nodegroups, dict, NodeGroup)
obj.networks = outils.ContainerFromDicts(obj.networks, dict, Network)
obj.disks = outils.ContainerFromDicts(obj.disks, dict, Disk)
obj.filters = outils.ContainerFromDicts(obj.filters, dict, Filter)
obj.maintenance = Maintenance.FromDict(obj.maintenance)
return obj
def DisksOfType(self, dev_type):
"""Check if in there is at disk of the given type in the configuration.
@type dev_type: L{constants.DTS_BLOCK}
@param dev_type: the type to look for
@rtype: list of disks
@return: all disks of the dev_type
"""
return [disk for disk in self.disks.values()
if disk.IsBasedOnDiskType(dev_type)]
def UpgradeConfig(self):
"""Fill defaults for missing configuration values.
"""
self.cluster.UpgradeConfig()
for node in self.nodes.values():
node.UpgradeConfig()
for instance in self.instances.values():
instance.UpgradeConfig()
self._UpgradeEnabledDiskTemplates()
if self.nodegroups is None:
self.nodegroups = {}
for nodegroup in self.nodegroups.values():
nodegroup.UpgradeConfig()
InstancePolicy.UpgradeDiskTemplates(
nodegroup.ipolicy, self.cluster.enabled_disk_templates)
if self.cluster.drbd_usermode_helper is None:
if self.cluster.IsDiskTemplateEnabled(constants.DT_DRBD8):
self.cluster.drbd_usermode_helper = constants.DEFAULT_DRBD_HELPER
if self.networks is None:
self.networks = {}
for network in self.networks.values():
network.UpgradeConfig()
for disk in self.disks.values():
disk.UpgradeConfig()
if self.filters is None:
self.filters = {}
if self.maintenance is None:
self.maintenance = Maintenance.FromDict({})
self.maintenance.UpgradeConfig()
def _UpgradeEnabledDiskTemplates(self):
"""Upgrade the cluster's enabled disk templates by inspecting the currently
enabled and/or used disk templates.
"""
if not self.cluster.enabled_disk_templates:
template_set = \
set([d.dev_type for d in self.disks.values()])
if any(not inst.disks for inst in self.instances.values()):
template_set.add(constants.DT_DISKLESS)
# Add drbd and plain, if lvm is enabled (by specifying a volume group)
if self.cluster.volume_group_name:
template_set.add(constants.DT_DRBD8)
template_set.add(constants.DT_PLAIN)
# Set enabled_disk_templates to the inferred disk templates. Order them
# according to a preference list that is based on Ganeti's history of
# supported disk templates.
self.cluster.enabled_disk_templates = []
for preferred_template in constants.DISK_TEMPLATE_PREFERENCE:
if preferred_template in template_set:
self.cluster.enabled_disk_templates.append(preferred_template)
template_set.remove(preferred_template)
self.cluster.enabled_disk_templates.extend(list(template_set))
InstancePolicy.UpgradeDiskTemplates(
self.cluster.ipolicy, self.cluster.enabled_disk_templates)
class NIC(ConfigObject):
"""Config object representing a network card."""
__slots__ = ["name", "mac", "ip", "network",
"nicparams", "netinfo", "pci"] + _UUID
@classmethod
def CheckParameterSyntax(cls, nicparams):
"""Check the given parameters for validity.
@type nicparams: dict
@param nicparams: dictionary with parameter names/value
@raise errors.ConfigurationError: when a parameter is not valid
"""
mode = nicparams[constants.NIC_MODE]
if (mode not in constants.NIC_VALID_MODES and
mode != constants.VALUE_AUTO):
raise errors.ConfigurationError("Invalid NIC mode '%s'" % mode)
if (mode == constants.NIC_MODE_BRIDGED and
not nicparams[constants.NIC_LINK]):
raise errors.ConfigurationError("Missing bridged NIC link")
class Filter(ConfigObject):
"""Config object representing a filter rule."""
__slots__ = ["watermark", "priority",
"predicates", "action", "reason_trail"] + _UUID
class Maintenance(ConfigObject):
"""Config object representing the state of the maintenance daemon"""
__slots__ = ["roundDelay", "jobs", "evacuated", "balance", "balanceThreshold",
"incidents", "serial_no"] + _TIMESTAMPS
def UpgradeConfig(self):
if self.serial_no is None:
self.serial_no = 1
if self.mtime is None:
self.mtime = time.time()
if self.ctime is None:
self.ctime = time.time()
class Disk(ConfigObject):
"""Config object representing a block device."""
__slots__ = [
"forthcoming",
"name",
"dev_type",
"logical_id",
"children",
"nodes",
"iv_name",
"size",
"mode",
"params",
"spindles",
"pci",
"serial_no",
# dynamic_params is special. It depends on the node this instance
# is sent to, and should not be persisted.
"dynamic_params"
] + _UUID + _TIMESTAMPS
def _ComputeAllNodes(self):
"""Compute the list of all nodes covered by a device and its children."""
def _Helper(nodes, device):
"""Recursively compute nodes given a top device."""
if device.dev_type in constants.DTS_DRBD:
nodes.extend(device.logical_id[:2])
if device.children:
for child in device.children:
_Helper(nodes, child)
all_nodes = list()
_Helper(all_nodes, self)
return tuple(set(all_nodes))
all_nodes = property(_ComputeAllNodes, None, None,
"List of names of all the nodes of a disk")
def CreateOnSecondary(self):
"""Test if this device needs to be created on a secondary node."""
return self.dev_type in (constants.DT_DRBD8, constants.DT_PLAIN)
def AssembleOnSecondary(self):
"""Test if this device needs to be assembled on a secondary node."""
return self.dev_type in (constants.DT_DRBD8, constants.DT_PLAIN)
def OpenOnSecondary(self):
"""Test if this device needs to be opened on a secondary node."""
return self.dev_type in (constants.DT_PLAIN,)
def SupportsSnapshots(self):
"""Test if this device supports snapshots."""
return self.dev_type in constants.DTS_SNAPSHOT_CAPABLE
def StaticDevPath(self):
"""Return the device path if this device type has a static one.
Some devices (LVM for example) live always at the same /dev/ path,
irrespective of their status. For such devices, we return this
path, for others we return None.
@warning: The path returned is not a normalized pathname; callers
should check that it is a valid path.
"""
if self.dev_type == constants.DT_PLAIN:
return "/dev/%s/%s" % (self.logical_id[0], self.logical_id[1])
elif self.dev_type == constants.DT_BLOCK:
return self.logical_id[1]
elif self.dev_type == constants.DT_RBD:
return "/dev/%s/%s" % (self.logical_id[0], self.logical_id[1])
return None
def ChildrenNeeded(self):
"""Compute the needed number of children for activation.
This method will return either -1 (all children) or a positive
number denoting the minimum number of children needed for
activation (only mirrored devices will usually return >=0).
Currently, only DRBD8 supports diskless activation (therefore we
    return 0); for all others we keep the previous semantics and return
-1.
"""
if self.dev_type == constants.DT_DRBD8:
return 0
return -1
def IsBasedOnDiskType(self, dev_type):
"""Check if the disk or its children are based on the given type.
@type dev_type: L{constants.DTS_BLOCK}
@param dev_type: the type to look for
@rtype: boolean
@return: boolean indicating if a device of the given type was found or not
"""
if self.children:
for child in self.children:
if child.IsBasedOnDiskType(dev_type):
return True
return self.dev_type == dev_type
def GetNodes(self, node_uuid):
"""This function returns the nodes this device lives on.
    Given the node on which the parent of the device lives (or, in the
    case of a top-level device, the primary node of the device's
    instance), this function will return a list of nodes on which this
    device needs to (or can) be assembled.
"""
if self.dev_type in [constants.DT_PLAIN, constants.DT_FILE,
constants.DT_BLOCK, constants.DT_RBD,
constants.DT_EXT, constants.DT_SHARED_FILE,
constants.DT_GLUSTER]:
result = [node_uuid]
elif self.dev_type in constants.DTS_DRBD:
result = [self.logical_id[0], self.logical_id[1]]
if node_uuid not in result:
raise errors.ConfigurationError("DRBD device passed unknown node")
else:
raise errors.ProgrammerError("Unhandled device type %s" % self.dev_type)
return result
def ComputeNodeTree(self, parent_node_uuid):
"""Compute the node/disk tree for this disk and its children.
This method, given the node on which the parent disk lives, will
return the list of all (node UUID, disk) pairs which describe the disk
tree in the most compact way. For example, a drbd/lvm stack
will be returned as (primary_node, drbd) and (secondary_node, drbd)
which represents all the top-level devices on the nodes.
"""
my_nodes = self.GetNodes(parent_node_uuid)
result = [(node, self) for node in my_nodes]
if not self.children:
# leaf device
return result
for node in my_nodes:
for child in self.children:
child_result = child.ComputeNodeTree(node)
if len(child_result) == 1:
# child (and all its descendants) is simple, doesn't split
# over multiple hosts, so we don't need to describe it, our
# own entry for this node describes it completely
continue
else:
# check if child nodes differ from my nodes; note that
# subdisk can differ from the child itself, and be instead
# one of its descendants
for subnode, subdisk in child_result:
if subnode not in my_nodes:
result.append((subnode, subdisk))
# otherwise child is under our own node, so we ignore this
# entry (but probably the other results in the list will
# be different)
return result
def ComputeGrowth(self, amount):
"""Compute the per-VG growth requirements.
This only works for VG-based disks.
@type amount: integer
@param amount: the desired increase in (user-visible) disk space
@rtype: dict
@return: a dictionary of volume-groups and the required size
"""
if self.dev_type == constants.DT_PLAIN:
return {self.logical_id[0]: amount}
elif self.dev_type == constants.DT_DRBD8:
if self.children:
return self.children[0].ComputeGrowth(amount)
else:
return {}
else:
# Other disk types do not require VG space
return {}
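  # Example (illustrative): for a plain LV with logical_id ("xenvg", "data"),
  # ComputeGrowth(500) returns {"xenvg": 500}; a DRBD8 disk delegates to its
  # data child, and all other disk types report no VG requirements ({}).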
def RecordGrow(self, amount):
"""Update the size of this disk after growth.
    This method recurses over the disk's children and updates their
    size correspondingly. The method needs to be kept in sync with the
actual algorithms from bdev.
"""
if self.dev_type in (constants.DT_PLAIN, constants.DT_FILE,
constants.DT_RBD, constants.DT_EXT,
constants.DT_SHARED_FILE, constants.DT_GLUSTER):
self.size += amount
elif self.dev_type == constants.DT_DRBD8:
if self.children:
self.children[0].RecordGrow(amount)
self.size += amount
else:
raise errors.ProgrammerError("Disk.RecordGrow called for unsupported"
" disk type %s" % self.dev_type)
def Update(self, size=None, mode=None, spindles=None):
"""Apply changes to size, spindles and mode.
"""
if self.dev_type == constants.DT_DRBD8:
if self.children:
self.children[0].Update(size=size, mode=mode)
else:
assert not self.children
if size is not None:
self.size = size
if mode is not None:
self.mode = mode
if spindles is not None:
self.spindles = spindles
def UnsetSize(self):
"""Sets recursively the size to zero for the disk and its children.
"""
if self.children:
for child in self.children:
child.UnsetSize()
self.size = 0
def UpdateDynamicDiskParams(self, target_node_uuid, nodes_ip):
"""Updates the dynamic disk params for the given node.
This is mainly used for drbd, which needs ip/port configuration.
Arguments:
- target_node_uuid: the node UUID we wish to configure for
- nodes_ip: a mapping of node name to ip
    The target_node_uuid must exist in nodes_ip, and should be one of the
nodes in the logical ID if this device is a DRBD device.
"""
if self.children:
for child in self.children:
child.UpdateDynamicDiskParams(target_node_uuid, nodes_ip)
dyn_disk_params = {}
if self.logical_id is not None and self.dev_type in constants.DTS_DRBD:
pnode_uuid, snode_uuid, _, pminor, sminor, _ = self.logical_id
if target_node_uuid not in (pnode_uuid, snode_uuid):
# disk object is being sent to neither the primary nor the secondary
# node. reset the dynamic parameters, the target node is not
# supposed to use them.
self.dynamic_params = dyn_disk_params
return
pnode_ip = nodes_ip.get(pnode_uuid, None)
snode_ip = nodes_ip.get(snode_uuid, None)
if pnode_ip is None or snode_ip is None:
raise errors.ConfigurationError("Can't find primary or secondary node"
" for %s" % str(self))
if pnode_uuid == target_node_uuid:
dyn_disk_params[constants.DDP_LOCAL_IP] = pnode_ip
dyn_disk_params[constants.DDP_REMOTE_IP] = snode_ip
dyn_disk_params[constants.DDP_LOCAL_MINOR] = pminor
dyn_disk_params[constants.DDP_REMOTE_MINOR] = sminor
else: # it must be secondary, we tested above
dyn_disk_params[constants.DDP_LOCAL_IP] = snode_ip
dyn_disk_params[constants.DDP_REMOTE_IP] = pnode_ip
dyn_disk_params[constants.DDP_LOCAL_MINOR] = sminor
dyn_disk_params[constants.DDP_REMOTE_MINOR] = pminor
self.dynamic_params = dyn_disk_params
# pylint: disable=W0221
def ToDict(self, include_dynamic_params=False,
_with_private=False):
"""Disk-specific conversion to standard python types.
This replaces the children lists of objects with lists of
standard python types.
"""
bo = super(Disk, self).ToDict()
if not include_dynamic_params and "dynamic_params" in bo:
del bo["dynamic_params"]
for attr in ("children",):
alist = bo.get(attr, None)
if alist:
bo[attr] = outils.ContainerToDicts(alist)
return bo
@classmethod
def FromDict(cls, val):
"""Custom function for Disks
"""
obj = super(Disk, cls).FromDict(val)
if obj.children:
obj.children = outils.ContainerFromDicts(obj.children, list, Disk)
if obj.logical_id and isinstance(obj.logical_id, list):
obj.logical_id = tuple(obj.logical_id)
if obj.dev_type in constants.DTS_DRBD:
# we need a tuple of length six here
if len(obj.logical_id) < 6:
obj.logical_id += (None,) * (6 - len(obj.logical_id))
return obj
def __str__(self):
"""Custom str() formatter for disks.
"""
if self.dev_type == constants.DT_PLAIN:
val = "<LogicalVolume(/dev/%s/%s" % self.logical_id
elif self.dev_type in constants.DTS_DRBD:
node_a, node_b, port, minor_a, minor_b = self.logical_id[:5]
val = "<DRBD8("
val += ("hosts=%s/%d-%s/%d, port=%s, " %
(node_a, minor_a, node_b, minor_b, port))
if self.children and self.children.count(None) == 0:
val += "backend=%s, metadev=%s" % (self.children[0], self.children[1])
else:
val += "no local storage"
else:
val = ("<Disk(type=%s, logical_id=%s, children=%s" %
(self.dev_type, self.logical_id, self.children))
if self.iv_name is None:
val += ", not visible"
else:
val += ", visible as /dev/%s" % self.iv_name
if self.spindles is not None:
val += ", spindles=%s" % self.spindles
if isinstance(self.size, int):
val += ", size=%dm)>" % self.size
else:
val += ", size='%s')>" % (self.size,)
return val
def Verify(self):
"""Checks that this disk is correctly configured.
"""
all_errors = []
if self.mode not in constants.DISK_ACCESS_SET:
all_errors.append("Disk access mode '%s' is invalid" % (self.mode, ))
return all_errors
def UpgradeConfig(self):
"""Fill defaults for missing configuration values.
"""
if self.children:
for child in self.children:
child.UpgradeConfig()
# FIXME: Make this configurable in Ganeti 2.7
# Params should be an empty dict that gets filled any time needed
# In case of ext template we allow arbitrary params that should not
    # be overridden during a config reload/upgrade.
if not self.params or not isinstance(self.params, dict):
self.params = {}
# add here config upgrade for this disk
if self.serial_no is None:
self.serial_no = 1
if self.mtime is None:
self.mtime = time.time()
if self.ctime is None:
self.ctime = time.time()
# map of legacy device types (mapping differing LD constants to new
# DT constants)
LEG_DEV_TYPE_MAP = {"lvm": constants.DT_PLAIN, "drbd8": constants.DT_DRBD8}
if self.dev_type in LEG_DEV_TYPE_MAP:
self.dev_type = LEG_DEV_TYPE_MAP[self.dev_type]
@staticmethod
def ComputeLDParams(disk_template, disk_params):
"""Computes Logical Disk parameters from Disk Template parameters.
@type disk_template: string
@param disk_template: disk template, one of L{constants.DISK_TEMPLATES}
@type disk_params: dict
@param disk_params: disk template parameters;
                        dict(template_name -> parameters)
@rtype: list(dict)
@return: a list of dicts, one for each node of the disk hierarchy. Each dict
contains the LD parameters of the node. The tree is flattened in-order.
"""
if disk_template not in constants.DISK_TEMPLATES:
raise errors.ProgrammerError("Unknown disk template %s" % disk_template)
assert disk_template in disk_params
result = list()
dt_params = disk_params[disk_template]
if disk_template == constants.DT_DRBD8:
result.append(FillDict(constants.DISK_LD_DEFAULTS[constants.DT_DRBD8], {
constants.LDP_RESYNC_RATE: dt_params[constants.DRBD_RESYNC_RATE],
constants.LDP_BARRIERS: dt_params[constants.DRBD_DISK_BARRIERS],
constants.LDP_NO_META_FLUSH: dt_params[constants.DRBD_META_BARRIERS],
constants.LDP_DEFAULT_METAVG: dt_params[constants.DRBD_DEFAULT_METAVG],
constants.LDP_DISK_CUSTOM: dt_params[constants.DRBD_DISK_CUSTOM],
constants.LDP_NET_CUSTOM: dt_params[constants.DRBD_NET_CUSTOM],
constants.LDP_PROTOCOL: dt_params[constants.DRBD_PROTOCOL],
constants.LDP_DYNAMIC_RESYNC: dt_params[constants.DRBD_DYNAMIC_RESYNC],
constants.LDP_PLAN_AHEAD: dt_params[constants.DRBD_PLAN_AHEAD],
constants.LDP_FILL_TARGET: dt_params[constants.DRBD_FILL_TARGET],
constants.LDP_DELAY_TARGET: dt_params[constants.DRBD_DELAY_TARGET],
constants.LDP_MAX_RATE: dt_params[constants.DRBD_MAX_RATE],
constants.LDP_MIN_RATE: dt_params[constants.DRBD_MIN_RATE],
}))
# data LV
result.append(FillDict(constants.DISK_LD_DEFAULTS[constants.DT_PLAIN], {
constants.LDP_STRIPES: dt_params[constants.DRBD_DATA_STRIPES],
}))
# metadata LV
result.append(FillDict(constants.DISK_LD_DEFAULTS[constants.DT_PLAIN], {
constants.LDP_STRIPES: dt_params[constants.DRBD_META_STRIPES],
}))
else:
defaults = constants.DISK_LD_DEFAULTS[disk_template]
values = {}
for field in defaults:
values[field] = dt_params[field]
result.append(FillDict(defaults, values))
return result
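# Example usage (illustrative): computing the per-level parameters for a DRBD
# disk from the built-in defaults yields one dict for the DRBD device followed
# by one for the data LV and one for the metadata LV:
#
#   >>> params = Disk.ComputeLDParams(constants.DT_DRBD8,
#   ...                               constants.DISK_DT_DEFAULTS)
#   >>> len(params)
#   3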
class InstancePolicy(ConfigObject):
"""Config object representing instance policy limits dictionary.
Note that this object is not actually used in the config, it's just
used as a placeholder for a few functions.
"""
@classmethod
def UpgradeDiskTemplates(cls, ipolicy, enabled_disk_templates):
"""Upgrades the ipolicy configuration."""
if constants.IPOLICY_DTS in ipolicy:
if not set(ipolicy[constants.IPOLICY_DTS]).issubset(
set(enabled_disk_templates)):
ipolicy[constants.IPOLICY_DTS] = list(
set(ipolicy[constants.IPOLICY_DTS]) & set(enabled_disk_templates))
@classmethod
def CheckParameterSyntax(cls, ipolicy, check_std):
""" Check the instance policy for validity.
@type ipolicy: dict
@param ipolicy: dictionary with min/max/std specs and policies
@type check_std: bool
@param check_std: Whether to check std value or just assume compliance
@raise errors.ConfigurationError: when the policy is not legal
"""
InstancePolicy.CheckISpecSyntax(ipolicy, check_std)
if constants.IPOLICY_DTS in ipolicy:
InstancePolicy.CheckDiskTemplates(ipolicy[constants.IPOLICY_DTS])
for key in constants.IPOLICY_PARAMETERS:
if key in ipolicy:
InstancePolicy.CheckParameter(key, ipolicy[key])
wrong_keys = frozenset(ipolicy.keys()) - constants.IPOLICY_ALL_KEYS
if wrong_keys:
raise errors.ConfigurationError("Invalid keys in ipolicy: %s" %
utils.CommaJoin(wrong_keys))
@classmethod
def _CheckIncompleteSpec(cls, spec, keyname):
missing_params = constants.ISPECS_PARAMETERS - frozenset(spec.keys())
if missing_params:
msg = ("Missing instance specs parameters for %s: %s" %
(keyname, utils.CommaJoin(missing_params)))
raise errors.ConfigurationError(msg)
@classmethod
def CheckISpecSyntax(cls, ipolicy, check_std):
"""Check the instance policy specs for validity.
@type ipolicy: dict
@param ipolicy: dictionary with min/max/std specs
@type check_std: bool
@param check_std: Whether to check std value or just assume compliance
@raise errors.ConfigurationError: when specs are not valid
"""
if constants.ISPECS_MINMAX not in ipolicy:
# Nothing to check
return
if check_std and constants.ISPECS_STD not in ipolicy:
msg = "Missing key in ipolicy: %s" % constants.ISPECS_STD
raise errors.ConfigurationError(msg)
stdspec = ipolicy.get(constants.ISPECS_STD)
if check_std:
InstancePolicy._CheckIncompleteSpec(stdspec, constants.ISPECS_STD)
if not ipolicy[constants.ISPECS_MINMAX]:
raise errors.ConfigurationError("Empty minmax specifications")
std_is_good = False
for minmaxspecs in ipolicy[constants.ISPECS_MINMAX]:
missing = constants.ISPECS_MINMAX_KEYS - frozenset(minmaxspecs.keys())
if missing:
msg = "Missing instance specification: %s" % utils.CommaJoin(missing)
raise errors.ConfigurationError(msg)
for (key, spec) in minmaxspecs.items():
InstancePolicy._CheckIncompleteSpec(spec, key)
spec_std_ok = True
for param in constants.ISPECS_PARAMETERS:
par_std_ok = InstancePolicy._CheckISpecParamSyntax(minmaxspecs, stdspec,
param, check_std)
spec_std_ok = spec_std_ok and par_std_ok
std_is_good = std_is_good or spec_std_ok
if not std_is_good:
raise errors.ConfigurationError("Invalid std specifications")
@classmethod
def _CheckISpecParamSyntax(cls, minmaxspecs, stdspec, name, check_std):
"""Check the instance policy specs for validity on a given key.
  We check if the instance specs make sense for a given key, that is
  if minmaxspecs[min][name] <= stdspec[name] <= minmaxspecs[max][name].
@type minmaxspecs: dict
@param minmaxspecs: dictionary with min and max instance spec
@type stdspec: dict
@param stdspec: dictionary with standard instance spec
@type name: string
@param name: what are the limits for
@type check_std: bool
@param check_std: Whether to check std value or just assume compliance
@rtype: bool
@return: C{True} when specs are valid, C{False} when standard spec for the
given name is not valid
@raise errors.ConfigurationError: when min/max specs for the given name
are not valid
"""
minspec = minmaxspecs[constants.ISPECS_MIN]
maxspec = minmaxspecs[constants.ISPECS_MAX]
min_v = minspec[name]
max_v = maxspec[name]
if min_v > max_v:
err = ("Invalid specification of min/max values for %s: %s/%s" %
(name, min_v, max_v))
raise errors.ConfigurationError(err)
elif check_std:
std_v = stdspec.get(name, min_v)
return std_v >= min_v and std_v <= max_v
else:
return True
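  # Added illustration (not part of the original source): with hypothetical
  # values such as
  #   minmaxspecs = {ISPECS_MIN: {"memory-size": 128},
  #                  ISPECS_MAX: {"memory-size": 512}}
  #   stdspec     = {"memory-size": 256}
  # the check above returns True (128 <= 256 <= 512); a stdspec value of 1024
  # would return False, and min=512 with max=128 would raise
  # errors.ConfigurationError.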
@classmethod
def CheckDiskTemplates(cls, disk_templates):
"""Checks the disk templates for validity.
"""
if not disk_templates:
raise errors.ConfigurationError("Instance policy must contain" +
" at least one disk template")
wrong = frozenset(disk_templates).difference(constants.DISK_TEMPLATES)
if wrong:
raise errors.ConfigurationError("Invalid disk template(s) %s" %
utils.CommaJoin(wrong))
@classmethod
def CheckParameter(cls, key, value):
"""Checks a parameter.
Currently we expect all parameters to be float values.
"""
try:
float(value)
except (TypeError, ValueError), err:
raise errors.ConfigurationError("Invalid value for key" " '%s':"
" '%s', error: %s" % (key, value, err))
def GetOSImage(osparams):
"""Gets the OS image value from the OS parameters.
@type osparams: L{dict} or NoneType
@param osparams: OS parameters or None
@rtype: string or NoneType
@return:
value of OS image contained in OS parameters, or None if the OS
parameters are None or the OS parameters do not contain an OS
image
"""
if osparams is None:
return None
else:
return osparams.get("os-image", None)
def PutOSImage(osparams, os_image):
"""Update OS image value in the OS parameters
@type osparams: L{dict}
@param osparams: OS parameters
@type os_image: string
@param os_image: OS image
@rtype: NoneType
@return: None
"""
osparams["os-image"] = os_image
class Instance(TaggableObject):
"""Config object representing an instance."""
__slots__ = [
"forthcoming",
"name",
"primary_node",
"secondary_nodes",
"os",
"hypervisor",
"hvparams",
"beparams",
"osparams",
"osparams_private",
"admin_state",
"admin_state_source",
"nics",
"disks",
"disks_info",
"disk_template",
"disks_active",
"network_port",
"serial_no",
] + _TIMESTAMPS + _UUID
def FindDisk(self, idx):
"""Find a disk given having a specified index.
This is just a wrapper that does validation of the index.
@type idx: int
@param idx: the disk index
@rtype: string
@return: the corresponding disk's uuid
@raise errors.OpPrereqError: when the given index is not valid
"""
try:
idx = int(idx)
return self.disks[idx]
except (TypeError, ValueError), err:
raise errors.OpPrereqError("Invalid disk index: '%s'" % str(err),
errors.ECODE_INVAL)
except IndexError:
raise errors.OpPrereqError("Invalid disk index: %d (instace has disks"
" 0 to %d" % (idx, len(self.disks) - 1),
errors.ECODE_INVAL)
def ToDict(self, _with_private=False):
"""Instance-specific conversion to standard python types.
This replaces the children lists of objects with lists of standard
python types.
"""
bo = super(Instance, self).ToDict(_with_private=_with_private)
if _with_private:
bo["osparams_private"] = self.osparams_private.Unprivate()
for attr in "nics", :
alist = bo.get(attr, None)
if alist:
nlist = outils.ContainerToDicts(alist)
else:
nlist = []
bo[attr] = nlist
if 'disk_template' in bo:
del bo['disk_template']
return bo
@classmethod
def FromDict(cls, val):
"""Custom function for instances.
"""
if "admin_state" not in val:
if val.get("admin_up", False):
val["admin_state"] = constants.ADMINST_UP
else:
val["admin_state"] = constants.ADMINST_DOWN
if "admin_up" in val:
del val["admin_up"]
obj = super(Instance, cls).FromDict(val)
obj.nics = outils.ContainerFromDicts(obj.nics, list, NIC)
# attribute 'disks_info' is only present when deserializing from a RPC
# call in the backend
disks_info = getattr(obj, "disks_info", None)
if disks_info:
obj.disks_info = outils.ContainerFromDicts(disks_info, list, Disk)
return obj
def UpgradeConfig(self):
"""Fill defaults for missing configuration values.
"""
if self.admin_state_source is None:
self.admin_state_source = constants.ADMIN_SOURCE
for nic in self.nics:
nic.UpgradeConfig()
if self.disks is None:
self.disks = []
if self.hvparams:
for key in constants.HVC_GLOBALS:
try:
del self.hvparams[key]
except KeyError:
pass
if self.osparams is None:
self.osparams = {}
if self.osparams_private is None:
self.osparams_private = serializer.PrivateDict()
UpgradeBeParams(self.beparams)
if self.disks_active is None:
self.disks_active = self.admin_state == constants.ADMINST_UP
class OS(ConfigObject):
"""Config object representing an operating system.
@type supported_parameters: list
@ivar supported_parameters: a list of tuples, name and description,
      containing the parameters supported by this OS
@type VARIANT_DELIM: string
@cvar VARIANT_DELIM: the variant delimiter
"""
__slots__ = [
"name",
"path",
"api_versions",
"create_script",
"create_script_untrusted",
"export_script",
"import_script",
"rename_script",
"verify_script",
"supported_variants",
"supported_parameters",
]
VARIANT_DELIM = "+"
@classmethod
def SplitNameVariant(cls, name):
"""Splits the name into the proper name and variant.
@param name: the OS (unprocessed) name
@rtype: list
@return: a list of two elements; if the original name didn't
contain a variant, it's returned as an empty string
"""
nv = name.split(cls.VARIANT_DELIM, 1)
if len(nv) == 1:
nv.append("")
return nv
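  # For example (added note, derived from the code above):
  #   OS.SplitNameVariant("debian+squeeze") -> ["debian", "squeeze"]
  #   OS.SplitNameVariant("debian")         -> ["debian", ""]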
@classmethod
def GetName(cls, name):
"""Returns the proper name of the os (without the variant).
@param name: the OS (unprocessed) name
"""
return cls.SplitNameVariant(name)[0]
@classmethod
def GetVariant(cls, name):
"""Returns the variant the os (without the base name).
@param name: the OS (unprocessed) name
"""
return cls.SplitNameVariant(name)[1]
def IsTrusted(self):
"""Returns whether this OS is trusted.
@rtype: bool
@return: L{True} if this OS is trusted, L{False} otherwise
"""
return not self.create_script_untrusted
class ExtStorage(ConfigObject):
"""Config object representing an External Storage Provider.
"""
__slots__ = [
"name",
"path",
"create_script",
"remove_script",
"grow_script",
"attach_script",
"detach_script",
"setinfo_script",
"verify_script",
"snapshot_script",
"open_script",
"close_script",
"supported_parameters",
]
class NodeHvState(ConfigObject):
"""Hypvervisor state on a node.
@ivar mem_total: Total amount of memory
@ivar mem_node: Memory used by, or reserved for, the node itself (not always
available)
@ivar mem_hv: Memory used by hypervisor or lost due to instance allocation
rounding
@ivar mem_inst: Memory used by instances living on node
@ivar cpu_total: Total node CPU core count
@ivar cpu_node: Number of CPU cores reserved for the node itself
"""
__slots__ = [
"mem_total",
"mem_node",
"mem_hv",
"mem_inst",
"cpu_total",
"cpu_node",
] + _TIMESTAMPS
class NodeDiskState(ConfigObject):
"""Disk state on a node.
"""
__slots__ = [
"total",
"reserved",
"overhead",
] + _TIMESTAMPS
class Node(TaggableObject):
"""Config object representing a node.
@ivar hv_state: Hypervisor state (e.g. number of CPUs)
  @ivar hv_state_static: Hypervisor state overridden by user
@ivar disk_state: Disk state (e.g. free space)
  @ivar disk_state_static: Disk state overridden by user
"""
__slots__ = [
"name",
"primary_ip",
"secondary_ip",
"serial_no",
"master_candidate",
"offline",
"drained",
"group",
"master_capable",
"vm_capable",
"ndparams",
"powered",
"hv_state",
"hv_state_static",
"disk_state",
"disk_state_static",
] + _TIMESTAMPS + _UUID
def UpgradeConfig(self):
"""Fill defaults for missing configuration values.
"""
# pylint: disable=E0203
# because these are "defined" via slots, not manually
if self.master_capable is None:
self.master_capable = True
if self.vm_capable is None:
self.vm_capable = True
if self.ndparams is None:
self.ndparams = {}
# And remove any global parameter
for key in constants.NDC_GLOBALS:
if key in self.ndparams:
logging.warning("Ignoring %s node parameter for node %s",
key, self.name)
del self.ndparams[key]
if self.powered is None:
self.powered = True
def ToDict(self, _with_private=False):
"""Custom function for serializing.
"""
data = super(Node, self).ToDict(_with_private=_with_private)
hv_state = data.get("hv_state", None)
if hv_state is not None:
data["hv_state"] = outils.ContainerToDicts(hv_state)
disk_state = data.get("disk_state", None)
if disk_state is not None:
data["disk_state"] = \
dict((key, outils.ContainerToDicts(value))
for (key, value) in disk_state.items())
return data
@classmethod
def FromDict(cls, val):
"""Custom function for deserializing.
"""
obj = super(Node, cls).FromDict(val)
if obj.hv_state is not None:
obj.hv_state = \
outils.ContainerFromDicts(obj.hv_state, dict, NodeHvState)
if obj.disk_state is not None:
obj.disk_state = \
dict((key, outils.ContainerFromDicts(value, dict, NodeDiskState))
for (key, value) in obj.disk_state.items())
return obj
class NodeGroup(TaggableObject):
"""Config object representing a node group."""
__slots__ = [
"name",
"members",
"ndparams",
"diskparams",
"ipolicy",
"serial_no",
"hv_state_static",
"disk_state_static",
"alloc_policy",
"networks",
] + _TIMESTAMPS + _UUID
def ToDict(self, _with_private=False):
"""Custom function for nodegroup.
This discards the members object, which gets recalculated and is only kept
in memory.
"""
mydict = super(NodeGroup, self).ToDict(_with_private=_with_private)
del mydict["members"]
return mydict
@classmethod
def FromDict(cls, val):
"""Custom function for nodegroup.
The members slot is initialized to an empty list, upon deserialization.
"""
obj = super(NodeGroup, cls).FromDict(val)
obj.members = []
return obj
def UpgradeConfig(self):
"""Fill defaults for missing configuration values.
"""
if self.ndparams is None:
self.ndparams = {}
if self.serial_no is None:
self.serial_no = 1
if self.alloc_policy is None:
self.alloc_policy = constants.ALLOC_POLICY_PREFERRED
# We only update mtime, and not ctime, since we would not be able
# to provide a correct value for creation time.
if self.mtime is None:
self.mtime = time.time()
if self.diskparams is None:
self.diskparams = {}
if self.ipolicy is None:
self.ipolicy = MakeEmptyIPolicy()
if self.networks is None:
self.networks = {}
for network, netparams in self.networks.items():
self.networks[network] = FillDict(constants.NICC_DEFAULTS, netparams)
def FillND(self, node):
"""Return filled out ndparams for L{objects.Node}
@type node: L{objects.Node}
@param node: A Node object to fill
@return a copy of the node's ndparams with defaults filled
"""
return self.SimpleFillND(node.ndparams)
def SimpleFillND(self, ndparams):
"""Fill a given ndparams dict with defaults.
@type ndparams: dict
@param ndparams: the dict to fill
@rtype: dict
@return: a copy of the passed in ndparams with missing keys filled
from the node group defaults
"""
return FillDict(self.ndparams, ndparams)
class Cluster(TaggableObject):
"""Config object representing the cluster."""
__slots__ = [
"serial_no",
"rsahostkeypub",
"dsahostkeypub",
"highest_used_port",
"tcpudp_port_pool",
"mac_prefix",
"volume_group_name",
"reserved_lvs",
"drbd_usermode_helper",
"default_bridge",
"default_hypervisor",
"master_node",
"master_ip",
"master_netdev",
"master_netmask",
"use_external_mip_script",
"cluster_name",
"file_storage_dir",
"shared_file_storage_dir",
"gluster_storage_dir",
"enabled_hypervisors",
"hvparams",
"ipolicy",
"os_hvp",
"beparams",
"osparams",
"osparams_private_cluster",
"nicparams",
"ndparams",
"diskparams",
"candidate_pool_size",
"modify_etc_hosts",
"modify_ssh_setup",
"maintain_node_health",
"uid_pool",
"default_iallocator",
"default_iallocator_params",
"hidden_os",
"blacklisted_os",
"primary_ip_family",
"prealloc_wipe_disks",
"hv_state_static",
"disk_state_static",
"enabled_disk_templates",
"candidate_certs",
"max_running_jobs",
"max_tracked_jobs",
"install_image",
"instance_communication_network",
"zeroing_image",
"compression_tools",
"enabled_user_shutdown",
"data_collectors",
"diagnose_data_collector_filename",
] + _TIMESTAMPS + _UUID
def UpgradeConfig(self):
"""Fill defaults for missing configuration values.
"""
# pylint: disable=E0203
# because these are "defined" via slots, not manually
if self.hvparams is None:
self.hvparams = constants.HVC_DEFAULTS
else:
for hypervisor in constants.HYPER_TYPES:
try:
existing_params = self.hvparams[hypervisor]
except KeyError:
existing_params = {}
self.hvparams[hypervisor] = FillDict(
constants.HVC_DEFAULTS[hypervisor], existing_params)
if self.os_hvp is None:
self.os_hvp = {}
if self.osparams is None:
self.osparams = {}
# osparams_private_cluster added in 2.12
if self.osparams_private_cluster is None:
self.osparams_private_cluster = {}
self.ndparams = UpgradeNDParams(self.ndparams)
self.beparams = UpgradeGroupedParams(self.beparams,
constants.BEC_DEFAULTS)
for beparams_group in self.beparams:
UpgradeBeParams(self.beparams[beparams_group])
migrate_default_bridge = not self.nicparams
self.nicparams = UpgradeGroupedParams(self.nicparams,
constants.NICC_DEFAULTS)
if migrate_default_bridge:
self.nicparams[constants.PP_DEFAULT][constants.NIC_LINK] = \
self.default_bridge
if self.modify_etc_hosts is None:
self.modify_etc_hosts = True
if self.modify_ssh_setup is None:
self.modify_ssh_setup = True
# default_bridge is no longer used in 2.1. The slot is left there to
# support auto-upgrading. It can be removed once we decide to deprecate
# upgrading straight from 2.0.
if self.default_bridge is not None:
self.default_bridge = None
# default_hypervisor is just the first enabled one in 2.1. This slot and
# code can be removed once upgrading straight from 2.0 is deprecated.
if self.default_hypervisor is not None:
self.enabled_hypervisors = ([self.default_hypervisor] +
[hvname for hvname in self.enabled_hypervisors
if hvname != self.default_hypervisor])
self.default_hypervisor = None
# maintain_node_health added after 2.1.1
if self.maintain_node_health is None:
self.maintain_node_health = False
if self.uid_pool is None:
self.uid_pool = []
if self.default_iallocator is None:
self.default_iallocator = ""
if self.default_iallocator_params is None:
self.default_iallocator_params = {}
# reserved_lvs added before 2.2
if self.reserved_lvs is None:
self.reserved_lvs = []
# hidden and blacklisted operating systems added before 2.2.1
if self.hidden_os is None:
self.hidden_os = []
if self.blacklisted_os is None:
self.blacklisted_os = []
# primary_ip_family added before 2.3
if self.primary_ip_family is None:
self.primary_ip_family = AF_INET
if self.master_netmask is None:
ipcls = netutils.IPAddress.GetClassFromIpFamily(self.primary_ip_family)
self.master_netmask = ipcls.iplen
if self.prealloc_wipe_disks is None:
self.prealloc_wipe_disks = False
# shared_file_storage_dir added before 2.5
if self.shared_file_storage_dir is None:
self.shared_file_storage_dir = ""
# gluster_storage_dir added in 2.11
if self.gluster_storage_dir is None:
self.gluster_storage_dir = ""
if self.use_external_mip_script is None:
self.use_external_mip_script = False
if self.diskparams:
self.diskparams = UpgradeDiskParams(self.diskparams)
else:
self.diskparams = constants.DISK_DT_DEFAULTS.copy()
# instance policy added before 2.6
if self.ipolicy is None:
self.ipolicy = FillIPolicy(constants.IPOLICY_DEFAULTS, {})
else:
# we can either make sure to upgrade the ipolicy always, or only
# do it in some corner cases (e.g. missing keys); note that this
# will break any removal of keys from the ipolicy dict
wrongkeys = frozenset(self.ipolicy.keys()) - constants.IPOLICY_ALL_KEYS
if wrongkeys:
# These keys would be silently removed by FillIPolicy()
msg = ("Cluster instance policy contains spurious keys: %s" %
utils.CommaJoin(wrongkeys))
raise errors.ConfigurationError(msg)
self.ipolicy = FillIPolicy(constants.IPOLICY_DEFAULTS, self.ipolicy)
# hv_state_static added in 2.7
if self.hv_state_static is None:
self.hv_state_static = {}
if self.disk_state_static is None:
self.disk_state_static = {}
if self.candidate_certs is None:
self.candidate_certs = {}
if self.max_running_jobs is None:
self.max_running_jobs = constants.LUXID_MAXIMAL_RUNNING_JOBS_DEFAULT
if self.max_tracked_jobs is None:
self.max_tracked_jobs = constants.LUXID_MAXIMAL_TRACKED_JOBS_DEFAULT
if self.instance_communication_network is None:
self.instance_communication_network = ""
if self.install_image is None:
self.install_image = ""
if self.compression_tools is None:
self.compression_tools = constants.IEC_DEFAULT_TOOLS
if self.enabled_user_shutdown is None:
self.enabled_user_shutdown = False
@property
def primary_hypervisor(self):
"""The first hypervisor is the primary.
Useful, for example, for L{Node}'s hv/disk state.
"""
return self.enabled_hypervisors[0]
def ToDict(self, _with_private=False):
"""Custom function for cluster.
"""
mydict = super(Cluster, self).ToDict(_with_private=_with_private)
# Explicitly save private parameters.
if _with_private:
for os in mydict["osparams_private_cluster"]:
mydict["osparams_private_cluster"][os] = \
self.osparams_private_cluster[os].Unprivate()
if self.tcpudp_port_pool is None:
tcpudp_port_pool = []
else:
tcpudp_port_pool = list(self.tcpudp_port_pool)
mydict["tcpudp_port_pool"] = tcpudp_port_pool
return mydict
@classmethod
def FromDict(cls, val):
"""Custom function for cluster.
"""
obj = super(Cluster, cls).FromDict(val)
if obj.tcpudp_port_pool is None:
obj.tcpudp_port_pool = set()
elif not isinstance(obj.tcpudp_port_pool, set):
obj.tcpudp_port_pool = set(obj.tcpudp_port_pool)
return obj
def SimpleFillDP(self, diskparams):
"""Fill a given diskparams dict with cluster defaults.
@param diskparams: The diskparams
@return: The defaults dict
"""
return FillDiskParams(self.diskparams, diskparams)
def GetHVDefaults(self, hypervisor, os_name=None, skip_keys=None):
"""Get the default hypervisor parameters for the cluster.
@param hypervisor: the hypervisor name
@param os_name: if specified, we'll also update the defaults for this OS
@param skip_keys: if passed, list of keys not to use
@return: the defaults dict
"""
if skip_keys is None:
skip_keys = []
fill_stack = [self.hvparams.get(hypervisor, {})]
if os_name is not None:
os_hvp = self.os_hvp.get(os_name, {}).get(hypervisor, {})
fill_stack.append(os_hvp)
ret_dict = {}
for o_dict in fill_stack:
ret_dict = FillDict(ret_dict, o_dict, skip_keys=skip_keys)
return ret_dict
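  # Added illustration with hypothetical values: OS-specific hypervisor
  # parameters override the cluster-wide ones, e.g. for
  #   self.hvparams = {"kvm": {"kernel_path": "/vmlinuz", "acpi": True}}
  #   self.os_hvp   = {"debian": {"kvm": {"kernel_path": ""}}}
  # GetHVDefaults("kvm", "debian") returns
  #   {"kernel_path": "", "acpi": True}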
def SimpleFillHV(self, hv_name, os_name, hvparams, skip_globals=False):
"""Fill a given hvparams dict with cluster defaults.
@type hv_name: string
@param hv_name: the hypervisor to use
@type os_name: string
@param os_name: the OS to use for overriding the hypervisor defaults
@type skip_globals: boolean
@param skip_globals: if True, the global hypervisor parameters will
not be filled
@rtype: dict
@return: a copy of the given hvparams with missing keys filled from
the cluster defaults
"""
if skip_globals:
skip_keys = constants.HVC_GLOBALS
else:
skip_keys = []
def_dict = self.GetHVDefaults(hv_name, os_name, skip_keys=skip_keys)
return FillDict(def_dict, hvparams, skip_keys=skip_keys)
def FillHV(self, instance, skip_globals=False):
"""Fill an instance's hvparams dict with cluster defaults.
@type instance: L{objects.Instance}
@param instance: the instance parameter to fill
@type skip_globals: boolean
@param skip_globals: if True, the global hypervisor parameters will
not be filled
@rtype: dict
@return: a copy of the instance's hvparams with missing keys filled from
the cluster defaults
"""
return self.SimpleFillHV(instance.hypervisor, instance.os,
instance.hvparams, skip_globals)
def SimpleFillBE(self, beparams):
"""Fill a given beparams dict with cluster defaults.
@type beparams: dict
@param beparams: the dict to fill
@rtype: dict
@return: a copy of the passed in beparams with missing keys filled
from the cluster defaults
"""
return FillDict(self.beparams.get(constants.PP_DEFAULT, {}), beparams)
def FillBE(self, instance):
"""Fill an instance's beparams dict with cluster defaults.
@type instance: L{objects.Instance}
@param instance: the instance parameter to fill
@rtype: dict
@return: a copy of the instance's beparams with missing keys filled from
the cluster defaults
"""
return self.SimpleFillBE(instance.beparams)
def SimpleFillNIC(self, nicparams):
"""Fill a given nicparams dict with cluster defaults.
@type nicparams: dict
@param nicparams: the dict to fill
@rtype: dict
@return: a copy of the passed in nicparams with missing keys filled
from the cluster defaults
"""
return FillDict(self.nicparams.get(constants.PP_DEFAULT, {}), nicparams)
def SimpleFillOS(self, os_name,
os_params_public,
os_params_private=None,
os_params_secret=None):
"""Fill an instance's osparams dict with cluster defaults.
@type os_name: string
@param os_name: the OS name to use
@type os_params_public: dict
@param os_params_public: the dict to fill with default values
@type os_params_private: dict
@param os_params_private: the dict with private fields to fill
with default values. Not passing this field
results in no private fields being added to the
return value. Private fields will be wrapped in
L{Private} objects.
@type os_params_secret: dict
@param os_params_secret: the dict with secret fields to fill
with default values. Not passing this field
results in no secret fields being added to the
return value. Private fields will be wrapped in
L{Private} objects.
@rtype: dict
@return: a copy of the instance's osparams with missing keys filled from
the cluster defaults. Private and secret parameters are not included
unless the respective optional parameters are supplied.
"""
if os_name is None:
name_only = None
else:
name_only = OS.GetName(os_name)
defaults_base_public = self.osparams.get(name_only, {})
defaults_public = FillDict(defaults_base_public,
self.osparams.get(os_name, {}))
params_public = FillDict(defaults_public, os_params_public)
if os_params_private is not None:
defaults_base_private = self.osparams_private_cluster.get(name_only, {})
defaults_private = FillDict(defaults_base_private,
self.osparams_private_cluster.get(os_name,
{}))
params_private = FillDict(defaults_private, os_params_private)
else:
params_private = {}
if os_params_secret is not None:
# There can't be default secret settings, so there's nothing to be done.
params_secret = os_params_secret
else:
params_secret = {}
# Enforce that the set of keys be distinct:
duplicate_keys = utils.GetRepeatedKeys(params_public,
params_private,
params_secret)
if not duplicate_keys:
# Actually update them:
params_public.update(params_private)
params_public.update(params_secret)
return params_public
else:
def formatter(keys):
return utils.CommaJoin(sorted(map(repr, keys))) if keys else "(none)"
#Lose the values.
params_public = set(params_public)
params_private = set(params_private)
params_secret = set(params_secret)
msg = """Cannot assign multiple values to OS parameters.
Conflicting OS parameters that would have been set by this operation:
- at public visibility: {public}
- at private visibility: {private}
- at secret visibility: {secret}
""".format(dupes=formatter(duplicate_keys),
public=formatter(params_public & duplicate_keys),
private=formatter(params_private & duplicate_keys),
secret=formatter(params_secret & duplicate_keys))
raise errors.OpPrereqError(msg)
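  # Added note: public parameters are layered in this order (later entries
  # win): cluster defaults for the base OS name, cluster defaults for the
  # full "name+variant" string, then the supplied os_params_public.  Private
  # parameters follow the same scheme against osparams_private_cluster,
  # while secret parameters have no cluster-level defaults at all.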
@staticmethod
def SimpleFillHvState(hv_state):
"""Fill an hv_state sub dict with cluster defaults.
"""
return FillDict(constants.HVST_DEFAULTS, hv_state)
@staticmethod
def SimpleFillDiskState(disk_state):
"""Fill an disk_state sub dict with cluster defaults.
"""
return FillDict(constants.DS_DEFAULTS, disk_state)
def FillND(self, node, nodegroup):
"""Return filled out ndparams for L{objects.NodeGroup} and L{objects.Node}
@type node: L{objects.Node}
@param node: A Node object to fill
@type nodegroup: L{objects.NodeGroup}
    @param nodegroup: A NodeGroup object to fill
@return a copy of the node's ndparams with defaults filled
"""
return self.SimpleFillND(nodegroup.FillND(node))
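  # Added note: ndparams are resolved in three layers -- the node's own
  # ndparams override the node group's, which in turn override the
  # cluster-wide defaults (FillND above chains the two FillDict calls).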
def FillNDGroup(self, nodegroup):
"""Return filled out ndparams for just L{objects.NodeGroup}
@type nodegroup: L{objects.NodeGroup}
    @param nodegroup: A NodeGroup object to fill
@return a copy of the node group's ndparams with defaults filled
"""
return self.SimpleFillND(nodegroup.SimpleFillND({}))
def SimpleFillND(self, ndparams):
"""Fill a given ndparams dict with defaults.
@type ndparams: dict
@param ndparams: the dict to fill
@rtype: dict
@return: a copy of the passed in ndparams with missing keys filled
from the cluster defaults
"""
return FillDict(self.ndparams, ndparams)
def SimpleFillIPolicy(self, ipolicy):
""" Fill instance policy dict with defaults.
@type ipolicy: dict
@param ipolicy: the dict to fill
@rtype: dict
@return: a copy of passed ipolicy with missing keys filled from
the cluster defaults
"""
return FillIPolicy(self.ipolicy, ipolicy)
def IsDiskTemplateEnabled(self, disk_template):
"""Checks if a particular disk template is enabled.
"""
return utils.storage.IsDiskTemplateEnabled(
disk_template, self.enabled_disk_templates)
def IsFileStorageEnabled(self):
"""Checks if file storage is enabled.
"""
return utils.storage.IsFileStorageEnabled(self.enabled_disk_templates)
def IsSharedFileStorageEnabled(self):
"""Checks if shared file storage is enabled.
"""
return utils.storage.IsSharedFileStorageEnabled(
self.enabled_disk_templates)
class BlockDevStatus(ConfigObject):
"""Config object representing the status of a block device."""
__slots__ = [
"dev_path",
"major",
"minor",
"sync_percent",
"estimated_time",
"is_degraded",
"ldisk_status",
]
class ImportExportStatus(ConfigObject):
"""Config object representing the status of an import or export."""
__slots__ = [
"recent_output",
"listen_port",
"connected",
"progress_mbytes",
"progress_throughput",
"progress_eta",
"progress_percent",
"exit_status",
"error_message",
] + _TIMESTAMPS
class ImportExportOptions(ConfigObject):
"""Options for import/export daemon
@ivar key_name: X509 key name (None for cluster certificate)
@ivar ca_pem: Remote peer CA in PEM format (None for cluster certificate)
@ivar compress: Compression tool to use
@ivar magic: Used to ensure the connection goes to the right disk
@ivar ipv6: Whether to use IPv6
@ivar connect_timeout: Number of seconds for establishing connection
"""
__slots__ = [
"key_name",
"ca_pem",
"compress",
"magic",
"ipv6",
"connect_timeout",
]
class ConfdRequest(ConfigObject):
"""Object holding a confd request.
@ivar protocol: confd protocol version
@ivar type: confd query type
@ivar query: query request
@ivar rsalt: requested reply salt
"""
__slots__ = [
"protocol",
"type",
"query",
"rsalt",
]
class ConfdReply(ConfigObject):
"""Object holding a confd reply.
@ivar protocol: confd protocol version
@ivar status: reply status code (ok, error)
@ivar answer: confd query reply
@ivar serial: configuration serial number
"""
__slots__ = [
"protocol",
"status",
"answer",
"serial",
]
class QueryFieldDefinition(ConfigObject):
"""Object holding a query field definition.
@ivar name: Field name
@ivar title: Human-readable title
@ivar kind: Field type
@ivar doc: Human-readable description
"""
__slots__ = [
"name",
"title",
"kind",
"doc",
]
class _QueryResponseBase(ConfigObject):
__slots__ = [
"fields",
]
def ToDict(self, _with_private=False):
"""Custom function for serializing.
"""
mydict = super(_QueryResponseBase, self).ToDict()
mydict["fields"] = outils.ContainerToDicts(mydict["fields"])
return mydict
@classmethod
def FromDict(cls, val):
"""Custom function for de-serializing.
"""
obj = super(_QueryResponseBase, cls).FromDict(val)
obj.fields = \
outils.ContainerFromDicts(obj.fields, list, QueryFieldDefinition)
return obj
class QueryResponse(_QueryResponseBase):
"""Object holding the response to a query.
@ivar fields: List of L{QueryFieldDefinition} objects
@ivar data: Requested data
"""
__slots__ = [
"data",
]
class QueryFieldsRequest(ConfigObject):
"""Object holding a request for querying available fields.
"""
__slots__ = [
"what",
"fields",
]
class QueryFieldsResponse(_QueryResponseBase):
"""Object holding the response to a query for fields.
@ivar fields: List of L{QueryFieldDefinition} objects
"""
__slots__ = []
class MigrationStatus(ConfigObject):
"""Object holding the status of a migration.
"""
__slots__ = [
"status",
"transferred_ram",
"total_ram",
]
class InstanceConsole(ConfigObject):
"""Object describing how to access the console of an instance.
"""
__slots__ = [
"instance",
"kind",
"message",
"host",
"port",
"user",
"command",
"display",
]
def Validate(self):
"""Validates contents of this object.
"""
assert self.kind in constants.CONS_ALL, "Unknown console type"
assert self.instance, "Missing instance name"
assert self.message or self.kind in [constants.CONS_SSH,
constants.CONS_SPICE,
constants.CONS_VNC]
assert self.host or self.kind == constants.CONS_MESSAGE
assert self.port or self.kind in [constants.CONS_MESSAGE,
constants.CONS_SSH]
assert self.user or self.kind in [constants.CONS_MESSAGE,
constants.CONS_SPICE,
constants.CONS_VNC]
assert self.command or self.kind in [constants.CONS_MESSAGE,
constants.CONS_SPICE,
constants.CONS_VNC]
assert self.display or self.kind in [constants.CONS_MESSAGE,
constants.CONS_SPICE,
constants.CONS_SSH]
class Network(TaggableObject):
"""Object representing a network definition for ganeti.
"""
__slots__ = [
"name",
"serial_no",
"mac_prefix",
"network",
"network6",
"gateway",
"gateway6",
"reservations",
"ext_reservations",
] + _TIMESTAMPS + _UUID
def HooksDict(self, prefix=""):
"""Export a dictionary used by hooks with a network's information.
@type prefix: String
@param prefix: Prefix to prepend to the dict entries
"""
result = {
"%sNETWORK_NAME" % prefix: self.name,
"%sNETWORK_UUID" % prefix: self.uuid,
"%sNETWORK_TAGS" % prefix: " ".join(self.GetTags()),
}
if self.network:
result["%sNETWORK_SUBNET" % prefix] = self.network
if self.gateway:
result["%sNETWORK_GATEWAY" % prefix] = self.gateway
if self.network6:
result["%sNETWORK_SUBNET6" % prefix] = self.network6
if self.gateway6:
result["%sNETWORK_GATEWAY6" % prefix] = self.gateway6
if self.mac_prefix:
result["%sNETWORK_MAC_PREFIX" % prefix] = self.mac_prefix
return result
@classmethod
def FromDict(cls, val):
"""Custom function for networks.
Remove deprecated network_type and family.
"""
if "network_type" in val:
del val["network_type"]
if "family" in val:
del val["family"]
obj = super(Network, cls).FromDict(val)
return obj
# need to inherit object in order to use super()
class SerializableConfigParser(ConfigParser.SafeConfigParser, object):
"""Simple wrapper over ConfigParse that allows serialization.
This class is basically ConfigParser.SafeConfigParser with two
additional methods that allow it to serialize/unserialize to/from a
buffer.
"""
def Dumps(self):
"""Dump this instance and return the string representation."""
buf = StringIO()
self.write(buf)
return buf.getvalue()
@classmethod
def Loads(cls, data):
"""Load data from a string."""
buf = StringIO(data)
cfp = cls()
cfp.readfp(buf)
return cfp
def get(self, section, option, **kwargs):
value = None
try:
value = super(SerializableConfigParser, self).get(section, option,
**kwargs)
if value.lower() == constants.VALUE_NONE:
value = None
except ConfigParser.NoOptionError:
r = re.compile(r"(disk|nic)\d+_name|nic\d+_(network|vlan)")
match = r.match(option)
if match:
pass
else:
raise
return value
class LvmPvInfo(ConfigObject):
"""Information about an LVM physical volume (PV).
@type name: string
@ivar name: name of the PV
@type vg_name: string
@ivar vg_name: name of the volume group containing the PV
@type size: float
@ivar size: size of the PV in MiB
@type free: float
@ivar free: free space in the PV, in MiB
@type attributes: string
@ivar attributes: PV attributes
@type lv_list: list of strings
@ivar lv_list: names of the LVs hosted on the PV
"""
__slots__ = [
"name",
"vg_name",
"size",
"free",
"attributes",
"lv_list"
]
def IsEmpty(self):
"""Is this PV empty?
"""
return self.size <= (self.free + 1)
def IsAllocatable(self):
"""Is this PV allocatable?
"""
return ("a" in self.attributes)
| bsd-2-clause | 6,913,690,987,732,189,000 | 29.13757 | 80 | 0.650476 | false |
mostaphaRoudsari/Honeybee | src/Honeybee_Load OpenStudio Measure.py | 1 | 15679 | #
# Honeybee: A Plugin for Environmental Analysis (GPL) started by Mostapha Sadeghipour Roudsari
#
# This file is part of Honeybee.
#
# Copyright (c) 2013-2020, Mostapha Sadeghipour Roudsari <[email protected]>
# Honeybee is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published
# by the Free Software Foundation; either version 3 of the License,
# or (at your option) any later version.
#
# Honeybee is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Honeybee; If not, see <http://www.gnu.org/licenses/>.
#
# @license GPL-3.0+ <http://spdx.org/licenses/GPL-3.0+>
"""
This component loads OpenStudio measures into Honeybee. The measure can be applied to an OpenStudio model.
Read more about OpenStudio measures here: http://nrel.github.io/OpenStudio-user-documentation/reference/measure_writing_guide/
You can download several measures from here: https://bcl.nrel.gov/nrel/types/measure
-
Provided by Honeybee 0.0.66
Args:
_OSMeasure: Path to measure directory [NOT THE FILE]. This input will be removed once measure is loaded
Returns:
OSMeasure: Loaded OpenStudio measure
"""
ghenv.Component.Name = "Honeybee_Load OpenStudio Measure"
ghenv.Component.NickName = 'importOSMeasure'
ghenv.Component.Message = 'VER 0.0.66\nJUL_07_2020'
ghenv.Component.IconDisplayMode = ghenv.Component.IconDisplayMode.application
ghenv.Component.Category = "HB-Legacy"
ghenv.Component.SubCategory = "09 | Energy | HVACSystems"
#compatibleHBVersion = VER 0.0.56\nJUL_25_2017
#compatibleLBVersion = VER 0.0.59\nFEB_01_2015
try: ghenv.Component.AdditionalHelpFromDocStrings = "2"
except: pass
import os
import Grasshopper.Kernel as gh
import scriptcontext as sc
if sc.sticky.has_key('honeybee_release'):
if sc.sticky["honeybee_folders"]["OSLibPath"] != None:
# openstudio is there
openStudioLibFolder = sc.sticky["honeybee_folders"]["OSLibPath"]
openStudioIsReady = True
# check to see that it's version 2.0 or above.
rightVersion = False
try:
osVersion = openStudioLibFolder.split('-')[-1]
if osVersion.startswith('2'):
rightVersion = True
except:
pass
if rightVersion == False:
openStudioIsReady = False
msg = "Your version of OpenStudio must be 2.0 or above to use the measures components."
print msg
ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Warning, msg)
import clr
clr.AddReferenceToFileAndPath(openStudioLibFolder+"\\openStudio.dll")
import sys
if openStudioLibFolder not in sys.path:
sys.path.append(openStudioLibFolder)
import OpenStudio
else:
openStudioIsReady = False
# let the user know that they need to download OpenStudio libraries
msg1 = "You do not have OpenStudio installed on Your System.\n" + \
"You wont be able to use this component until you install it.\n" + \
"Download the latest OpenStudio for Windows from:\n"
msg2 = "https://www.openstudio.net/downloads"
print msg1
print msg2
ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Warning, msg1)
ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Warning, msg2)
else:
openStudioIsReady = False
class OPSChoice:
def __init__(self, originalString):
self.originalString = originalString
self.value = self.get_value()
self.display_name = self.get_display_name()
def get_display_name(self):
return self.originalString.split("<display_name>")[-1].split("</display_name>")[0]
def get_value(self):
return self.originalString.split("<value>")[-1].split("</value>")[0]
def __repr__(self):
return self.display_name
class OPSMeasureArg:
def __init__(self, originalString):
self.originalString = originalString
self.name = self.get_name()
self.display_name = self.get_display_name()
self.description = self.get_description()
self.type = self.get_type()
self.required = self.get_required()
if self.required == True:
self.display_name = "_" + self.display_name
else:
self.display_name = self.display_name + "_"
self.model_dependent = self.get_model_dependent()
self.default_value = self.get_default_value()
self.choices = self.get_choices()
self.validChoices = [choice.value.lower() for choice in self.choices]
self.userInput = None
def get_name(self):
return self.originalString.split("<name>")[-1].split("</name>")[0]
def get_display_name(self):
return self.originalString.split("</display_name>")[0].split("<display_name>")[-1]
def get_description(self):
return self.originalString.split("<description>")[-1].split("</description>")[0]
def get_type(self):
return self.originalString.split("<type>")[-1].split("</type>")[0]
def get_required(self):
req = self.originalString.split("<required>")[-1].split("</required>")[0]
return True if req.strip() == "true" else False
def get_model_dependent(self):
depends = self.originalString.split("<model_dependent>")[-1].split("</model_dependent>")[0]
return True if depends.strip() == "true" else False
def get_default_value(self):
if not "<default_value>" in self.originalString:
return None
else:
value = self.originalString.split("<default_value>")[-1].split("</default_value>")[0]
if self.type.lower() != "boolean": return value
return True if value.strip() == "true" else False
def get_choices(self):
choicesContainer = self.originalString.split("<choices>")[-1].split("</choices>")[0]
choices = [arg.split("<choice>")[-1] for arg in choicesContainer.split("</choice>")][:-1]
return [OPSChoice(choice) for choice in choices]
def update_value(self, userInput):
#currently everything is string
if len(self.validChoices) == 0:
self.userInput = userInput
elif str(userInput).lower() not in self.validChoices:
#give warning
msg = str(userInput) + " is not a valid input for " + self.display_name + ".\nValid inputs are: " + str(self.choices)
give_warning(msg)
else:
self.userInput = userInput
def __repr__(self):
return (self.display_name + "<" + self.type + "> " + str(self.choices) + \
" Current Value: {}").format(self.default_value if not self.userInput else self.userInput)
def give_warning(msg):
w = gh.GH_RuntimeMessageLevel.Warning
ghenv.Component.AddRuntimeMessage(w, msg)
def get_measureArgs(xmlFile):
# there is no good XML parser for IronPython
# here is parsing the file
with open(xmlFile, "r") as measure:
lines = measure.readlines()
argumentsContainer = "".join(lines).split("<arguments>")[-1].split("</arguments>")[0]
arguments = [arg.split("<argument>")[-1] for arg in argumentsContainer.split("</argument>")][:-1]
#collect arguments in a dictionary so I can map the values on update
args = dict()
for count, arg in enumerate(arguments):
args[count+1] = OPSMeasureArg(arg)
return args
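# Added illustration (not from the original file): get_measureArgs expects
# measure.xml to contain blocks shaped roughly like
#   <argument>
#     <name>heating_setpoint</name>          (hypothetical values)
#     <display_name>Heating Setpoint</display_name>
#     <type>Double</type>
#     <required>true</required>
#     <model_dependent>false</model_dependent>
#     <default_value>20</default_value>
#   </argument>
# and returns a dict such as {1: OPSMeasureArg, 2: ...}, keyed by each
# argument's position in the file (starting at 1).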
def addInputParam(arg, path, i=None):
if i == None:
param = gh.Parameters.Param_ScriptVariable()
else:
param = ghenv.Component.Params.Input[i]
param.NickName = arg.display_name
param.Name = arg.name
param.Description = str(arg)
param.Optional = True # even if it is required it has a default value
param.AllowTreeAccess = False
param.Access = gh.GH_ParamAccess.item # I assume this can't be a list
if arg.default_value != None:
param.AddVolatileData(path, 0, arg.default_value)
if i == None:
index = ghenv.Component.Params.Input.Count
ghenv.Component.Params.RegisterInputParam(param,index)
ghenv.Component.Params.OnParametersChanged()
def cleanInputNames():
# I couldn't find a clean way to remove the input so I just change the name
for paramCount in range(1,ghenv.Component.Params.Input.Count):
param = ghenv.Component.Params.Input[paramCount]
param.NickName = "_"
param.Name = "_"
param.Description = "_"
param.Optional = False
ghenv.Component.Params.OnParametersChanged()
def cleanFirstInput():
ghenv.Component.Params.Input[0].NickName = "_"
ghenv.Component.Params.Input[0].Name = "_"
# ghenv.Component.Params.Input[0].RemoveAllSources()
def updateComponentDescription(xmlFile):
# get name of measure and description
nickName = os.path.normpath(xmlFile).split("\\")[-2]
ghenv.Component.NickName = nickName
measureType = 'OpenStudio'
with open(xmlFile, "r") as measure:
lines = "".join(measure.readlines())
ghenv.Component.Name = lines.split("</display_name>")[0].split("<display_name>")[-1]
ghenv.Component.Description = lines.split("</description>")[0].split("<description>")[-1]
if 'EnergyPlusMeasure' in lines:
measureType = 'EnergyPlus'
elif 'ModelMeasure' in lines:
measureType = 'OpenStudio'
elif 'ReportingMeasure' in lines:
measureType = 'Reporting'
return measureType
class OpenStudioMeasure:
def __init__(self, name, nickName, description, measurePath, args, measureType):
self.name = name
self.nickName = nickName
self.description = description
self.path = os.path.normpath(measurePath)
self.args = args
self.type = measureType
def updateArguments(self):
#iterate over inputs and assign the new values in case there is any new values
for i in range(1, ghenv.Component.Params.Input.Count):
try:
value = ghenv.Component.Params.Input[i].VolatileData[0][0]
except:
value = self.args[i].default_value
path = gh.Data.GH_Path(0)
ghenv.Component.Params.Input[i].AddVolatileData(path, 0, value)
self.args[i].update_value(value)
def __repr__(self):
return "OpenStudio " + self.name
def loadMeasureFromFile(xmlFile):
if not os.path.isfile(xmlFile): raise Exception("Can't find measure at " + xmlFile)
directory, f_name = os.path.split(xmlFile)
measure = OpenStudio.BCLMeasure(tryGetOSPath(directory))
if measure.arguments().Count == 0:
print "Measure contains no arguments."
measureType = updateComponentDescription(xmlFile)
# load arguments
args = get_measureArgs(xmlFile)
# create an OSMeasure based on default values
OSMeasure = OpenStudioMeasure(ghenv.Component.Name, ghenv.Component.NickName, ghenv.Component.Description, _, args, measureType)
OSMeasure.updateArguments()
# add the measure to sticky to be able to load and update it
key = ghenv.Component.InstanceGuid.ToString()
if "osMeasures" not in sc.sticky.keys():
sc.sticky["osMeasures"] = dict()
sc.sticky["osMeasures"][key] = OSMeasure
return OSMeasure
def tryGetOSPath(path):
"""Try to convert a string path to OpenStudio Path."""
try:
return OpenStudio.Path(path)
except TypeError:
# OpenStudio 2.6.1
ospath = OpenStudio.OpenStudioUtilitiesCore.toPath(path)
return OpenStudio.Path(ospath)
def loadMeasureFromMem():
try:
key = ghenv.Component.InstanceGuid.ToString()
OSMeasure = sc.sticky["osMeasures"][key]
OSMeasure.updateArguments()
ghenv.Component.Name = OSMeasure.name
ghenv.Component.NickName = OSMeasure.nickName
ghenv.Component.Description = OSMeasure.description
return OSMeasure
    except Exception as e:
msg = "Couldn't load the measure!\n%s" % str(e)
if ghenv.Component.Params.Input.Count!=1:
msg += "\nTry to reload the measure with a fresh component."
raise Exception(msg)
print msg
return None
fileLoad = False
try:
OSMeasure = sc.sticky["osMeasures"][key]
except:
try:
xmlFile = os.path.join(_ , "measure.xml")
OSMeasure = loadMeasureFromFile(xmlFile)
fileLoad = True
except Exception as e:
print e
#Honeybee check.
initCheck = True
if not sc.sticky.has_key('honeybee_release') == True:
initCheck = False
print "You should first let Honeybee fly..."
ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Warning, "You should first let Honeybee fly...")
else:
try:
if not sc.sticky['honeybee_release'].isCompatible(ghenv.Component): initCheck = False
hb_hvacProperties = sc.sticky['honeybee_hvacProperties']()
hb_airDetail = sc.sticky["honeybee_hvacAirDetails"]
hb_heatingDetail = sc.sticky["honeybee_hvacHeatingDetails"]
hb_coolingDetail = sc.sticky["honeybee_hvacCoolingDetails"]
except:
initCheck = False
warning = "You need a newer version of Honeybee to use this compoent." + \
"Use updateHoneybee component to update userObjects.\n" + \
"If you have already updated userObjects drag Honeybee_Honeybee component " + \
"into canvas and try again."
ghenv.Component.AddRuntimeMessage(w, warning)
if openStudioIsReady == True and initCheck == True and fileLoad == False:
if ghenv.Component.Params.Input.Count==1 and _OSMeasure:
# first time loading
xmlFile = os.path.join(_OSMeasure, "measure.xml")
if not os.path.isfile(xmlFile): raise Exception("Can't find measure at " + xmlFile)
measure = OpenStudio.BCLMeasure(tryGetOSPath(_OSMeasure))
if measure.arguments().Count == 0:
print "Measure contains no arguments."
# load arguments
args = get_measureArgs(xmlFile)
# add arguments to component
path = gh.Data.GH_Path(0)
for key in sorted(args.keys()):
addInputParam(args[key], path)
measureType = updateComponentDescription(xmlFile)
# create an OSMeasure based on default values
OSMeasure = OpenStudioMeasure(ghenv.Component.Name, ghenv.Component.NickName, ghenv.Component.Description, _OSMeasure, args, measureType)
# add the measure to sticky to be able to load and update it
key = ghenv.Component.InstanceGuid.ToString()
if "osMeasures" not in sc.sticky.keys():
sc.sticky["osMeasures"] = dict()
sc.sticky["osMeasures"][key] = OSMeasure
_OSMeasure = False
# clean first input
cleanFirstInput()
if sc.sticky['honeybee_release'].isInputMissing(ghenv.Component):
OSMeasure = None
elif ghenv.Component.Params.Input.Count==1 and not _OSMeasure == False:
sc.sticky['honeybee_release'].isInputMissing(ghenv.Component)
else:
OSMeasure = loadMeasureFromMem()
sc.sticky['honeybee_release'].isInputMissing(ghenv.Component) | gpl-3.0 | 6,137,310,421,724,974,000 | 38.696203 | 145 | 0.649085 | false |
openstack/rally | tests/unit/doc/test_format.py | 1 | 3089 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import fnmatch
import io
import os
import re
import testtools
class TestFormat(testtools.TestCase):
def _check_lines_wrapping(self, doc_file, raw):
code_block = False
text_inside_simple_tables = False
lines = raw.split("\n")
for i, line in enumerate(lines):
if code_block:
if not line or line.startswith(" "):
continue
else:
code_block = False
if "::" in line:
code_block = True
# simple style tables also can fit >=80 symbols
# open simple style table
if ("===" in line or "---" in line) and not lines[i - 1]:
text_inside_simple_tables = True
if "http://" in line or "https://" in line or ":ref:" in line:
continue
# Allow lines which do not contain any whitespace
if re.match(r"\s*[^\s]+$", line):
continue
if not text_inside_simple_tables:
self.assertTrue(
len(line) < 80,
msg="%s:%d: Line limited to a maximum of 79 characters." %
(doc_file, i + 1))
# close simple style table
if "===" in line and not lines[i + 1]:
text_inside_simple_tables = False
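    # Added note: the checks above exempt literal blocks (introduced by "::"),
    # rows inside simple-style tables (delimited by === / --- lines), lines
    # containing URLs or :ref: targets, and lines without internal whitespace
    # from the 79-character limit.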
def _check_no_cr(self, doc_file, raw):
matches = re.findall("\r", raw)
self.assertEqual(
len(matches), 0,
"Found %s literal carriage returns in file %s" %
(len(matches), doc_file))
def _check_trailing_spaces(self, doc_file, raw):
for i, line in enumerate(raw.split("\n")):
trailing_spaces = re.findall(r"\s+$", line)
self.assertEqual(
len(trailing_spaces), 0,
"Found trailing spaces on line %s of %s" % (i + 1, doc_file))
def test_lines(self):
files = []
docs_dir = os.path.join(os.path.dirname(__file__), os.pardir,
os.pardir, os.pardir, "doc")
for root, dirnames, filenames in os.walk(docs_dir):
for filename in fnmatch.filter(filenames, "*.rst"):
files.append(os.path.join(root, filename))
for filename in files:
with io.open(filename, encoding="utf-8") as f:
data = f.read()
self._check_lines_wrapping(filename, data)
self._check_no_cr(filename, data)
self._check_trailing_spaces(filename, data)
| apache-2.0 | -7,617,275,831,751,958,000 | 37.135802 | 78 | 0.556167 | false |
CXWorks/compilerLab | lab1/lex/recore.py | 1 | 7297 | import networkx as nx
from collections import deque
import matplotlib.pyplot as plt
def re2dfa(re,debug=False):
def isChar(c):
return (c>='a' and c<='z') or (c>='A' and c<='Z') or (c>='0' and c<='9')
way=[]
def re2nfa(re):
        # concatenation must bind tighter than alternation ('.' > '|'),
        # otherwise a pattern such as "ab|c" would be parsed as "a(b|c)"
        _op_={'.':7,'|':6,'(':10}
#add .
full=[]
skip=False
for i in range(len(re)):
if skip:
skip=False
continue
full.append(re[i])
if re[i]=='\\':
i+=1
full.append(re[i])
skip=True
if re[i] not in _op_.keys() and i+1<len(re) and (isChar(re[i+1]) or re[i+1]=='(' or re[i+1]=='\\'):
full.append('.')
full.append('$')
# back
back=[]
symb=[]
skip=False
for i in range(len(full)):
if skip:
skip=False
continue
c=full[i]
if isChar(c):
back.append(c)
if c not in way:
way.append(c)
elif c==')':
while symb[len(symb)-1]!= '(':
back.append(symb.pop())
symb.pop()
elif c=='$':
while len(symb)>0:
back.append(symb.pop())
elif c in ['*','+','?']:
back.append(c)
elif c =='\\':
back.append(c)
i+=1
back.append(full[i])
skip=True
if full[i] not in way:
way.append(full[i])
elif c in _op_.keys():
while len(symb)>0 and symb[len(symb)-1]!='(' and _op_[symb[len(symb)-1]] >= _op_[c]:
back.append(symb.pop())
symb.append(c)
else:
back.append(c)
if c not in way:
way.append(c)
#build nfa
stack=[]
skip=False
for i in range(len(back)):
if skip:
skip=False
continue
c=back[i]
if isChar(c):
g=nx.DiGraph()
g.add_edge(0,1,c=c)
stack.append(g)
elif c=='\\':
i+=1
g=nx.DiGraph()
g.add_edge(0,1,c=back[i])
stack.append(g)
skip=True
elif c== '.':
g2=stack.pop()
g1=stack.pop()
n=len(g1)
g=nx.disjoint_union(g1,g2)
g.add_edge(n-1,n,e='1')
stack.append(g)
elif c=='*':
g=stack[len(stack)-1]
n=len(g)
g.add_edge(0,n-1,e='1')
g.add_edge(n-1,0,e='1')
elif c=='+':
g = stack[len(stack)-1]
n = len(g)
g.add_edge(n - 1, 0, e='1')
elif c=='?':
g = stack[len(stack) - 1]
n = len(g)
g.add_edge(0, n - 1, e='1')
elif c=='|':
g1 = stack.pop()
g2 = stack.pop()
n1 = len(g1)
n2 = len(g2)
s=nx.DiGraph()
s.add_node(0)
s1=nx.disjoint_union(s,g1)
s1.add_edge(0,1,e='1')
e=nx.DiGraph()
e.add_node(0)
e1=nx.disjoint_union(g2,e)
e1.add_edge(n2-1,n2,e='1')
ans=nx.disjoint_union(s1,e1)
ans.add_edge(0,n1+1,e='1')
ans.add_edge(n1,n1+n2+1,e='1')
stack.append(ans)
else:
g = nx.DiGraph()
g.add_edge(0, 1, c=c)
stack.append(g)
return stack.pop()
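    # Added note: re2nfa first inserts explicit '.' concatenation operators,
    # converts the pattern to postfix with a shunting-yard pass (the `back`
    # list), and then builds NFA fragments bottom-up on a stack; epsilon
    # transitions are stored as edge attribute e='1' and literal characters
    # under edge attribute 'c'.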
def findClo(g,node):
ans=[node]
#dfs
stack=[node]
while len(stack)>0:
n=stack.pop()
edge = g.edge[n]
for no,dic in edge.items():
if no not in ans and dic.has_key('e'):
stack.append(no)
ans.append(no)
return ans
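    # Added note: findClo computes the epsilon-closure of `node` -- a DFS that
    # follows only edges carrying the 'e' attribute and returns every state
    # reachable that way, including `node` itself.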
def findWay(g,ns,w):
ans=[]
for n in ns:
edge=g.edge[n]
for no,dic in edge.items():
if no not in ans and dic.has_key('c') and dic['c']==w:
#find clo
temp=findClo(g,no)
ans.extend(temp)
return ans
def minDFA(node,index):
ans=[]
log=[]
for i in range(len(node)):
n=node[i]
if n in log:
continue
nto=index[n].values()
notin=[x for x in nto if x not in node]
if len(notin)>0 :
ans.append([n])
continue
t=[n]
for j in range(i+1,len(node)):
jto=index[node[j]].values()
if nto==jto and len(nto)!=0:
t.append(node[j])
log.append(node[j])
ans.append(t)
return ans
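    # Added note: minDFA is a single-pass state merge, not a full Hopcroft
    # minimisation -- within the given partition (accepting or non-accepting
    # states, split by the caller) it merges states whose outgoing transition
    # dictionaries in `index` are identical, and keeps any state with a
    # transition leaving the partition as a singleton.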
def delnode(n,conn,t,to):
del conn[n]
t[to].extend([x for x in t[n] if x not in t[to]])
del t[n]
for k,v in conn.items():
if k != n :
for w in way:
if v.has_key(w) and v[w]==n :
v[w]=to
return conn
def nfa2dfa(nfa):
table={}
#init
t=findClo(nfa,0)
t.sort()
table[0]=t
conn={}
queue=deque([0])
while len(queue)>0:
n=queue.popleft()
n2c={}
n_n=table[n]
for c in way:
te=findWay(nfa,n_n,c)
if len(te)==0:
continue
te.sort()
if te not in table.values():
idd=len(table)
table[idd]=te
queue.append(idd)
else:
idd=table.keys()[table.values().index(te)]
n2c[c]=idd
conn[n]=n2c
#minimise
s=[]
e=[]
for k,v in table.items():
if len(nfa.node)-1 in v:
e.append(k)
else:
s.append(k)
s2=minDFA(s,conn)
e2=minDFA(e,conn)
s2.extend(e2)
for l in s2:
if len(l) == 1:
continue
for i in range(1,len(l)):
conn=delnode(l[i],conn,table,l[0])
#build graph
g=nx.DiGraph()
for k,v in table.items():
g.add_node(k)
if len(nfa.node) - 1 in v:
g.node[k]['e']=1
for node,di in conn.items():
for c,t in di.items():
# g.add_edge(node,t,)
if g.has_edge(node,t):
g.edge[node][t]['c'].append(c)
else:
g.add_edge(node, t,c=[c] )
return g
nfa = re2nfa(re)
g = nfa2dfa(nfa)
if debug:
return g
else:
return [g.node,g.edge]
if __name__ == '__main__':
g=re2dfa('(a|b)*a(a|b)(a|b)',debug=True)
print g.node
print g.edge
nx.draw_networkx(g)
plt.show() | mit | 3,387,408,629,161,329,000 | 27.396887 | 111 | 0.371386 | false |
OndinaHQ/Tracker | plugins/s3.py | 1 | 3045 | # Copyright (C) 2012 Stefano Palazzo <[email protected]>
# Copyright (C) 2012 Ondina, LLC. <http://ondina.co>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import time
import hmac
import hashlib
import http.client
import urllib.parse
import base64
import collections
class S3Error (Exception):
def __init__(self, status, response):
self.status, self.response = status, response
def __str__(self):
return "{}: {}".format(self.status, self.response)
    def __repr__(self):
        return "S3Error({}, {})".format(repr(self.status), repr(self.response))
class S3 (object):
'''
Usage:
>>> s3 = S3(YOUR_ACCESS_KEY_ID, YOUR_SECRET_ACCESS_KEY)
>>> s3.upload("some-bucket", open("image.png", "rb").read(),
"image/png", "image3838838.png")
https://s3.amazonaws.com/some-bucket/image3838838.png
'''
def __init__(self, access_key, secret_key):
self.__access_key, self.__secret_key = access_key, secret_key
def __request(self, method, bucket, host, action, body, content_type, fn):
date = time.strftime("%c GMT", time.gmtime())
headers = collections.OrderedDict((
("x-amz-acl", "public-read"),
("Content-Type", content_type),
("Content-Length", len(body)),
("Host", bucket + "." + host),
("Date", date),
))
string_to_sign = (method + "\n" +
"\n" +
content_type + "\n" +
date + "\n" +
"x-amz-acl:public-read\n" +
"/" + bucket + "/" + fn)
signature = base64.b64encode(hmac.new(self.__secret_key.encode(),
string_to_sign.encode(), hashlib.sha1).digest()).decode()
authorization = "AWS " + self.__access_key + ":" + signature
headers.update({"Authorization": authorization})
connection = http.client.HTTPSConnection(bucket + "." + host)
action = action + "?" + urllib.parse.urlencode({})
connection.request(method, action, body, headers)
response = connection.getresponse()
if response.status != 200:
raise S3Error(response.status, response.read())
return "https://s3.amazonaws.com/{}/{}".format(bucket, fn)
def upload(self, bucket, data, content_type, filename):
return self.__request("PUT", bucket, "s3.amazonaws.com", "/" +
filename, data, content_type, filename)
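# Added note (not in the original file): the Authorization header built above
# follows the legacy S3 REST authentication scheme (signature version 2),
# i.e. "AWS <access_key>:<signature>" where the signature is a base64-encoded
# HMAC-SHA1 over
#   VERB \n Content-MD5 \n Content-Type \n Date \n x-amz-acl:... \n /bucket/key
# with an empty Content-MD5 line in this implementation.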
| gpl-3.0 | 6,556,845,181,018,682,000 | 36.592593 | 79 | 0.611494 | false |
gioman/QGIS | python/plugins/processing/algs/gdal/buildvrt.py | 1 | 4226 | # -*- coding: utf-8 -*-
"""
***************************************************************************
merge.py
---------------------
Date : October 2014
Copyright : (C) 2014 by Radoslaw Guzinski
Email : rmgu at dhi-gras dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Radoslaw Guzinski'
__date__ = 'October 2014'
__copyright__ = '(C) 2014, Radoslaw Guzinski'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from qgis.PyQt.QtGui import QIcon
from processing.algs.gdal.GdalAlgorithm import GdalAlgorithm
from processing.core.outputs import OutputRaster
from processing.core.parameters import ParameterBoolean
from processing.core.parameters import ParameterMultipleInput
from processing.core.parameters import ParameterSelection
from processing.algs.gdal.GdalUtils import GdalUtils
from processing.tools.system import tempFolder
from processing.tools import dataobjects
pluginPath = os.path.split(os.path.split(os.path.dirname(__file__))[0])[0]
class buildvrt(GdalAlgorithm):
INPUT = 'INPUT'
OUTPUT = 'OUTPUT'
RESOLUTION = 'RESOLUTION'
SEPARATE = 'SEPARATE'
PROJ_DIFFERENCE = 'PROJ_DIFFERENCE'
RESOLUTION_OPTIONS = ['average', 'highest', 'lowest']
def name(self):
return 'buildvirtualraster'
def displayName(self):
return self.tr('Build Virtual Raster')
def icon(self):
return QIcon(os.path.join(pluginPath, 'images', 'gdaltools', 'vrt.png'))
def group(self):
return self.tr('Raster miscellaneous')
def defineCharacteristics(self):
self.addParameter(ParameterMultipleInput(self.INPUT,
self.tr('Input layers'), dataobjects.TYPE_RASTER))
self.addParameter(ParameterSelection(self.RESOLUTION,
self.tr('Resolution'), self.RESOLUTION_OPTIONS, 0))
self.addParameter(ParameterBoolean(self.SEPARATE,
self.tr('Layer stack'), True))
self.addParameter(ParameterBoolean(self.PROJ_DIFFERENCE,
self.tr('Allow projection difference'), False))
self.addOutput(OutputRaster(buildvrt.OUTPUT, self.tr('Virtual')))
def getConsoleCommands(self):
arguments = []
arguments.append('-resolution')
arguments.append(self.RESOLUTION_OPTIONS[self.getParameterValue(self.RESOLUTION)])
if self.getParameterValue(buildvrt.SEPARATE):
arguments.append('-separate')
if self.getParameterValue(buildvrt.PROJ_DIFFERENCE):
arguments.append('-allow_projection_difference')
        # Always write input files to a text file in case there are many of them and the
        # length of the command would be longer than allowed at the command prompt
listFile = os.path.join(tempFolder(), 'buildvrtInputFiles.txt')
with open(listFile, 'w') as f:
f.write(self.getParameterValue(buildvrt.INPUT).replace(';', '\n'))
arguments.append('-input_file_list')
arguments.append(listFile)
out = self.getOutputValue(buildvrt.OUTPUT)
        # Ideally the output file extension would be restricted to just .vrt, but since there is no
        # simple way to do that here, check the extension and correct it instead.
_, ext = os.path.splitext(out)
if not ext.lower() == '.vrt':
out = out.replace(ext, '.vrt')
self.setOutputValue(self.OUTPUT, out)
arguments.append(out)
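        # Resulting call looks like this (paths are illustrative, not taken from a real run):
        #   gdalbuildvrt -resolution average -separate -allow_projection_difference \
        #       -input_file_list .../buildvrtInputFiles.txt .../output.vrt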
return ['gdalbuildvrt', GdalUtils.escapeAndJoin(arguments)]
| gpl-2.0 | 2,611,517,822,219,813,400 | 40.841584 | 99 | 0.58424 | false |
boffi/boffi.github.io | dati_2014/08/exercise1.py | 1 | 3923 | ######################################################################
# Preliminaries,
import scipy as sp
mat=sp.matrix
from scipy.linalg import inv
######################################################################
# an utility function to format a matrix for inclusion in a LaTeX file
def latex_print(data,name,fmt="%10.4f",title=""):
delim={"mat":"b",
"vet":"B",
"det":"V",
"norm":"v"}
if title:
print "% ----- "+title+" -----"
print "\\begin{"+delim[name]+"matrix}"
print "\\\\\n".join(["&".join(map(lambda x: fmt%(x,),line)) for line in sp.asarray(data)])
print "\\end{"+delim[name]+"matrix}"
######################################################################
Mass=mat(((2,0,0,),
(0,3,0,),
(0,0,4,),));
Mass=100000.*Mass
latex_print(Mass,"mat",title="Mass Matrix")
######################################################################
Stif=mat(((+1,-1,+0),
(-1,+3,-2),
(+0,-2,+5)))
Stif=120e6*Stif
latex_print(Stif,"mat",title="Stiffness Matrix")
######################################################################
# roots finds the roots of the poly defined by
# the list of coefficients (1, -11/4., 15/8., -1/4.)
Omegas=mat(sorted(sp.roots((1,-11/4.,15/8.,-1/4.))))*1200.
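# The roots are the eigenvalues lambda = omega^2/1200 of det(K - omega^2 M) = 0
# (K = 120e6*K_norm, M = 1e5*M_norm), so multiplying by 1200 gives omega^2 in (rad/s)^2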
Eigenv=mat(sp.zeros((3,3)))
# This sets the 0 row of the eigenv matrix to ones
Eigenv[0,:]=1.,1.,1.
# this is a {0,1} column vector
known=mat(((1,),(0,)))
# solve the eq. of free vibrations for psi_0i = 1
for i in range(3):
Omega2=Omegas[0,i]/1200
coef=mat(((3.-3.*Omega2,-2.),(-2.,5.-4.*Omega2)))
bottom=coef.I*known
# this sets the bottom part of each eigenvector
Eigenv[1:,i]=bottom
latex_print(Eigenv,"mat",title="Eigenvectors Matrix")
MStar=Eigenv.T*Mass*Eigenv
latex_print(MStar,"mat",title="Modal Masses Matrix")
KStar=Eigenv.T*Stif*Eigenv
latex_print(KStar,"mat","%10.5e",title="Modal Stiffnesses Matrix")
MStar=Eigenv.T*Mass*Eigenv
latex_print(MStar/1000.,"mat",title="Modal Masses Matrix, in tons")
KStar=Eigenv.T*Stif*Eigenv
latex_print(KStar/1E6,"mat","%10.2f",title="Modal Stiffnesses Matrix, in MN/m")
q_0=MStar.I*Eigenv.T*Mass*mat((5,4,3)).T
latex_print(sp.mat(((5,4,3),)).T,"vet",title="Initial displacements, nodal coo.")
latex_print(q_0,"vet",title="Initial displacements, modal coo.")
qdot_0=MStar.I*Eigenv.T*Mass*mat((0,9,0)).T
latex_print(mat((0,9,0)).T,"vet",title="Initial velocities, nodal coo.")
latex_print(qdot_0,"vet",title="Initial velocities, modal coo.")
# q_i = A_i sin(w_i t) + B_i cos(w_i t)
# qdot_i = w_i(A_i cos(w_i t) - B_i sin(w_i t))
Bs=q_0
As=mat(sp.diagonal(qdot_0/sp.sqrt(Omegas))).T
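# at t=0: q_i(0) = B_i and qdot_i(0) = omega_i A_i, hence B = q_0 and A = qdot_0/omega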
latex_print(As,"vet",title="Sine coefficients for modal disp.s")
latex_print(Bs,"vet",title="Cosine coefficients for modal disp.s")
ampli=sp.real(sp.sqrt(sp.power(As,2)+(sp.power(Bs,2))))
phase=sp.arctan2(As,Bs)
latex_print(ampli,"vet",title="Cosine only amplitudes for modal disp.s")
latex_print(phase,"vet",title="Cosine only phases for modal disp.s")
# q_i(t) = ampli_i*cos(w_i-phase)
print "% Nodal displacements, in mm\n\\begin{align*}"
for i in range(3):
print r" x_%d & = " % (i+1,),
for j in range(3):
print r"%+6.3f \cos(%10.3f t %+10.3f) " % (Eigenv[i,j]*ampli[j], sp.sqrt(Omegas[0,j]), phase[j]),
print r"\\"
print "\\end{align*}"
print "% Nodal forces, in kN\n\\begin{align*}"
for i in range(3):
print r"x_%d & = " % (i+1,),
for j in range(3):
print r"%+6.3f \cos(%10.3f t %+10.3f) " % (Mass[i,i]*Omegas[0,j]*Eigenv[i,j]*ampli[j]/1E6, sp.sqrt(Omegas[0,j]), phase[j]),
print r"\\"
print "\\end{align*}"
## half-sine
#t1=0.02 # seconds
#p=mat((2.5e6,5e6,5e6)).T # Newtons
## modal loads, normalized
#pl=MStar.I*Eigenv.T*p
##the impulse, and the final velocity, as pl was normalized, is
#qdot_0 = pl*t1/(sp.pi/2)
#print qdot_0, sp.diagonal(qdot_0/sp.sqrt(Omegas))
| mit | -4,355,237,838,158,614,000 | 32.818966 | 131 | 0.559266 | false |
mattyowl/fitScalingRelation | fitScalingRelation/fitScalingRelationLib.py | 1 | 62047 | """
The MCMC fitting code used in Hilton et al. (2012), in a more general purpose form
Copyright 2015 Matt Hilton ([email protected])
This file is part of fitScalingRelation.
fitScalingRelation is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
fitScalingRelation is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with fitScalingRelation. If not, see <http://www.gnu.org/licenses/>.
"""
import os
import sys
import math
import string
from astLib import *
import pylab as plt
import numpy as np
import astropy.table as atpy
import popen2
from scipy import stats
from scipy import special
from scipy import interpolate
from scipy import ndimage
import pyximport; pyximport.install()
import cythonScalingRelation as csr
import time
import pickle
import matplotlib
import IPython
np.random.seed()
plt.matplotlib.interactive(False)
# For some unknown reason, mathtext in matplotlib is behaving weirdly since Ubuntu 16.10 upgrade
#try:
#plt.matplotlib.rc('text', usetex=True)
#except:
#pass
#-------------------------------------------------------------------------------------------------------------
# Adopt Ed's cosmology
#astCalc.OMEGA_M0=0.27
#astCalc.OMEGA_L=0.73
#-------------------------------------------------------------------------------------------------------------
def ask_for( key ):
s = raw_input( "ParametersDict: enter value for '%s': " % key )
try:
val = eval(s)
except NameError:
# allow people to enter unquoted strings
val = s
return val
class ParametersDict( dict ):
def __getitem__( self, key ):
if key not in self:
print "ParametersDict: parameter '%s' not found" % key
val = ask_for( key )
print "ParametersDict: setting '%s' = %s" % (key,repr(val))
dict.__setitem__( self, key, val )
return dict.__getitem__( self, key )
def read_from_file( self, filename ):
f = open( filename )
old = ''
for line in f:
line = line.strip()
if len(line) == 0 or line[0] == '#':
continue
s = line.split('#')
line = s[0]
#if line[-1] == '\\':
#s = line.split('\\')
#if len(s) > 1:
#old = string.join([old, s[0]])
#continue
#else:
#line = string.join([old, s[0]])
#old = ''
##IPython.embed()
##sys.exit()
s = line.split('=')
if len(s) != 2:
print "Error parsing line:"
print line
IPython.embed()
sys.exit()
try:
key = s[0].strip()
val = eval(s[1].strip()) # XXX:make safer
except:
raise Exception, "can't parse line: %s" % (line)
self[key] = val
f.close()
def write_to_file( self, filename, mode = 'w' ):
f = open( filename, mode )
keys = self.keys()
keys.sort()
for key in keys:
f.write( "%s = %s\n" % (key,repr(self[key])) )
f.close()
    def cmp( self, otherDict ):
        """Returns the list of keys whose values differ from, or are missing in, otherDict."""
        diff = []
        ks = self.keys()
        for k in ks:
            try:
                if otherDict[k] == self[k]:
                    continue
                diff += [k]
            except KeyError:
                diff += [k]
        return diff
#-------------------------------------------------------------------------------------------------------------
def selectStartParsFromPriors(settingsDict):
"""Choose random starting values for the MCMC from the priors we're placing on the parameters.
"""
variables=settingsDict['variables']
pars=np.zeros(len(variables))
for i in range(len(variables)):
v=variables[i]
if settingsDict['%sFit' % (v)] == 'fixed':
pars[i]=settingsDict['%s0' % (v)]
else:
pars[i]=np.random.uniform(settingsDict['prior_%s_MIN' % (v)], settingsDict['prior_%s_MAX' % (v)])
# This makes sure that if we're testing by swapping axes, we can use the same prior ranges
if 'swapAxes' in settingsDict.keys() and settingsDict['swapAxes'] == True:
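        # Inverting y = A + B*x gives x = (-A/B) + (1/B)*y, hence the transformed intercept and slope below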
b=1.0/pars[1]
a=-pars[0]/pars[1]
pars[0]=a
pars[1]=b
return pars
#-------------------------------------------------------------------------------------------------------------
def getPPrior(pPars, settingsDict):
"""Gets prior probability.
"""
variables=settingsDict['variables']
# This makes sure that if we're testing by swapping axes, we can use the same prior ranges
if 'swapAxes' in settingsDict.keys() and settingsDict['swapAxes'] == True:
b=1.0/pPars[1]
a=-pPars[0]/pPars[1]
pPars[0]=a
pPars[1]=b
priors=np.zeros(len(variables))
for i in range(len(variables)):
v=variables[i]
if pPars[i] > settingsDict['prior_%s_MIN' % (v)] and pPars[i] < settingsDict['prior_%s_MAX' % (v)]:
priors[i]=1.0
else:
priors[i]=0.0
# Fixed parameters must surely be within the priors...
if settingsDict['%sFit' % (v)] == 'fixed':
priors[i]=1.0
pPrior=np.product(priors)
return pPrior
#-------------------------------------------------------------------------------------------------------------
def byteSwapArr(arr):
"""FITS is big-endian, but cython likes native-endian arrays (little-endian for x86)... so, byteswap
    if needed.
"""
if arr.dtype.byteorder == '>':
arr=arr.byteswap().newbyteorder('=')
return arr
#-------------------------------------------------------------------------------------------------------------
def sampleGetter(settingsDict, sampleDef, outDir):
"""Loads in catalogue in .fits table format, and add columns xToFit, yToFit, xErrToFit, yErrToFit,
which are fed into the MCMCFit routine. Applies any asked for scalings and cuts according to the
contents of settingsDict and sampleDef.
"""
# Stuff we need from settings...
xColumnName=settingsDict['xColumnName']
xPlusErrColumnName=settingsDict['xPlusErrColumnName']
xMinusErrColumnName=settingsDict['xMinusErrColumnName']
yColumnName=settingsDict['yColumnName']
yPlusErrColumnName=settingsDict['yPlusErrColumnName']
yMinusErrColumnName=settingsDict['yMinusErrColumnName']
xPivot=settingsDict['xPivot']
yPivot=settingsDict['yPivot']
xTakeLog10=settingsDict['xTakeLog10']
yTakeLog10=settingsDict['yTakeLog10']
redshiftColumnName=settingsDict['redshiftColumnName']
xScaleFactor=settingsDict['xScaleFactor']
yScaleFactor=settingsDict['yScaleFactor']
yScaleFactorPower=settingsDict['yScaleFactorPower']
newTab=atpy.Table().read(settingsDict['inFileName'])
# Make a new table here with cuts applied
# NOTE: we really need a better way of labelling constraints
for key in sampleDef:
if key not in ['label', 'plotLabel']:
if key[-4:] == '_MIN':
col=key[:-4]
newTab=newTab[np.where(newTab[col] > sampleDef[key])]
elif key[-4:] == '_MAX':
col=key[:-4]
newTab=newTab[np.where(newTab[col] < sampleDef[key])]
else:
if type(sampleDef[key]) != list:
newTab=newTab[np.where(newTab[key] == sampleDef[key])]
else:
print "Need to add more sampleDef key handling code"
IPython.embed()
sys.exit()
if len(newTab) == 0:
print "Hmm... all objects cut? empty newTab"
IPython.embed()
sys.exit()
# Value added useful columns
Ez=[]
for row in newTab:
Ez.append(astCalc.Ez(row[redshiftColumnName]))
newTab.add_column(atpy.Column(Ez, 'E(z)'))
# Add columns we will fit to, scaling and applying log10 as necessary
# We apply pivots here also (undo them, if necessary, elsewhere)
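    # i.e., when the log10 options are set: xToFit = log10(x/xPivot), yToFit = log10(yScaling*y/yPivot),
    # with the asymmetric errors propagated through the same transformation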
stab=newTab
# We should probably make this default
if xPivot == "median":
xPivot=np.median(newTab[xColumnName])
settingsDict['xPivot']=xPivot
if yPivot == "median":
yPivot=np.median(newTab[yColumnName])
settingsDict['yPivot']=yPivot
if yScaleFactor == "E(z)":
yScaling=np.power(stab["E(z)"], yScaleFactorPower)
elif yScaleFactor == None:
yScaling=np.ones(len(stab))
else:
raise Exception, "didn't understand yScaleFactor"
if xTakeLog10 == True:
xToFit=np.log10(stab[xColumnName]/xPivot)
xErrToFitPlus=np.log10((stab[xColumnName]+stab[xPlusErrColumnName])/xPivot)-xToFit
xErrToFitMinus=xToFit-np.log10((stab[xColumnName]-stab[xMinusErrColumnName])/xPivot)
else:
xToFit=stab[xColumnName]
xErrToFitPlus=stab[xPlusErrColumnName]
xErrToFitMinus=stab[xMinusErrColumnName]
if yTakeLog10 == True:
yToFit=np.log10(yScaling*stab[yColumnName]/yPivot)
yErrToFitPlus=np.log10(yScaling*(stab[yColumnName]+stab[yPlusErrColumnName])/yPivot)-yToFit
yErrToFitMinus=yToFit-np.log10(yScaling*(stab[yColumnName]-stab[yMinusErrColumnName])/yPivot)
else:
yToFit=stab[yColumnName]
yErrToFitPlus=stab[yPlusErrColumnName]
yErrToFitMinus=stab[yMinusErrColumnName]
# Swap
if xToFit.dtype.byteorder == '>':
xToFit=xToFit.byteswap().newbyteorder('=')
stab.add_column(atpy.Column(xToFit, 'xToFit'))
stab.add_column(atpy.Column(xErrToFitPlus, 'xErrToFitPlus'))
stab.add_column(atpy.Column(xErrToFitMinus, 'xErrToFitMinus'))
stab.add_column(atpy.Column(yToFit, 'yToFit'))
stab.add_column(atpy.Column(yErrToFitPlus, 'yErrToFitPlus'))
stab.add_column(atpy.Column(yErrToFitMinus, 'yErrToFitMinus'))
# If we ever get around to fiddling with detection probabilities again, change this...
if 'detPColumnName' in settingsDict.keys():
if settingsDict['detPColumnName'] != 'detP':
stab.add_column(atpy.Column(stab[settingsDict['detPColumnName']], 'detP'))
#stab['detP']=np.ones(len(stab))
#stab['detP']=stab['detP'].byteswap().newbyteorder()
#IPython.embed()
#sys.exit()
else:
stab.add_column(atpy.Column([1.0]*len(stab), 'detP'))
if 'ignoreSelectionFunction' in settingsDict.keys() and settingsDict['ignoreSelectionFunction'] == True:
stab['detP']=np.ones(len(stab))
if settingsDict['symmetriseErrors'] == True:
xAvErr=(stab['xErrToFitPlus']+stab['xErrToFitMinus'])/2.0
yAvErr=(stab['yErrToFitPlus']+stab['yErrToFitMinus'])/2.0
stab['xErrToFitPlus']=xAvErr
stab['xErrToFitMinus']=xAvErr
stab['yErrToFitPlus']=yAvErr
stab['yErrToFitMinus']=yAvErr
# Histograms of redshift and x property distribution, one above the other
# Fiddle with this later...
#print "plots"
#IPython.embed()
#sys.exit()
#fontDict={'size': 16}
#cols=1
#pylab.figure(figsize=(6, 8*cols))
#pylab.subplots_adjust(0.1, 0.06, 0.97, 0.97, 0.03, 0.12)
#pylab.subplot(2, 1, 1)
#pylab.hist(stab['redshift'], bins = numpy.linspace(0.0, 1.5, 16), histtype = 'stepfilled', color =
#'#A0A0A0', ec = '#A0A0A0')
#pylab.xlabel("$z$", fontdict = fontDict)
#pylab.ylabel("N", fontdict = fontDict)
#pylab.ylim(0, 60)
#pylab.subplot(2, 1, 2)
#pylab.hist(stab['temp'], bins = numpy.linspace(0, 12, 13), histtype = 'stepfilled', color =
#'#A0A0A0', ec = '#A0A0A0')
#pylab.xlabel("$T$ (keV)", fontdict = fontDict)
#pylab.ylabel("N", fontdict = fontDict)
##pylab.yticks(ylocs, [""]*len(ylabels))
#pylab.ylim(0, 60)
#pylab.savefig(outDir+os.path.sep+"zT_histograms.pdf")
#pylab.close()
return stab
#-------------------------------------------------------------------------------------------------------------
def MCMCFit(settingsDict, tab):
"""My attempt at fitting using MCMC and maximum likelihood.
settingsDict = dictionary containing MCMC parameters and settings
You can choose whether to use the likelihood for 'bisector' or 'orthogonal' fitting using the 'method' key.
"""
# Can now swap axes for testing purposes
if 'swapAxes' in settingsDict.keys():
swapAxes=settingsDict['swapAxes']
else:
swapAxes=False
print "... swapAxes = ", swapAxes
# Choice of method
method=settingsDict['method']
if method == 'orthogonal':
likelihood=csr.fastOrthogonalLikelihood
variables=['A', 'B', 'C', 'S']
numFreePars=4
elif method == 'bisector':
likelihood=csr.fastBisectorLikelihood
variables=['A', 'B', 'C', 'Sx', 'Sy']
numFreePars=5
settingsDict['variables']=variables # A handy place to store this for cutting down code elsewhere
scales=[]
for v in variables:
scales.append(settingsDict['%sScale' % (v)])
# Start by writing this in python, but calling the likelihood function in cython
# MCMC parameters
numSamples=settingsDict['numSamples'] # Total number of random steps over likelihood surface
burnSamples=settingsDict['burnSamples'] # Throw away initial bunch of this many samples
thinning=settingsDict['thinning'] # Keep only every ith sample - good in some ways, bad in others
# Choice of evolution models
if settingsDict['evoModel'] == '1+z':
log10RedshiftEvo=np.log10(tab[settingsDict['redshiftColumnName']]+1)
elif settingsDict['evoModel'] == 'E(z)':
log10RedshiftEvo=np.log10(tab['E(z)'])
else:
raise Exception, "didn't understand evoModel '%s'" % (evoModel)
#log10RedshiftEvo=np.array(log10RedshiftEvo, dtype = float)
# To start with, we're going to use the same proposal distribution for everything
# But later on we could dig out the correlated random numbers code to generate random parameter values that
# satisfy the covariance we see between parameters, which would speed things up.
cPars=selectStartParsFromPriors(settingsDict)
#print "... starting values [A, B, C, S] = [%.2f, %.2f, %.2f, %.2f]" % (cA, cB, cC, cS)
# Byte swapping festival to keep cython happy
yToFit=byteSwapArr(tab['yToFit'])
yErrToFitPlus=byteSwapArr(tab['yErrToFitPlus'])
yErrToFitMinus=byteSwapArr(tab['yErrToFitMinus'])
xToFit=byteSwapArr(tab['xToFit'])
xErrToFitPlus=byteSwapArr(tab['xErrToFitPlus'])
xErrToFitMinus=byteSwapArr(tab['xErrToFitMinus'])
detP=byteSwapArr(tab['detP'])
# Another thing... fix this later properly... but if everything isn't same data type, cython falls over
yToFit=np.array(tab['yToFit'], dtype = np.float64)
yErrToFitPlus=np.array(tab['yErrToFitPlus'], dtype = np.float64)
yErrToFitMinus=np.array(tab['yErrToFitMinus'], dtype = np.float64)
xToFit=np.array(tab['xToFit'], dtype = np.float64)
xErrToFitPlus=np.array(tab['xErrToFitPlus'], dtype = np.float64)
xErrToFitMinus=np.array(tab['xErrToFitMinus'], dtype = np.float64)
log10RedshiftEvo=np.array(log10RedshiftEvo, dtype = np.float64)
detP=np.array(tab['detP'], dtype = np.float64)
if swapAxes == False:
try:
cProb, probArray=likelihood(cPars, yToFit, yErrToFitPlus, yErrToFitMinus, xToFit, xErrToFitPlus,
xErrToFitMinus, log10RedshiftEvo, detP)
except:
print "byte swapping problem?"
IPython.embed()
sys.exit()
else:
cProb, probArray=likelihood(cPars, xToFit, xErrToFitPlus, xErrToFitMinus, yToFit, yErrToFitPlus,
yErrToFitMinus, log10RedshiftEvo, detP)
if cProb == 0:
raise Exception, "initial position in MCMC chain has zero probability - change initial values/fiddle with priors in .par file?"
allPars=[] # == 'the Markov chain'
likelihoods=[]
# Metropolis-Hastings (actually just Metropolis since our candidate distribution is symmetric)
for k in range(numSamples):
# Progress update
tenPercent=numSamples/10
for j in range(0,11):
if k == j*tenPercent:
print "... "+str(j*10)+"% complete ..."
pPars=makeProposal(cPars, scales, settingsDict)
if swapAxes == False:
pProb, probArray=likelihood(pPars, yToFit, yErrToFitPlus, yErrToFitMinus, xToFit, xErrToFitPlus,
xErrToFitMinus, log10RedshiftEvo, detP)
else:
pProb, probArray=likelihood(pPars, xToFit, xErrToFitPlus, xErrToFitMinus, yToFit, yErrToFitPlus,
yErrToFitMinus, log10RedshiftEvo, detP)
if np.isinf(pProb) == True:
print "Hmm - infinite probability?"
IPython.embed()
sys.exit()
# Changed below because we're now dealing with log10 probabilities instead of the actual numbers
alpha=pProb-cProb
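        # alpha = log10(P_proposed) - log10(P_current): accept unconditionally if the proposal is more
        # probable; otherwise accept with probability P_proposed/P_current (standard Metropolis rule)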
acceptProposal=False
if alpha > 0:
acceptProposal=True
else:
U=math.log10(np.random.uniform(0, 1))
if U <= alpha:
acceptProposal=True
# Our prior is uniform, so we're really just using it to force the answer into a range
# i.e. if it's not 1.0, then something has strayed out of the box.
pPrior=getPPrior(pPars, settingsDict)
if acceptProposal == True and pPrior == 1.0:
cPars=pPars
cProb=pProb
# Only keep samples after burning in and also thin as we go along
if k > burnSamples and k % thinning == 0:
# If we want to plot the trace (i.e. to check mixing) then we want to store these always in some fashion
# As it is, we're only keeping the ones that are drawn from the probability distributions
allPars.append(cPars)
likelihoods.append(pProb)
allPars=np.array(allPars)
likelihoods=np.array(likelihoods)
# If we swap axes, it's just easier to transform back into a form we know
if 'swapAxes' in settingsDict.keys() and settingsDict['swapAxes'] == True:
a=-allPars[:, 0]/allPars[:, 1]
b=1.0/allPars[:, 1]
allPars[:, 0]=a
allPars[:, 1]=b
    # Geweke test to check if the chain has converged
# If z < 2 then we're converged
index10Percent=int(len(allPars)*0.1)
index50Percent=int(len(allPars)*0.5)
mean10Percent=allPars[:index10Percent].mean(axis = 0)
mean50Percent=allPars[::-1][:index50Percent].mean(axis = 0)
var10Percent=allPars[:index10Percent].var(axis = 0)
var50Percent=allPars[::-1][:index50Percent].var(axis = 0)
zStatistic=(mean10Percent-mean50Percent)/np.sqrt(var10Percent+var50Percent)
zStatistic=np.nan_to_num(zStatistic)
# Zap entries in here that are fixed (avoids round off or div 0 making them look large when we don't care)
for i in range(len(variables)):
v=variables[i]
if settingsDict['%sFit' % (v)] == 'fixed':
zStatistic[i]=0.0
numFreePars=numFreePars-1
    # Parameter estimates are taken as the mean of the marginalised posterior distribution for each parameter
# 1-sigma errors are similarly easy (could also use calc1SigmaError routine, but this is quicker)
resultsDict={}
for i in range(len(variables)):
v=variables[i]
resultsDict['%s' % (v)]=allPars[:, i].mean()
resultsDict['%sErr' % (v)]=calc68Percentile(allPars[:, i])
# Scott's translation of orthogonal scatter S into scatter in y-variable at fixed x-variable
if method == 'orthogonal':
s=allPars[:, 3]/np.cos(np.arctan(allPars[:, 1]))
resultsDict['s']=s.mean()
resultsDict['sErr']=calc68Percentile(s)
# We have numFreePars above
lnL=np.log(np.power(10, likelihoods))
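    # AIC = 2k - 2 ln(L_max); AICc adds the small-sample correction 2k(k+1)/(N-k-1),
    # where k = number of free parameters and N = number of objects in the sample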
resultsDict['AIC']=2*numFreePars-2*lnL.max()
resultsDict['AICc']=resultsDict['AIC']+(2*numFreePars*(numFreePars+1))/(float(len(tab))-numFreePars-1)
resultsDict['pars']=allPars
resultsDict['zStatistic']=zStatistic
# chi-sq
#yMod=(xToFit*resultsDict['B'])+resultsDict['A']+resultsDict['C']*log10RedshiftEvo
#chiSq=np.sum(np.power(yToFit-yMod, 2)/np.power(yErrToFitPlus, 2))
#resultsDict['chiSq']=chiSq
#print "check chiSq"
#IPython.embed()
#sys.exit()
return resultsDict
#-------------------------------------------------------------------------------------------------------------
def makeProposal(pars, scales, settingsDict):
"""Generates random set of parameters in format [A, B, C, S] for feeding into likelihood function.
Proposal distributions are assumed Gaussian with scales [AScale, BScale, CScale, SScale].
"""
# This makes sure that if we're testing by swapping axes, we can use the same prior scales
# To the same space as our scales
if 'swapAxes' in settingsDict.keys() and settingsDict['swapAxes'] == True:
b=1.0/pars[1]
a=-pars[0]/pars[1]
pars[0]=a
pars[1]=b
prop=np.random.normal(pars, scales)
# And back...
if 'swapAxes' in settingsDict.keys() and settingsDict['swapAxes'] == True:
b=1.0/prop[1]
a=-prop[0]/prop[1]
prop[0]=a
prop[1]=b
# Force scatters +ve
prop[3:]=abs(prop[3:])
if settingsDict['AFit'] == 'fixed':
prop[0]=settingsDict['A0']
if settingsDict['BFit'] == 'fixed':
prop[1]=settingsDict['B0']
if settingsDict['CFit'] == 'fixed':
prop[2]=settingsDict['C0']
if settingsDict['method'] == 'orthogonal':
if settingsDict['SFit'] == 'fixed':
prop[3]=settingsDict['S0']
elif settingsDict['method'] == 'bisector':
if settingsDict['SxFit'] == 'fixed':
prop[3]=settingsDict['Sx0']
if settingsDict['SyFit'] == 'fixed':
prop[4]=settingsDict['Sy0']
return prop
#-------------------------------------------------------------------------------------------------------------
def make1DProbDensityPlots(fitResults, settingsDict, outDir):
"""Makes 1D plots of probability density distributions
"""
sigmaScale=5.0
bins=30
variables=settingsDict['variables']
axes=range(len(variables))
# Individual plots
#for v, a in zip(variables, axes):
#if settingsDict['%sFit' % (v)] == 'free':
#x=np.linspace(fitResults['%s' % (v)]-sigmaScale*fitResults['%sErr' % (v)],
#fitResults['%s' % (v)]+sigmaScale*fitResults['%sErr' % (v)], bins)
#P1D=LTCythonMCMC.fast1DProbProjection(x, a, fitResults['pars'])
#make1DPlot(x, P1D, '%s' % (v), '%s = %.3f $\pm$ %.3f' % (v, fitResults['%s' % (v)], fitResults['%sErr' % (v)]),
#outDir+os.path.sep+"1DProb_%s.pdf" % (v))
# Make an uber plot with multiple panels
cols=0
for v, a in zip(variables, axes):
if settingsDict['%sFit' % (v)] == 'free':
cols=cols+1
plt.figure(figsize=(4.5*cols, 3.94))
plt.subplots_adjust(0.02, 0.12, 0.98, 0.92, 0.1, 0.1)
count=0
for v, a in zip(variables, axes):
if settingsDict['%sFit' % (v)] == 'free':
count=count+1
x=np.linspace(fitResults['%s' % (v)]-sigmaScale*fitResults['%sErr' % (v)],
fitResults['%s' % (v)]+sigmaScale*fitResults['%sErr' % (v)], bins)
P1D=csr.fast1DProbProjection(x, a, fitResults['pars'])
P1D=P1D/P1D.max()
plt.subplot(1, cols, count)
ax=plt.gca()
y=P1D
fitLabel='%s = %.3f $\pm$ %.3f' % (v, fitResults['%s' % (v)], fitResults['%sErr' % (v)])
xLabel='%s' % (v)
plt.plot(x, y, 'k-', label = fitLabel)
plt.xlabel(xLabel, fontdict = {'size': 14})
plt.ylabel("")
plt.yticks([], [])
ax.xaxis.set_major_locator(matplotlib.ticker.MaxNLocator(6))
plt.ylim(0, 1.2)
leg=plt.legend(prop = {'size': 12})
leg.draw_frame(False)
plt.draw()
plt.savefig(outDir+os.path.sep+"1DProb_allPars.pdf")
plt.close()
#-------------------------------------------------------------------------------------------------------------
def make1DPlot(x, y, xLabel, fitLabel, outFileName):
"""Actually makes the 1D probability plots
"""
plt.plot(x, y, label = fitLabel)
plt.xlabel(xLabel)
plt.ylabel("")
plt.legend()
plt.savefig(outFileName)
plt.close()
#-------------------------------------------------------------------------------------------------------------
def makeContourPlots(fitResults, outDir, sampleLabel):
"""This takes fit results and turns it into contour plots.
"""
mlA, mlAErr=fitResults['A'], fitResults['AErr']
mlB, mlBErr=fitResults['B'], fitResults['BErr']
mlC, mlCErr=fitResults['C'], fitResults['CErr']
mlS, mlSErr=fitResults['S'], fitResults['SErr']
pars=fitResults['pars']
# Make 2d contour plots of valid combinations, determined by if they have a non null 1 sigma error
As=np.linspace(mlA-5.0*mlAErr-math.fmod(mlA-5.0*mlAErr, 0.1), mlA+7.0*mlAErr-math.fmod(mlA+7.0*mlAErr, 0.1), 81)
Bs=np.linspace(mlB-5.0*mlBErr-math.fmod(mlB-5.0*mlBErr, 0.1), mlB+7.0*mlBErr-math.fmod(mlB+7.0*mlBErr, 0.1), 81)
Cs=np.linspace(mlC-5.0*mlCErr-math.fmod(mlC-5.0*mlCErr, 0.1), mlC+7.0*mlCErr-math.fmod(mlC+7.0*mlCErr, 0.1), 81)
Ss=np.linspace(mlS-5.0*mlSErr-math.fmod(mlS-5.0*mlSErr, 0.05), mlS+7.0*mlSErr-math.fmod(mlS+7.0*mlSErr, 0.05), 81)
if mlAErr > 0 and mlBErr > 0:
outFileName=outDir+os.path.sep+"contours_AvB_"+sampleLabel+".pdf"
PDist2D=csr.fast2DProbProjection(As, Bs, 0, 1, pars)
astImages.saveFITS(outFileName.replace(".pdf", ".fits"), PDist2D, None)
probContourPlot(As, Bs, "A", "B", 0.1, 0.1, mlA, mlB, mlAErr, mlBErr, PDist2D, outFileName)
if mlAErr > 0 and mlCErr > 0:
outFileName=outDir+os.path.sep+"contours_AvC_"+sampleLabel+".pdf"
PDist2D=csr.fast2DProbProjection(As, Cs, 0, 2, pars)
probContourPlot(As, Cs, "A", "C", 0.1, 0.5, mlA, mlC, mlAErr, mlCErr, PDist2D, outFileName)
astImages.saveFITS(outFileName.replace(".pdf", ".fits"), PDist2D, None)
if mlAErr > 0 and mlSErr > 0:
outFileName=outDir+os.path.sep+"contours_AvS_"+sampleLabel+".pdf"
PDist2D=csr.fast2DProbProjection(As, Ss, 0, 3, pars)
probContourPlot(As, Ss, "A", "S", 0.1, 0.05, mlA, mlS, mlAErr, mlSErr, PDist2D, outFileName)
astImages.saveFITS(outFileName.replace(".pdf", ".fits"), PDist2D, None)
if mlBErr > 0 and mlCErr > 0:
outFileName=outDir+os.path.sep+"contours_BvC_"+sampleLabel+".pdf"
PDist2D=csr.fast2DProbProjection(Bs, Cs, 1, 2, pars)
probContourPlot(Bs, Cs, "B", "C", 0.1, 0.5, mlB, mlC, mlBErr, mlCErr, PDist2D, outFileName)
astImages.saveFITS(outFileName.replace(".pdf", ".fits"), PDist2D, None)
#-------------------------------------------------------------------------------------------------------------
def probContourPlot(par1Values, par2Values, par1Label, par2Label, par1TickStep, par2TickStep, mlPar1, mlPar2,
mlPar1Err, mlPar2Err, PDist2D, outFileName):
"""Make a 2d contour plot of probability surface of given parameters.
par1Values = values for parameter 1 (plotted on Y axis)
par2Values = values for parameter 2 (plotted on X axis)
par1Label = text label for Y axis
par2Label = text label for X axis
par1TickStep = tick step along Y axis
par2TickStep = tick step along X axis
mlPar1 = maximum likelihood value for parameter 1
mlPar2 = maximum likelihood value for parameter 2
mlPar1Err = 1d 1-sigma error in parameter 1
mlPar2Err = 1d 1-sigma error in parameter 2
PDist2D = 2d likelihood surface, made using fast2DProbProjection
"""
tck1=interpolate.splrep(par1Values, np.arange(par1Values.shape[0]))
par1TickLabels=np.arange(par1Values.min(), par1Values.max(), par1TickStep)
par1TickIndices=interpolate.splev(par1TickLabels, tck1)
plt.yticks(par1TickIndices, par1TickLabels)
tck2=interpolate.splrep(par2Values, np.arange(par2Values.shape[0]))
par2TickLabels=np.arange(par2Values.min(), par2Values.max(), par2TickStep)
par2TickIndices=interpolate.splev(par2TickLabels, tck2)
plt.xticks(par2TickIndices, par2TickLabels)
# We have to smooth to get decent looking contours
# Gaussian smoothing preserves the normalisation
# NOTE: smoothing only needed if very fine grid
PDist2D=ndimage.gaussian_filter(PDist2D, 1)
# Work out where to put contours
sigma1Level=calc2DProbThreshold(PDist2D, 0.683)
sigma2Level=calc2DProbThreshold(PDist2D, 0.95)
plt.contour(PDist2D, [sigma1Level, sigma2Level], colors = 'b')
# Save plot - trim down area first (?) and add axes labels
plt.plot(interpolate.splev(mlPar2, tck2), interpolate.splev(mlPar1, tck1), 'r*',
label = "%s = %.2f $\pm$ %.2f, %s = %.2f $\pm$ %.2f" % (par1Label, mlPar1, mlPar1Err, par2Label, mlPar2, mlPar2Err))
plt.legend(numpoints = 1)
plt.xlabel(par2Label)
plt.ylabel(par1Label)
if outFileName != None:
plt.savefig(outFileName)
plt.close()
#-------------------------------------------------------------------------------------------------------------
def calc1SigmaError(par1d, prob1d, mlParValue):
"""Calculates 1d 1-sigma error on a parameter (marginalised, is the word I'm looking for I think) relative
to the maximum likelihood value.
NOTE: Now we're using MCMC, the regular calc68Percentile routine below works just fine, and is quicker
than this.
"""
norm=np.trapz(prob1d, par1d)
prob1d=prob1d/norm
tckPDist=interpolate.splrep(par1d, prob1d)
target=0.683 # 1 sigma
    dRange=np.linspace(0.0, par1d.max()-mlParValue, 1000) # we need to work out how to choose sensible values
bestDiff=1e6
dBest=1e6
for d in dRange:
integrationRange=np.linspace(mlParValue-d, mlParValue+d, 1000)
diff=abs(target-np.trapz(interpolate.splev(integrationRange, tckPDist), integrationRange))
if diff < bestDiff:
bestDiff=diff
dBest=d
return dBest
#-------------------------------------------------------------------------------------------------------------
def calc2DProbThreshold(PDist2D, probThresh):
"""Calculates threshold probability per pixel in PDist2D needed to draw confidence contours at e.g.
1-sigma, 2-sigma level
"""
p=PDist2D.flatten()
p.sort()
p=p[::-1]
pCumSum=p.cumsum()
diff=abs(pCumSum-probThresh)
pIndex=diff.tolist().index(diff.min())
pLevel=p[pIndex]
return pLevel
#------------------------------------------------------------------------------------------------------------
def calc68Percentile(arr):
"""Calculates the 68-percentile (i.e. equivalent to 1-sigma error) from an array.
"""
res=np.abs(arr-np.median(arr))
res=np.sort(res)
index=int(round(0.683*arr.shape[0]))
try:
err=res[index]
except:
print "index error?"
IPython.embed()
sys.exit()
return err
#-------------------------------------------------------------------------------------------------------------
def makeScalingRelationPlot(sampleTab, fitResults, outDir, sampleDict, settingsDict):
"""Make a scaling relation plot.
sampleDict = the dictionary defining the sample (e.g. min z, max z etc.)
"""
# Stuff we need from settings...
xColumnName=settingsDict['xColumnName']
xPlusErrColumnName=settingsDict['xPlusErrColumnName']
xMinusErrColumnName=settingsDict['xMinusErrColumnName']
yColumnName=settingsDict['yColumnName']
yPlusErrColumnName=settingsDict['yPlusErrColumnName']
yMinusErrColumnName=settingsDict['yMinusErrColumnName']
xPivot=settingsDict['xPivot']
xTakeLog10=settingsDict['xTakeLog10']
yTakeLog10=settingsDict['yTakeLog10']
redshiftColumnName=settingsDict['redshiftColumnName']
xScaleFactor=settingsDict['xScaleFactor']
yScaleFactor=settingsDict['yScaleFactor']
yScaleFactorPower=settingsDict['yScaleFactorPower']
# The plot
plt.figure(figsize=(10, 10))
plt.axes([0.1, 0.1, 0.85, 0.85])
if yScaleFactor != None:
yPlot=np.power(sampleTab['E(z)'], yScaleFactorPower)*sampleTab[yColumnName]
yPlotErrs=np.array([np.power(sampleTab['E(z)'], yScaleFactorPower)*sampleTab[yMinusErrColumnName],
np.power(sampleTab['E(z)'], yScaleFactorPower)*sampleTab[yPlusErrColumnName]])
else:
yPlot=sampleTab[yColumnName]
yPlotErrs=np.array([sampleTab[yMinusErrColumnName],
sampleTab[yPlusErrColumnName]])
plt.errorbar(sampleTab[xColumnName], yPlot,
yerr = yPlotErrs,
xerr = np.array([sampleTab[xMinusErrColumnName],
sampleTab[xPlusErrColumnName]]),
fmt = 'kD', mec = 'k', label = sampleDict['label']+" (N=%d)" % (len(sampleTab)))
if xTakeLog10 == True and yTakeLog10 == True:
plt.loglog()
elif xTakeLog10 == True and yTakeLog10 == False:
plt.semilogx()
elif xTakeLog10 == False and yTakeLog10 == True:
plt.semilogy()
#cmdata=np.outer(np.linspace(0, 1, 10), np.linspace(0, 1, 10)) # to easily make a colorbar 0-1
#cmim=plt.imshow(cmdata, cmap = "gray")
#ax=plt.axes([0.1, 0.17, 0.85, 0.78])
if np.sum(np.equal(sampleTab['detP'], 1.0)) == len(sampleTab):
shadeByDetP=False
else:
shadeByDetP=True
if shadeByDetP == True:
for row, pY in zip(sampleTab, yPlot):
plt.plot(row[xColumnName], [pY], 'D', color = (row['detP'], row['detP'], row['detP']))
plotRange=np.linspace(settingsDict['xPlotMin'], settingsDict['xPlotMax'], 100)
if xTakeLog10 == True and yTakeLog10 == True:
yFit=settingsDict['yPivot']*np.power(10, fitResults['A'])*np.power((plotRange/xPivot), fitResults['B'])
elif xTakeLog10 == False and yTakeLog10 == False:
yFit=settingsDict['yPivot']*(fitResults['A']+fitResults['B']*(plotRange/xPivot))
else:
raise Exception, "add semilogx, semilogy fit line code"
if xPivot != 1.0:
fitLabel='%s (%s) = 10$^{%.2f \pm %.2f}$ (%s/%.1f %s)$^{%.2f \pm %.2f}$' % (settingsDict['yPlotLabel'], settingsDict['yPlotLabelUnits'], fitResults['A'], fitResults['AErr'], settingsDict['xPlotLabel'], xPivot, settingsDict['xPlotLabelUnits'], fitResults['B'], fitResults['BErr'])
else:
fitLabel='%s (%s) = 10$^{%.2f \pm %.2f}$ (%s)$^{%.2f \pm %.2f}$' % (settingsDict['yPlotLabel'], settingsDict['yPlotLabelUnits'], fitResults['A'], fitResults['AErr'], settingsDict['xPlotLabel'], fitResults['B'], fitResults['BErr'])
yLabel="%s (%s)" % (settingsDict['yPlotLabel'], settingsDict['yPlotLabelUnits'])
if settingsDict['yScaleFactor'] == "E(z)":
fitLabel="$E^{%d}(z)$ " % (settingsDict['yScaleFactorPower'])+fitLabel
yLabel="$E^{%d}(z)$ " % (settingsDict['yScaleFactorPower'])+yLabel
plt.plot(plotRange, yFit, 'b--', label = fitLabel)
## Below is just diagnostic
#if sampleLabel == 'REXCESS':
#prattLabel='$L_{\sf X}$ (erg s$^{-1}$) = 10$^{44.85 \pm 0.06}$ ($T/5.0$ keV)$^{3.35 \pm 0.32}$'
#prattLabel="$E^{-1}(z)$ "+prattLabel
#prattLabel="P09: "+prattLabel
#prattLX=np.power(10, 44.85)*np.power((plotRange/5.0), 3.35)
#plt.plot(plotRange, prattLX, 'r:', label = prattLabel)
#sample['plotLabel']=""
plt.ylabel(yLabel, size = 16)
plt.xlabel("%s (%s)" % (settingsDict['xPlotLabel'], settingsDict['xPlotLabelUnits']), size = 16)
plt.xlim(settingsDict['xPlotMin'], settingsDict['xPlotMax'])
plt.ylim(settingsDict['yPlotMin'], settingsDict['yPlotMax'])
if settingsDict['showPlotLegend'] == True:
leg=plt.legend(loc = 'upper left', prop = {'size': 16}, scatterpoints = 1, numpoints = 1)
leg.draw_frame(False)
plt.draw()
ax=plt.gca()
plt.text(0.95, 0.05, sampleDict['plotLabel'], ha = 'right', va = 'center', transform = ax.transAxes,
fontdict = {"size": 16, "linespacing" : 1.2, 'family': 'serif'})
outFileName=outDir+os.path.sep+"scalingRelation_%s_%s.pdf" % (yColumnName, xColumnName)
plt.savefig(outFileName)
plt.close()
#-------------------------------------------------------------------------------------------------------------
def makeScalingRelationPlot_ABC(sampleTab, fitResults, outDir, sampleDict, settingsDict, mode = 'normal'):
"""Make a scaling relation plot with y values scaling by normalisation and z evolution.
sampleDict = the dictionary defining the sample (e.g. min z, max z etc.)
"""
# Stuff we need from settings...
xColumnName=settingsDict['xColumnName']
xPlusErrColumnName=settingsDict['xPlusErrColumnName']
xMinusErrColumnName=settingsDict['xMinusErrColumnName']
yColumnName=settingsDict['yColumnName']
yPlusErrColumnName=settingsDict['yPlusErrColumnName']
yMinusErrColumnName=settingsDict['yMinusErrColumnName']
xPivot=settingsDict['xPivot']
xTakeLog10=settingsDict['xTakeLog10']
yTakeLog10=settingsDict['yTakeLog10']
redshiftColumnName=settingsDict['redshiftColumnName']
xScaleFactor=settingsDict['xScaleFactor']
yScaleFactor=settingsDict['yScaleFactor']
yScaleFactorPower=settingsDict['yScaleFactorPower']
# The plot...
if yScaleFactor != None:
yPlot=np.power(sampleTab['E(z)'], yScaleFactorPower)*sampleTab[yColumnName]
yPlotErrs=np.array([np.power(sampleTab['E(z)'], yScaleFactorPower)*sampleTab[yMinusErrColumnName],
np.power(sampleTab['E(z)'], yScaleFactorPower)*sampleTab[yPlusErrColumnName]])
else:
yPlot=sampleTab[yColumnName]
yPlotErrs=np.array([sampleTab[yMinusErrColumnName],
sampleTab[yPlusErrColumnName]])
fitLabel='%s = 10$^{%.2f \pm %.2f}$ (%s/%d)$^{%.2f \pm %.2f}$' % (settingsDict['yPlotLabel'], fitResults['A'], fitResults['AErr'], settingsDict['xPlotLabel'], xPivot, fitResults['B'], fitResults['BErr'])
yLabel="%s (%s)" % (settingsDict['yPlotLabel'], settingsDict['yPlotLabelUnits'])
if settingsDict['evoModel'] == '1+z':
yPlot=np.power(sampleTab[redshiftColumnName]+1, -fitResults['C'])*yPlot
yPlotErrs=np.power(sampleTab[redshiftColumnName]+1, -fitResults['C'])*yPlotErrs
fitLabel=fitLabel+' (1+$z$)$^{%s}$' % (fitResults['plotLabel_C'])
yLabel=yLabel.replace("(%s)" % (settingsDict['yPlotLabelUnits']), "(1+$z$)$^{%.1f}$ (%s)" % (-1*fitResults['C'], settingsDict['yPlotLabelUnits']))
elif settingsDict['evoModel'] == 'E(z)':
yPlot=np.power(sampleTab['E(z)'], -fitResults['C'])*yPlot
yPlotErrs=np.power(sampleTab['E(z)'], -fitResults['C'])*yPlotErrs
fitLabel=fitLabel+' $E(z)^{%s}$' % (fitResults['plotLabel_C'])
yLabel=yLabel.replace("(%s)" % (settingsDict['yPlotLabelUnits']), "$E(z)^{%.1f}$ (%s)" % (-1*fitResults['C'], settingsDict['yPlotLabelUnits']))
if settingsDict['yScaleFactor'] == "E(z)":
fitLabel="$E^{%d}(z)$ " % (settingsDict['yScaleFactorPower'])+fitLabel
yLabel="$E^{%d}(z)$ " % (settingsDict['yScaleFactorPower'])+yLabel
if mode == 'normal':
plt.figure(figsize=(8, 8))
ax=plt.axes([0.11, 0.1, 0.86, 0.85])
plotRange=np.linspace(0.1*sampleTab[xColumnName].min(), 10*sampleTab[xColumnName].max(), 100)
yFit=np.power(10, fitResults['A'])*np.power((plotRange/xPivot), fitResults['B'])
plt.plot(plotRange, yFit, 'b--', label = fitLabel)
outFileName=outDir+os.path.sep+"scalingRelation_%s_%s_ABC.pdf" % (settingsDict['yColumnName'], settingsDict['xColumnName'])
# Old
#plt.errorbar(sampleTab['temp'], plotLXs,
#yerr = plotLXErrs,
#xerr = np.array([sampleTab['temp_min'],
#sampleTab['temp_max']]),
#fmt = 'kD', mec = 'k', label = sampleLabel+" (N=%d)" % (len(sampleTab)))
# New (coding by redshift)
zBins=[[0.0, 0.25], [0.25, 0.5], [0.5, 1.5]]
labels=["0.0 < $z$ < 0.25", "0.25 < $z$ < 0.5", "0.5 < $z$ < 1.5"]
#colours=['k', [0.5, 0, 1], [1, 0.5, 0]]
colours=['k', 'c', 'r']
symbols=['D', 'o', '^']
for zBin, col, s, l in zip(zBins, colours, symbols, labels):
mask=np.logical_and(np.greater(sampleTab[redshiftColumnName], zBin[0]), np.less_equal(sampleTab[redshiftColumnName], zBin[1]))
plt.errorbar(sampleTab[xColumnName][mask], yPlot[mask],
yerr = yPlotErrs[:, mask],
xerr = np.array([sampleTab[xMinusErrColumnName][mask],
sampleTab[xPlusErrColumnName][mask]]),
fmt = s, ecolor = col, mfc = col, mec = col, label = l)
elif mode == 'PDetCoded':
plotRange=np.linspace(0.1, 22.0, 100)
fitLXs=np.power(10, fitResults['A'])*np.power((plotRange/pivotT), fitResults['B'])
#fitLabel='$L_{\sf X}$ (erg s$^{-1}$) = 10$^{%.2f \pm %.2f}$ ($T/%.1f$ keV)$^{%.2f \pm %.2f}$ (1+$z$)$^{%.2f \pm %.2f}$' % (fitResults['A'], fitResults['AErr'], pivotT, fitResults['B'], fitResults['BErr'], fitResults['C'], fitResults['CErr'])
plt.plot(plotRange, fitLXs, 'b--', label = fitLabel)
outFileName=outDir+os.path.sep+"L-T_ABC_PDetCoded.pdf"
plt.figure(figsize=(8, 8))
plt.axes([0.5, 0.5, 0.1, 0.1])
cmdata=np.outer(np.linspace(0, 1, 10), np.linspace(0, 1, 10)) # to easily make a colorbar 0-1
cmim=plt.imshow(cmdata, cmap = "gray")
ax=plt.axes([0.1, 0.17, 0.85, 0.78])
for row, pLX in zip(sampleTab, plotLXs):
plt.plot(row['temp'], [pLX], 'D', color = (row['detP'], row['detP'], row['detP']))
cmax=plt.axes([0.1, 0.075, 0.85, 0.1], frameon=False)
plt.xticks([], [])
plt.yticks([], [])
plt.colorbar(cmim, orientation = 'v', aspect = 40.0)
plt.figtext(0.52, 0.03, "P$_{\sf det}$", va = 'center', ha = 'center')
plt.axes(ax)
else:
raise Exception, "didn't understand mode"
plt.loglog()
plt.ylabel(yLabel, size = 16)
plt.xlabel("%s (%s)" % (settingsDict['xPlotLabel'], settingsDict['xPlotLabelUnits']), size = 16)
plt.xlim(settingsDict['xPlotMin'], settingsDict['xPlotMax'])
plt.ylim(settingsDict['yPlotMin'], settingsDict['yPlotMax'])
#leg=plt.legend(loc = 'upper left', prop = {'size': 16}, scatterpoints = 1, numpoints = 1)
#leg.draw_frame(False)
plt.draw()
ax=plt.gca()
plt.text(0.95, 0.05, sampleDict['plotLabel'], ha = 'right', va = 'center', transform = ax.transAxes,
fontdict = {"size": 16, "linespacing" : 1.2, 'family': 'serif'})
plt.savefig(outFileName)
plt.close()
#-------------------------------------------------------------------------------------------------------------
def makeScalingRelationPlots_sideBySide(sampleDefs, outDir, settingsDict):
"""Makes side by side subpanel plots of all the scaling relations in sampleDefs
"""
# Stuff we need from settings...
xColumnName=settingsDict['xColumnName']
xPlusErrColumnName=settingsDict['xPlusErrColumnName']
xMinusErrColumnName=settingsDict['xMinusErrColumnName']
yColumnName=settingsDict['yColumnName']
yPlusErrColumnName=settingsDict['yPlusErrColumnName']
yMinusErrColumnName=settingsDict['yMinusErrColumnName']
xPivot=settingsDict['xPivot']
xTakeLog10=settingsDict['xTakeLog10']
yTakeLog10=settingsDict['yTakeLog10']
redshiftColumnName=settingsDict['redshiftColumnName']
xScaleFactor=settingsDict['xScaleFactor']
yScaleFactor=settingsDict['yScaleFactor']
yScaleFactorPower=settingsDict['yScaleFactorPower']
# Make an uber plot with multiple panels
# NOTE: add adjustable layout later...
cols=len(sampleDefs)
plt.figure(figsize=(6*cols, 6))
plt.subplots_adjust(0.05, 0.1, 0.99, 0.99, 0.02, 0.02)
count=0
for s in sampleDefs:
sampleTab=s['stab']
fitResults=s['fitResults']
count=count+1
plt.subplot(1, cols, count)
if yScaleFactor != None:
yPlot=np.power(sampleTab['E(z)'], yScaleFactorPower)*sampleTab[yColumnName]
yPlotErrs=np.array([np.power(sampleTab['E(z)'], yScaleFactorPower)*sampleTab[yMinusErrColumnName],
np.power(sampleTab['E(z)'], yScaleFactorPower)*sampleTab[yPlusErrColumnName]])
else:
yPlot=sampleTab[yColumnName]
yPlotErrs=np.array([sampleTab[yMinusErrColumnName],
sampleTab[yPlusErrColumnName]])
plt.errorbar(sampleTab[xColumnName], yPlot,
yerr = yPlotErrs,
xerr = np.array([sampleTab[xMinusErrColumnName],
sampleTab[xPlusErrColumnName]]),
fmt = 'kD', mec = 'k', label = s['label']+" (N=%d)" % (len(sampleTab)))
plt.loglog()
plotRange=np.linspace(0.1*sampleTab[xColumnName].min(), 10*sampleTab[xColumnName].max(), 100)
yFit=settingsDict['yPivot']*np.power(10, fitResults['A'])*np.power((plotRange/xPivot), fitResults['B'])
fitLabel='%s (%s) = 10$^{%.2f \pm %.2f}$ (%s/%.1f %s)$^{%.2f \pm %.2f}$' % (settingsDict['yPlotLabel'], settingsDict['yPlotLabelUnits'], fitResults['A'], fitResults['AErr'], settingsDict['xPlotLabel'], xPivot, settingsDict['xPlotLabelUnits'], fitResults['B'], fitResults['BErr'])
yLabel="%s (%s)" % (settingsDict['yPlotLabel'], settingsDict['yPlotLabelUnits'])
if settingsDict['yScaleFactor'] == "E(z)":
fitLabel="$E^{%d}(z)$ " % (settingsDict['yScaleFactorPower'])+fitLabel
yLabel="$E^{%d}(z)$ " % (settingsDict['yScaleFactorPower'])+yLabel
plt.plot(plotRange, yFit, 'b--', label = fitLabel)
plt.ylabel(yLabel, size = 16)
plt.xlabel("%s (%s)" % (settingsDict['xPlotLabel'], settingsDict['xPlotLabelUnits']), size = 16)
ax=plt.gca()
plt.text(0.95, 0.05, s['plotLabel'], ha = 'right', va = 'center', transform = ax.transAxes,
fontdict = {"size": 16, "linespacing" : 1.2, 'family': 'serif'})
if count > 1:
ylocs, ylabels=plt.yticks()
plt.ylabel("")
plt.yticks(ylocs, [""]*len(ylabels))
plt.xlim(settingsDict['xPlotMin'], settingsDict['xPlotMax'])
plt.ylim(settingsDict['yPlotMin'], settingsDict['yPlotMax'])
outFileName=outDir+os.path.sep+"scalingRelation_multiPlot_%s_%s.pdf" % (yColumnName, xColumnName)
plt.savefig(outFileName)
plt.close()
#-------------------------------------------------------------------------------------------------------------
def makeRoundedPlotLabelStrings(fitResults, variables, numSigFig = 1):
"""Add plot labels to fitResults, to given number of sig fig, taking care of rounding
NOTE: disabled the rounding for now
"""
# Not rounding, just dp not sf
dps=[2, 2, 1, 3, 3]
for p, dp in zip(variables, dps):
if fitResults['%sErr' % (p)] != 0:
fmt="%."+str(dp)+"f"
valStr=fmt % (fitResults['%s' % (p)])
errStr=fmt % (fitResults['%sErr' % (p)])
fitResults['plotLabel_%s' % (p)]="%s \pm %s" % (valStr, errStr)
#-------------------------------------------------------------------------------------------------------------
def makeNormEvoPlot(stab, fitResults, outDir, settingsDict):
"""Makes plot of evolution of the normalisation.
"""
zs=np.linspace(0, 2.0, 100)
Ez=[]
for z in zs:
Ez.append(astCalc.Ez(z))
Ez=np.array(Ez)
plt.figure(figsize=(8,6))
plt.axes([0.13, 0.1, 0.85, 0.86])
xColumnName=settingsDict['xColumnName']
yColumnName=settingsDict['yColumnName']
redshiftColumnName=settingsDict['redshiftColumnName']
yLabel="%s / %s$_{Fit (z=0)}$" % (settingsDict['yPlotLabel'], settingsDict['yPlotLabel'])
# If we have applied E(z)^{some power}, we want to plot that expected scaling,
# as well as a null line for no evolution
if settingsDict['yScaleFactor'] == 'E(z)':
dataNormalisation=((np.power(stab['E(z)'], settingsDict['yScaleFactorPower'])*stab[yColumnName])/np.power(stab[xColumnName]/settingsDict['xPivot'], fitResults['B']))/np.power(10, fitResults['A'])
nullLine=np.power(Ez, settingsDict['yScaleFactorPower']) # because E(z)^{some power} is flat in this form, null line is not
yScalingLine=np.ones(len(Ez)) # because we've scaled it out it's flat
yLabel="($E^{-1}(z)$ %s) / %s$_{Fit (z=0)}$" % (settingsDict['yPlotLabel'], settingsDict['yPlotLabel'])
else:
dataNormalisation=(stab[yColumnName]/np.power(stab[xColumnName]/settingsDict['xPivot'], fitResults['B']))/np.power(10, fitResults['A'])
        nullLine=np.ones(len(Ez))   # no-evolution expectation is a constant ratio of 1
yScalingLine=None
yLabel="%s / %s$_{Fit (z=0)}$" % (settingsDict['yPlotLabel'], settingsDict['yPlotLabel'])
dataLabel='%s$_{Fit (z=0)}$ = (%s/%d)$^{%.2f}$ / 10$^{%.2f}$' % (settingsDict['yPlotLabel'], settingsDict['xPlotLabel'], settingsDict['xPivot'], fitResults['B'], fitResults['A'])
if settingsDict['yScaleFactor'] == 'E(z)':
# Look for fractions
if settingsDict['yScaleFactorPower'] == -1:
yScalingLineLabel='$E(z)$'
elif abs(settingsDict['yScaleFactorPower']) == 2/3.0:
yScalingLineLabel='$E(z)$'
powerFactor=settingsDict['yScaleFactorPower']
# Need to swap power, remember we scaled these out...
if powerFactor > 0:
yScalingLineLabel=yScalingLineLabel+"$^{-2/3}$"
else:
yScalingLineLabel=yScalingLineLabel+"$^{2/3}$"
else:
print "yScalingLineLabel fraction handling?"
IPython.embed()
sys.exit()
plt.plot(stab[redshiftColumnName], dataNormalisation, 'kD', label = dataLabel)
if np.any(yScalingLine) != None:
plt.plot(zs, yScalingLine, 'b--', label = yScalingLineLabel, lw = 2)
plt.plot(zs, nullLine, 'g-.', label = 'no evolution', lw = 2)
if settingsDict['evoModel'] == '1+z':
plt.plot(zs, np.power(1+zs, fitResults['C']), 'r', lw = 2, label = '(1+z)$^{%.2f \pm %.2f}$' % (fitResults['C'], fitResults['CErr']))
shadedX=np.linspace(0, 2.0, 100)
shadedYPlus=np.power(shadedX+1, fitResults['C']+fitResults['CErr'])
shadedYMinus=np.power(shadedX+1, fitResults['C']-fitResults['CErr'])
elif settingsDict['evoModel'] == 'E(z)':
plt.plot(zs, np.power(Ez, fitResults['C']), 'r', lw = 2, label = '$E(z)^{%.2f \pm %.2f}$' % (fitResults['C'], fitResults['CErr']))
shadedX=np.linspace(0, 2.0, len(Ez))
shadedYPlus=np.power(Ez, fitResults['C']+fitResults['CErr'])
shadedYMinus=np.power(Ez, fitResults['C']-fitResults['CErr'])
if fitResults['C'] < 0:
loc="upper right"
else:
loc="lower left"
leg=plt.legend(loc = loc, prop = {'size': 14}, numpoints = 1)
leg.draw_frame(False)
plt.draw()
plt.xlabel("$z$", fontdict = {'size': 20})
plt.ylabel(yLabel, fontdict = {'size': 20})
xs=shadedX.tolist()+shadedX[::-1].tolist()
ys=shadedYPlus.tolist()+shadedYMinus[::-1].tolist()
plt.fill(xs, ys, 'b', alpha=0.2, edgecolor='none', label = "None", lw = 0.1)
plt.semilogy()
#plt.loglog()
plt.xlim(0, 1.6)
plt.ylim(1e-2, 1e2)
plt.savefig(outDir+os.path.sep+"normEvo_%s_%s.pdf" % (yColumnName, xColumnName))
plt.close()
#-------------------------------------------------------------------------------------------------------------
def makePaperContourPlots(fitResults, parDict, outDir):
"""Special case of plots, for 4 parameter fits, for the paper.
"""
if 'S' not in fitResults.keys():
print "... using bisector method - 2D contour plots disabled ..."
return None
mlA, mlAErr=fitResults['A'], fitResults['AErr']
mlB, mlBErr=fitResults['B'], fitResults['BErr']
mlC, mlCErr=fitResults['C'], fitResults['CErr']
mlS, mlSErr=fitResults['S'], fitResults['SErr']
pars=fitResults['pars']
# We only want to go on if we have a full set...
if mlAErr == 0 or mlBErr == 0 or mlCErr == 0 or mlSErr == 0:
return None
plt.figure(figsize=(10, 10))
plt.subplots_adjust(0.08, 0.07, 0.97, 0.97, 0.0, 0.0)
# Make 2d contour plots of valid combinations, determined by if they have a non null 1 sigma error
# NOTE: here steps have to be smaller than AStep, BStep, CStep, SStep below
# NOTE: any strange numbers in here are fiddling to get non-overlapping plot labels
As=np.linspace(mlA-5.0*mlAErr-math.fmod(mlA-5.0*mlAErr, 0.1), mlA+5.0*mlAErr-math.fmod(mlA+5.0*mlAErr, 0.1), 81)
Bs=np.linspace(mlB-5.0*mlBErr-math.fmod(mlB-5.0*mlBErr, 0.1), mlB+5.0*mlBErr-math.fmod(mlB+5.0*mlBErr, 0.1), 81)
Cs=np.linspace(mlC-5.0*mlCErr-math.fmod(mlC-5.0*mlCErr, 0.1), mlC+5.0*mlCErr-math.fmod(mlC+5.0*mlCErr, 0.1), 81)
Ss=np.linspace(mlS-5.0*mlSErr-math.fmod(mlS-5.0*mlSErr, 0.01), mlS+5.0*mlSErr-math.fmod(mlS+5.0*mlSErr, 0.01), 81)
# Steps for tick label plotting adjustment
AStep=0.2
BStep=0.4
CStep=1.0
SStep=0.02
# Bottom row
# AB
plt.subplot(4, 4, 15)
PDist2D=csr.fast2DProbProjection(As, Bs, 0, 1, pars)
probContourPlot_subPlot(As, Bs, "A", "B", AStep, BStep, mlA, mlB, mlAErr, mlBErr, PDist2D, noYLabels = True)
# AC
plt.subplot(4, 4, 14)
PDist2D=csr.fast2DProbProjection(As, Cs, 0, 2, pars)
probContourPlot_subPlot(As, Cs, "A", "C", AStep, CStep, mlA, mlC, mlAErr, mlCErr, PDist2D, noYLabels = True)
# AS
plt.subplot(4, 4, 13)
PDist2D=csr.fast2DProbProjection(As, Ss, 0, 3, pars)
probContourPlot_subPlot(As, Ss, "A", "S", AStep, SStep, mlA, mlS, mlAErr, mlSErr, PDist2D)
# Middle row
# BC
plt.subplot(4, 4, 10)
PDist2D=csr.fast2DProbProjection(Bs, Cs, 1, 2, pars)
probContourPlot_subPlot(Bs, Cs, "B", "C", BStep, CStep, mlB, mlC, mlBErr, mlCErr, PDist2D, noXLabels = True, noYLabels = True)
# BS
plt.subplot(4, 4, 9)
PDist2D=csr.fast2DProbProjection(Bs, Ss, 1, 3, pars)
probContourPlot_subPlot(Bs, Ss, "B", "S", BStep, SStep, mlB, mlS, mlBErr, mlSErr, PDist2D, noXLabels = True)
# Top row
# CS
plt.subplot(4, 4, 5)
PDist2D=csr.fast2DProbProjection(Cs, Ss, 2, 3, pars)
probContourPlot_subPlot(Cs, Ss, "C", "S", CStep, SStep, mlC, mlS, mlCErr, mlSErr, PDist2D, noXLabels = True)
# 1D plots
# S
plt.subplot(4, 4, 1)
PDist1D=csr.fast1DProbProjection(Ss, 3, pars)
probPlot1D_subPlot(Ss, "S", SStep, mlS, mlSErr, PDist1D, fitResults['plotLabel_S'], noYLabels = True, noXLabels = True)
# C
plt.subplot(4, 4, 6)
PDist1D=csr.fast1DProbProjection(Cs, 2, pars)
probPlot1D_subPlot(Cs, "C", CStep, mlC, mlCErr, PDist1D, fitResults['plotLabel_C'], noYLabels = True, noXLabels = True)
# B
plt.subplot(4, 4, 11)
PDist1D=csr.fast1DProbProjection(Bs, 1, pars)
probPlot1D_subPlot(Bs, "B", BStep, mlB, mlBErr, PDist1D, fitResults['plotLabel_B'], noYLabels = True, noXLabels = True)
# A
plt.subplot(4, 4, 16)
PDist1D=csr.fast1DProbProjection(As, 0, pars)
probPlot1D_subPlot(As, "A", AStep, mlA, mlAErr, PDist1D, fitResults['plotLabel_A'], noYLabels = True, noXLabels = False)
plt.savefig(outDir+os.path.sep+"2DProb_allPars.pdf")
plt.close()
#-------------------------------------------------------------------------------------------------------------
def probPlot1D_subPlot(par1Values, par1Label, par1TickStep, mlPar1, mlPar1Err, PDist1D, resultLabel,
noXLabels = False, noYLabels = False):
"""Make a 1d contour plot of marginalised probability for a parameter.
par1Values = values for parameter 1 (plotted on Y axis)
par1Label = text label for Y axis
par1TickStep = tick step along Y axis
mlPar1 = maximum likelihood value for parameter 1
mlPar1Err = 1d 1-sigma error in parameter 1
PDist1D = 1d prob distribution for parameter 1
"""
par1TickLabels=np.arange(par1Values.min(), par1Values.max(), par1TickStep)
plt.xticks(par1TickLabels, par1TickLabels)
PDist1D=PDist1D/PDist1D.max()
ax=plt.gca()
fitLabel='%s = %s' % (par1Label, resultLabel.replace("\pm", "$\pm$"))
plt.plot(par1Values, PDist1D, 'k-', label = fitLabel)
plt.ylabel("")
plt.yticks([], [])
#ax.xaxis.set_major_locator(matplotlib.ticker.MaxNLocator(6))
plt.ylim(0, 1.2)
leg=plt.legend(loc = (0.0, 0.86), prop = {'size': 12})
leg.draw_frame(False)
plt.draw()
plt.xlabel(par1Label)
if noYLabels == True:
ylocs, ylabels=plt.yticks()
plt.ylabel("")
plt.yticks(ylocs, [""]*len(ylabels))
if noXLabels == True:
xlocs, xlabels=plt.xticks()
plt.xlabel("")
plt.xticks(xlocs, [""]*len(xlabels))
#-------------------------------------------------------------------------------------------------------------
def probContourPlot_subPlot(par1Values, par2Values, par1Label, par2Label, par1TickStep, par2TickStep, mlPar1, mlPar2,
mlPar1Err, mlPar2Err, PDist2D, noXLabels = False, noYLabels = False):
"""Make a 2d contour plot of probability surface of given parameters. Somewhat needless duplication of
code, for makePaperContourPlots
par1Values = values for parameter 1 (plotted on Y axis)
par2Values = values for parameter 2 (plotted on X axis)
par1Label = text label for Y axis
par2Label = text label for X axis
par1TickStep = tick step along Y axis
par2TickStep = tick step along X axis
mlPar1 = maximum likelihood value for parameter 1
mlPar2 = maximum likelihood value for parameter 2
mlPar1Err = 1d 1-sigma error in parameter 1
mlPar2Err = 1d 1-sigma error in parameter 2
PDist2D = 2d likelihood surface, made using fast2DProbProjection
"""
tck1=interpolate.splrep(par1Values, np.arange(par1Values.shape[0]))
par1TickLabels=np.arange(par1Values.min(), par1Values.max(), par1TickStep)
par1TickIndices=interpolate.splev(par1TickLabels, tck1)
plt.yticks(par1TickIndices, par1TickLabels)
tck2=interpolate.splrep(par2Values, np.arange(par2Values.shape[0]))
par2TickLabels=np.arange(par2Values.min(), par2Values.max(), par2TickStep)
par2TickIndices=interpolate.splev(par2TickLabels, tck2)
plt.xticks(par2TickIndices, par2TickLabels)
# We have to smooth to get decent looking contours
# Gaussian smoothing preserves the normalisation
# NOTE: smoothing only needed if very fine grid
PDist2D=ndimage.gaussian_filter(PDist2D, 1)
# Work out where to put contours
sigma1Level=calc2DProbThreshold(PDist2D, 0.683)
sigma2Level=calc2DProbThreshold(PDist2D, 0.95)
    # Note: newer versions of matplotlib require the contour levels in increasing order, so the
    # lower (2-sigma) threshold is listed before the higher (1-sigma) one.
    try:
        plt.contour(PDist2D, [sigma2Level, sigma1Level], colors = 'k')
    except Exception:
        print "contour problem"
        IPython.embed()
        sys.exit()
# Save plot - trim down area first (?) and add axes labels
plt.plot(interpolate.splev(mlPar2, tck2), interpolate.splev(mlPar1, tck1), 'k*',
label = "%s = %.2f $\pm$ %.2f, %s = %.2f $\pm$ %.2f" % (par1Label, mlPar1, mlPar1Err, par2Label, mlPar2, mlPar2Err))
#plt.legend(numpoints = 1)
plt.xlabel(par2Label)
plt.ylabel(par1Label)
if noYLabels == True:
ylocs, ylabels=plt.yticks()
plt.ylabel("")
plt.yticks(ylocs, [""]*len(ylabels))
if noXLabels == True:
xlocs, xlabels=plt.xticks()
plt.xlabel("")
plt.xticks(xlocs, [""]*len(xlabels))
| gpl-3.0 | 6,037,897,584,997,941,000 | 42.088194 | 287 | 0.585862 | false |
mtwestra/akvo-wandelenvoorwater | wvw/urls.py | 1 | 1878 | from django.conf.urls.defaults import *
from django.views.static import serve
from W4W.models import school, inschrijving,steunpunt
from django.conf import settings
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()
info_dict_list_scholen = {
'queryset': school.objects.all().order_by('NAAM_VOLLEDIG'),
'paginate_by': 20,
'extra_context':{'order_by':'nd'}
}
result=inschrijving.objects.filter(ACTIEF=True).order_by('-id')
numschools=len(result)
info_dict_list_inschrijvingen = {
'queryset': result,
'paginate_by': 20,
'extra_context':{'order_by':'id','numschools':numschools}
}
info_dict_list_steunpunten = {
'queryset': steunpunt.objects.filter(ACTIEF=True).exclude(NAAM__contains='onbekend').order_by('id'),
'paginate_by': 20,
'extra_context':{'order_by':'id'}
}
info_dict_detail={
'queryset': school.objects.all(),
}
urlpatterns = patterns('',
(r'^scholen/$', 'django.views.generic.list_detail.object_list', info_dict_list_scholen),
(r'^inschrijvingen/$', 'django.views.generic.list_detail.object_list', info_dict_list_inschrijvingen),
(r'^steunpunten/$', 'django.views.generic.list_detail.object_list', info_dict_list_steunpunten),
(r'^scholen/(?P<object_id>\d+)/$', 'django.views.generic.list_detail.object_detail', info_dict_detail),
(r'^scholen/query/$', 'W4W.views.query_school'),
(r'^inschrijvingen/query/$', 'W4W.views.query_inschrijving'),
(r'^steunpunten/query/$', 'W4W.views.query_steunpunt'),
(r'^inschrijf/$', 'W4W.views.inschrijf'),
(r'^admin/', include(admin.site.urls)),
(r'^scholen/export/$','W4W.views.export_to_excel_school' ),
(r'^inschrijvingen/export/$','W4W.views.export_to_excel_inschrijf' ),
(r'^WvW_media/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.STATIC_DOC_ROOT}),
)
| agpl-3.0 | -2,006,749,946,259,257,000 | 37.326531 | 108 | 0.685836 | false |
junwoo091400/MyCODES | Projects/FootPad_Logger/logged_data_analyzer_LSTM/Data_manipulation.py | 1 | 1131 | from Base import FootLog
def Datestr_to_Int(datestr):
    # Convert a 'YYYY-MM-DD' date string into a YYMMDD integer, e.g. '2017-03-21' -> 170321.
    bigStr = ''.join(datestr.split('-'))
    return int(bigStr[2:])  # drop the leading '20' of the year (dates are 20xx)
'''
def Time2Float(timestr,state):
if(state == DateHandler.BREAKFAST):
elif(state == DateHandler.LUNCH):
elif(state == DateHandler.DINNER):
return TimeDiff('')
else:
return -1
'''
def find_1_maxIdx(arr):
for i in range(len(arr)-1,-1,-1):
if(arr[i] == 1):
return i
return -1 # Not found.
def find_MaxValue_Idx(predict):
max = 0
idx = -1
for i in range(len(predict)):
if(predict[i]>max):
max = predict[i]
idx = i
return idx
FLAG_1 = 1<<0
FLAG_2 = 1<<1
FLAG_3 = 1<<2
FLAG_4 = 1<<3
FLAG_5 = 1<<4
def Encrpted_to_List5(ts):
retArr = [0,0,0,0,0]
if(ts & FLAG_1):
retArr[0] = 1
if(ts & FLAG_2):
retArr[1] = 1
if(ts & FLAG_3):
retArr[2] = 1
if(ts & FLAG_4):
retArr[3] = 1
if(ts & FLAG_5):
retArr[4] = 1
return retArr
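# Illustrative usage (not part of the original logger code): Encrpted_to_List5 simply unpacks the
# five FLAG_* bits of an encoded value into a 0/1 list, as the small self-check below shows.
def _demo_Encrpted_to_List5():
    encoded = FLAG_1 | FLAG_3 | FLAG_5        # bits 0, 2 and 4 set -> 0b10101
    assert Encrpted_to_List5(encoded) == [1, 0, 1, 0, 1]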
def Timestr_difference_Seconds(start,stop):
stt = [int(x) for x in start.split(':')]
stp = [int(x) for x in stop.split(':')]
delta = (stp[0]-stt[0])*3600 + (stp[1]-stt[1])*60 + (stp[2]-stt[2])
return delta | gpl-3.0 | -3,336,045,622,675,393,500 | 19.214286 | 69 | 0.609195 | false |
beiko-lab/gengis | bin/Lib/site-packages/scipy/ndimage/tests/test_measurements.py | 1 | 38896 | from __future__ import division, print_function, absolute_import
from numpy.testing import assert_, assert_array_almost_equal, assert_equal, \
assert_almost_equal, assert_array_equal, \
assert_raises, run_module_suite, TestCase
import numpy as np
import scipy.ndimage as ndimage
import os.path
types = [np.int8, np.uint8, np.int16,
np.uint16, np.int32, np.uint32,
np.int64, np.uint64,
np.float32, np.float64]
np.mod(1., 1) # Silence fmod bug on win-amd64. See #1408 and #1238.
class Test_measurements_stats(TestCase):
"""ndimage.measurements._stats() is a utility function used by other functions."""
def test_a(self):
x = [0,1,2,6]
labels = [0,0,1,1]
index = [0,1]
for shp in [(4,), (2,2)]:
x = np.array(x).reshape(shp)
labels = np.array(labels).reshape(shp)
counts, sums = ndimage.measurements._stats(x, labels=labels, index=index)
assert_array_equal(counts, [2, 2])
assert_array_equal(sums, [1.0, 8.0])
def test_b(self):
# Same data as test_a, but different labels. The label 9 exceeds the
# length of 'labels', so this test will follow a different code path.
x = [0,1,2,6]
labels = [0,0,9,9]
index = [0,9]
for shp in [(4,), (2,2)]:
x = np.array(x).reshape(shp)
labels = np.array(labels).reshape(shp)
counts, sums = ndimage.measurements._stats(x, labels=labels, index=index)
assert_array_equal(counts, [2, 2])
assert_array_equal(sums, [1.0, 8.0])
def test_a_centered(self):
x = [0,1,2,6]
labels = [0,0,1,1]
index = [0,1]
for shp in [(4,), (2,2)]:
x = np.array(x).reshape(shp)
labels = np.array(labels).reshape(shp)
counts, sums, centers = ndimage.measurements._stats(x, labels=labels,
index=index, centered=True)
assert_array_equal(counts, [2, 2])
assert_array_equal(sums, [1.0, 8.0])
assert_array_equal(centers, [0.5, 8.0])
def test_b_centered(self):
x = [0,1,2,6]
labels = [0,0,9,9]
index = [0,9]
for shp in [(4,), (2,2)]:
x = np.array(x).reshape(shp)
labels = np.array(labels).reshape(shp)
counts, sums, centers = ndimage.measurements._stats(x, labels=labels,
index=index, centered=True)
assert_array_equal(counts, [2, 2])
assert_array_equal(sums, [1.0, 8.0])
assert_array_equal(centers, [0.5, 8.0])
def test_nonint_labels(self):
x = [0,1,2,6]
labels = [0.0, 0.0, 9.0, 9.0]
index = [0.0, 9.0]
for shp in [(4,), (2,2)]:
x = np.array(x).reshape(shp)
labels = np.array(labels).reshape(shp)
counts, sums, centers = ndimage.measurements._stats(x, labels=labels,
index=index, centered=True)
assert_array_equal(counts, [2, 2])
assert_array_equal(sums, [1.0, 8.0])
assert_array_equal(centers, [0.5, 8.0])
class Test_measurements_select(TestCase):
"""ndimage.measurements._select() is a utility function used by other functions."""
def test_basic(self):
x = [0,1,6,2]
cases = [
([0,0,1,1], [0,1]), # "Small" integer labels
([0,0,9,9], [0,9]), # A label larger than len(labels)
([0.0,0.0,7.0,7.0], [0.0, 7.0]), # Non-integer labels
]
for labels, index in cases:
result = ndimage.measurements._select(x, labels=labels, index=index)
assert_(len(result) == 0)
result = ndimage.measurements._select(x, labels=labels, index=index, find_max=True)
assert_(len(result) == 1)
assert_array_equal(result[0], [1, 6])
result = ndimage.measurements._select(x, labels=labels, index=index, find_min=True)
assert_(len(result) == 1)
assert_array_equal(result[0], [0, 2])
result = ndimage.measurements._select(x, labels=labels, index=index,
find_min=True, find_min_positions=True)
assert_(len(result) == 2)
assert_array_equal(result[0], [0, 2])
assert_array_equal(result[1], [0, 3])
result = ndimage.measurements._select(x, labels=labels, index=index,
find_max=True, find_max_positions=True)
assert_(len(result) == 2)
assert_array_equal(result[0], [1, 6])
assert_array_equal(result[1], [1, 2])
def test_label01():
"label 1"
data = np.ones([])
out, n = ndimage.label(data)
assert_array_almost_equal(out, 1)
assert_equal(n, 1)
def test_label02():
"label 2"
data = np.zeros([])
out, n = ndimage.label(data)
assert_array_almost_equal(out, 0)
assert_equal(n, 0)
def test_label03():
"label 3"
data = np.ones([1])
out, n = ndimage.label(data)
assert_array_almost_equal(out, [1])
assert_equal(n, 1)
def test_label04():
"label 4"
data = np.zeros([1])
out, n = ndimage.label(data)
assert_array_almost_equal(out, [0])
assert_equal(n, 0)
def test_label05():
"label 5"
data = np.ones([5])
out, n = ndimage.label(data)
assert_array_almost_equal(out, [1, 1, 1, 1, 1])
assert_equal(n, 1)
def test_label06():
"label 6"
data = np.array([1, 0, 1, 1, 0, 1])
out, n = ndimage.label(data)
assert_array_almost_equal(out, [1, 0, 2, 2, 0, 3])
assert_equal(n, 3)
def test_label07():
"label 7"
data = np.array([[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0]])
out, n = ndimage.label(data)
assert_array_almost_equal(out, [[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0]])
assert_equal(n, 0)
def test_label08():
"label 8"
data = np.array([[1, 0, 0, 0, 0, 0],
[0, 0, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 0],
[1, 1, 0, 0, 0, 0],
[1, 1, 0, 0, 0, 0],
[0, 0, 0, 1, 1, 0]])
out, n = ndimage.label(data)
assert_array_almost_equal(out, [[1, 0, 0, 0, 0, 0],
[0, 0, 2, 2, 0, 0],
[0, 0, 2, 2, 2, 0],
[3, 3, 0, 0, 0, 0],
[3, 3, 0, 0, 0, 0],
[0, 0, 0, 4, 4, 0]])
assert_equal(n, 4)
def test_label09():
"label 9"
data = np.array([[1, 0, 0, 0, 0, 0],
[0, 0, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 0],
[1, 1, 0, 0, 0, 0],
[1, 1, 0, 0, 0, 0],
[0, 0, 0, 1, 1, 0]])
struct = ndimage.generate_binary_structure(2, 2)
out, n = ndimage.label(data, struct)
assert_array_almost_equal(out, [[1, 0, 0, 0, 0, 0],
[0, 0, 2, 2, 0, 0],
[0, 0, 2, 2, 2, 0],
[2, 2, 0, 0, 0, 0],
[2, 2, 0, 0, 0, 0],
[0, 0, 0, 3, 3, 0]])
assert_equal(n, 3)
def test_label10():
"label 10"
data = np.array([[0, 0, 0, 0, 0, 0],
[0, 1, 1, 0, 1, 0],
[0, 1, 1, 1, 1, 0],
[0, 0, 0, 0, 0, 0]])
struct = ndimage.generate_binary_structure(2, 2)
out, n = ndimage.label(data, struct)
assert_array_almost_equal(out, [[0, 0, 0, 0, 0, 0],
[0, 1, 1, 0, 1, 0],
[0, 1, 1, 1, 1, 0],
[0, 0, 0, 0, 0, 0]])
assert_equal(n, 1)
def test_label11():
"label 11"
for type in types:
data = np.array([[1, 0, 0, 0, 0, 0],
[0, 0, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 0],
[1, 1, 0, 0, 0, 0],
[1, 1, 0, 0, 0, 0],
[0, 0, 0, 1, 1, 0]], type)
out, n = ndimage.label(data)
expected = [[1, 0, 0, 0, 0, 0],
[0, 0, 2, 2, 0, 0],
[0, 0, 2, 2, 2, 0],
[3, 3, 0, 0, 0, 0],
[3, 3, 0, 0, 0, 0],
[0, 0, 0, 4, 4, 0]]
assert_array_almost_equal(out, expected)
assert_equal(n, 4)
def test_label11_inplace():
"label 11 in place"
for type in types:
data = np.array([[1, 0, 0, 0, 0, 0],
[0, 0, 1, 1, 0, 0],
[0, 0, 1, 1, 1, 0],
[1, 1, 0, 0, 0, 0],
[1, 1, 0, 0, 0, 0],
[0, 0, 0, 1, 1, 0]], type)
n = ndimage.label(data, output=data)
expected = [[1, 0, 0, 0, 0, 0],
[0, 0, 2, 2, 0, 0],
[0, 0, 2, 2, 2, 0],
[3, 3, 0, 0, 0, 0],
[3, 3, 0, 0, 0, 0],
[0, 0, 0, 4, 4, 0]]
assert_array_almost_equal(data, expected)
assert_equal(n, 4)
def test_label12():
"label 12"
for type in types:
data = np.array([[0, 0, 0, 0, 1, 1],
[0, 0, 0, 0, 0, 1],
[0, 0, 1, 0, 1, 1],
[0, 0, 1, 1, 1, 1],
[0, 0, 0, 1, 1, 0]], type)
out, n = ndimage.label(data)
expected = [[0, 0, 0, 0, 1, 1],
[0, 0, 0, 0, 0, 1],
[0, 0, 1, 0, 1, 1],
[0, 0, 1, 1, 1, 1],
[0, 0, 0, 1, 1, 0]]
assert_array_almost_equal(out, expected)
assert_equal(n, 1)
def test_label13():
"label 13"
for type in types:
data = np.array([[1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1],
[1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1],
[1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]],
type)
out, n = ndimage.label(data)
expected = [[1, 0, 1, 1, 1, 0, 1, 1, 1, 0, 1],
[1, 1, 1, 0, 1, 1, 1, 0, 1, 1, 1],
[1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1],
[1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1]]
assert_array_almost_equal(out, expected)
assert_equal(n, 1)
def test_label_output_typed():
"test label with specified output with type"
data = np.ones([5])
for t in types:
output = np.zeros([5], dtype=t)
n = ndimage.label(data, output=output)
assert_array_almost_equal(output, 1)
assert_equal(n, 1)
def test_label_output_dtype():
"test label with specified output dtype"
data = np.ones([5])
for t in types:
output, n = ndimage.label(data, output=t)
assert_array_almost_equal(output, 1)
assert output.dtype == t
def test_label_output_wrong_size():
"test label with output of wrong size"
data = np.ones([5])
for t in types:
output = np.zeros([10], t)
assert_raises((RuntimeError, ValueError), ndimage.label, data, output=output)
def test_label_structuring_elements():
"test label with different structuring element neighborhoods"
data = np.loadtxt(os.path.join(os.path.dirname(__file__), "data", "label_inputs.txt"))
strels = np.loadtxt(os.path.join(os.path.dirname(__file__), "data", "label_strels.txt"))
results = np.loadtxt(os.path.join(os.path.dirname(__file__), "data", "label_results.txt"))
data = data.reshape((-1, 7, 7))
strels = strels.reshape((-1, 3, 3))
results = results.reshape((-1, 7, 7))
r = 0
for i in range(data.shape[0]):
d = data[i, :, :]
for j in range(strels.shape[0]):
s = strels[j, :, :]
assert_equal(ndimage.label(d, s)[0], results[r, :, :])
r += 1
def test_label_default_dtype():
test_array = np.random.rand(10, 10)
label, no_features = ndimage.label(test_array > 0.5)
assert_(label.dtype in (np.int32, np.int64))
# Shouldn't raise an exception
ndimage.find_objects(label)
def test_find_objects01():
"find_objects 1"
data = np.ones([], dtype=int)
out = ndimage.find_objects(data)
assert_(out == [()])
def test_find_objects02():
"find_objects 2"
data = np.zeros([], dtype=int)
out = ndimage.find_objects(data)
assert_(out == [])
def test_find_objects03():
"find_objects 3"
data = np.ones([1], dtype=int)
out = ndimage.find_objects(data)
assert_equal(out, [(slice(0, 1, None),)])
def test_find_objects04():
"find_objects 4"
data = np.zeros([1], dtype=int)
out = ndimage.find_objects(data)
assert_equal(out, [])
def test_find_objects05():
"find_objects 5"
data = np.ones([5], dtype=int)
out = ndimage.find_objects(data)
assert_equal(out, [(slice(0, 5, None),)])
def test_find_objects06():
"find_objects 6"
data = np.array([1, 0, 2, 2, 0, 3])
out = ndimage.find_objects(data)
assert_equal(out, [(slice(0, 1, None),),
(slice(2, 4, None),),
(slice(5, 6, None),)])
def test_find_objects07():
"find_objects 7"
data = np.array([[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0]])
out = ndimage.find_objects(data)
assert_equal(out, [])
def test_find_objects08():
"find_objects 8"
data = np.array([[1, 0, 0, 0, 0, 0],
[0, 0, 2, 2, 0, 0],
[0, 0, 2, 2, 2, 0],
[3, 3, 0, 0, 0, 0],
[3, 3, 0, 0, 0, 0],
[0, 0, 0, 4, 4, 0]])
out = ndimage.find_objects(data)
assert_equal(out, [(slice(0, 1, None), slice(0, 1, None)),
(slice(1, 3, None), slice(2, 5, None)),
(slice(3, 5, None), slice(0, 2, None)),
(slice(5, 6, None), slice(3, 5, None))])
def test_find_objects09():
"find_objects 9"
data = np.array([[1, 0, 0, 0, 0, 0],
[0, 0, 2, 2, 0, 0],
[0, 0, 2, 2, 2, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0],
[0, 0, 0, 4, 4, 0]])
out = ndimage.find_objects(data)
assert_equal(out, [(slice(0, 1, None), slice(0, 1, None)),
(slice(1, 3, None), slice(2, 5, None)),
None,
(slice(5, 6, None), slice(3, 5, None))])
def test_sum01():
"sum 1"
for type in types:
input = np.array([], type)
output = ndimage.sum(input)
assert_equal(output, 0.0)
def test_sum02():
"sum 2"
for type in types:
input = np.zeros([0, 4], type)
output = ndimage.sum(input)
assert_equal(output, 0.0)
def test_sum03():
"sum 3"
for type in types:
input = np.ones([], type)
output = ndimage.sum(input)
assert_almost_equal(output, 1.0)
def test_sum04():
"sum 4"
for type in types:
input = np.array([1, 2], type)
output = ndimage.sum(input)
assert_almost_equal(output, 3.0)
def test_sum05():
"sum 5"
for type in types:
input = np.array([[1, 2], [3, 4]], type)
output = ndimage.sum(input)
assert_almost_equal(output, 10.0)
def test_sum06():
"sum 6"
labels = np.array([], bool)
for type in types:
input = np.array([], type)
output = ndimage.sum(input, labels=labels)
assert_equal(output, 0.0)
def test_sum07():
"sum 7"
labels = np.ones([0, 4], bool)
for type in types:
input = np.zeros([0, 4], type)
output = ndimage.sum(input, labels=labels)
assert_equal(output, 0.0)
def test_sum08():
"sum 8"
labels = np.array([1, 0], bool)
for type in types:
input = np.array([1, 2], type)
output = ndimage.sum(input, labels=labels)
assert_equal(output, 1.0)
def test_sum09():
"sum 9"
labels = np.array([1, 0], bool)
for type in types:
input = np.array([[1, 2], [3, 4]], type)
output = ndimage.sum(input, labels=labels)
assert_almost_equal(output, 4.0)
def test_sum10():
"sum 10"
labels = np.array([1, 0], bool)
input = np.array([[1, 2], [3, 4]], bool)
output = ndimage.sum(input, labels=labels)
assert_almost_equal(output, 2.0)
def test_sum11():
"sum 11"
labels = np.array([1, 2], np.int8)
for type in types:
input = np.array([[1, 2], [3, 4]], type)
output = ndimage.sum(input, labels=labels,
index=2)
assert_almost_equal(output, 6.0)
def test_sum12():
"sum 12"
labels = np.array([[1, 2], [2, 4]], np.int8)
for type in types:
input = np.array([[1, 2], [3, 4]], type)
output = ndimage.sum(input, labels=labels,
index=[4, 8, 2])
assert_array_almost_equal(output, [4.0, 0.0, 5.0])
def test_mean01():
"mean 1"
labels = np.array([1, 0], bool)
for type in types:
input = np.array([[1, 2], [3, 4]], type)
output = ndimage.mean(input, labels=labels)
assert_almost_equal(output, 2.0)
def test_mean02():
"mean 2"
labels = np.array([1, 0], bool)
input = np.array([[1, 2], [3, 4]], bool)
output = ndimage.mean(input, labels=labels)
assert_almost_equal(output, 1.0)
def test_mean03():
"mean 3"
labels = np.array([1, 2])
for type in types:
input = np.array([[1, 2], [3, 4]], type)
output = ndimage.mean(input, labels=labels,
index=2)
assert_almost_equal(output, 3.0)
def test_mean04():
"mean 4"
labels = np.array([[1, 2], [2, 4]], np.int8)
olderr = np.seterr(all='ignore')
try:
for type in types:
input = np.array([[1, 2], [3, 4]], type)
output = ndimage.mean(input, labels=labels,
index=[4, 8, 2])
assert_array_almost_equal(output[[0,2]], [4.0, 2.5])
assert_(np.isnan(output[1]))
finally:
np.seterr(**olderr)
def test_minimum01():
"minimum 1"
labels = np.array([1, 0], bool)
for type in types:
input = np.array([[1, 2], [3, 4]], type)
output = ndimage.minimum(input, labels=labels)
assert_almost_equal(output, 1.0)
def test_minimum02():
"minimum 2"
labels = np.array([1, 0], bool)
input = np.array([[2, 2], [2, 4]], bool)
output = ndimage.minimum(input, labels=labels)
assert_almost_equal(output, 1.0)
def test_minimum03():
"minimum 3"
labels = np.array([1, 2])
for type in types:
input = np.array([[1, 2], [3, 4]], type)
output = ndimage.minimum(input, labels=labels,
index=2)
assert_almost_equal(output, 2.0)
def test_minimum04():
"minimum 4"
labels = np.array([[1, 2], [2, 3]])
for type in types:
input = np.array([[1, 2], [3, 4]], type)
output = ndimage.minimum(input, labels=labels,
index=[2, 3, 8])
assert_array_almost_equal(output, [2.0, 4.0, 0.0])
def test_maximum01():
"maximum 1"
labels = np.array([1, 0], bool)
for type in types:
input = np.array([[1, 2], [3, 4]], type)
output = ndimage.maximum(input, labels=labels)
assert_almost_equal(output, 3.0)
def test_maximum02():
"maximum 2"
labels = np.array([1, 0], bool)
input = np.array([[2, 2], [2, 4]], bool)
output = ndimage.maximum(input, labels=labels)
assert_almost_equal(output, 1.0)
def test_maximum03():
"maximum 3"
labels = np.array([1, 2])
for type in types:
input = np.array([[1, 2], [3, 4]], type)
output = ndimage.maximum(input, labels=labels,
index=2)
assert_almost_equal(output, 4.0)
def test_maximum04():
"maximum 4"
labels = np.array([[1, 2], [2, 3]])
for type in types:
input = np.array([[1, 2], [3, 4]], type)
output = ndimage.maximum(input, labels=labels,
index=[2, 3, 8])
assert_array_almost_equal(output, [3.0, 4.0, 0.0])
def test_maximum05():
"Ticket #501"
x = np.array([-3,-2,-1])
assert_equal(ndimage.maximum(x),-1)
def test_median01():
"median 1"
a = np.array([[1, 2, 0, 1],
[5, 3, 0, 4],
[0, 0, 0, 7],
[9, 3, 0, 0]])
labels = np.array([[1, 1, 0, 2],
[1, 1, 0, 2],
[0, 0, 0, 2],
[3, 3, 0, 0]])
output = ndimage.median(a, labels=labels, index=[1, 2, 3])
assert_array_almost_equal(output, [2.5, 4.0, 6.0])
def test_median02():
"median 2"
a = np.array([[1, 2, 0, 1],
[5, 3, 0, 4],
[0, 0, 0, 7],
[9, 3, 0, 0]])
output = ndimage.median(a)
assert_almost_equal(output, 1.0)
def test_median03():
"median 3"
a = np.array([[1, 2, 0, 1],
[5, 3, 0, 4],
[0, 0, 0, 7],
[9, 3, 0, 0]])
labels = np.array([[1, 1, 0, 2],
[1, 1, 0, 2],
[0, 0, 0, 2],
[3, 3, 0, 0]])
output = ndimage.median(a, labels=labels)
assert_almost_equal(output, 3.0)
def test_variance01():
"variance 1"
olderr = np.seterr(all='ignore')
try:
for type in types:
input = np.array([], type)
output = ndimage.variance(input)
assert_(np.isnan(output))
finally:
np.seterr(**olderr)
def test_variance02():
"variance 2"
for type in types:
input = np.array([1], type)
output = ndimage.variance(input)
assert_almost_equal(output, 0.0)
def test_variance03():
"variance 3"
for type in types:
input = np.array([1, 3], type)
output = ndimage.variance(input)
assert_almost_equal(output, 1.0)
def test_variance04():
"variance 4"
input = np.array([1, 0], bool)
output = ndimage.variance(input)
assert_almost_equal(output, 0.25)
def test_variance05():
"variance 5"
labels = [2, 2, 3]
for type in types:
input = np.array([1, 3, 8], type)
output = ndimage.variance(input, labels, 2)
assert_almost_equal(output, 1.0)
def test_variance06():
"variance 6"
labels = [2, 2, 3, 3, 4]
olderr = np.seterr(all='ignore')
try:
for type in types:
input = np.array([1, 3, 8, 10, 8], type)
output = ndimage.variance(input, labels, [2, 3, 4])
assert_array_almost_equal(output, [1.0, 1.0, 0.0])
finally:
np.seterr(**olderr)
def test_standard_deviation01():
"standard deviation 1"
olderr = np.seterr(all='ignore')
try:
for type in types:
input = np.array([], type)
output = ndimage.standard_deviation(input)
assert_(np.isnan(output))
finally:
np.seterr(**olderr)
def test_standard_deviation02():
"standard deviation 2"
for type in types:
input = np.array([1], type)
output = ndimage.standard_deviation(input)
assert_almost_equal(output, 0.0)
def test_standard_deviation03():
"standard deviation 3"
for type in types:
input = np.array([1, 3], type)
output = ndimage.standard_deviation(input)
assert_almost_equal(output, np.sqrt(1.0))
def test_standard_deviation04():
"standard deviation 4"
input = np.array([1, 0], bool)
output = ndimage.standard_deviation(input)
assert_almost_equal(output, 0.5)
def test_standard_deviation05():
"standard deviation 5"
labels = [2, 2, 3]
for type in types:
input = np.array([1, 3, 8], type)
output = ndimage.standard_deviation(input, labels, 2)
assert_almost_equal(output, 1.0)
def test_standard_deviation06():
"standard deviation 6"
labels = [2, 2, 3, 3, 4]
olderr = np.seterr(all='ignore')
try:
for type in types:
input = np.array([1, 3, 8, 10, 8], type)
output = ndimage.standard_deviation(input, labels, [2, 3, 4])
assert_array_almost_equal(output, [1.0, 1.0, 0.0])
finally:
np.seterr(**olderr)
def test_standard_deviation07():
"standard deviation 7"
labels = [1]
olderr = np.seterr(all='ignore')
try:
for type in types:
input = np.array([-0.00619519], type)
output = ndimage.standard_deviation(input, labels, [1])
assert_array_almost_equal(output, [0])
finally:
np.seterr(**olderr)
def test_minimum_position01():
"minimum position 1"
labels = np.array([1, 0], bool)
for type in types:
input = np.array([[1, 2], [3, 4]], type)
output = ndimage.minimum_position(input, labels=labels)
assert_equal(output, (0, 0))
def test_minimum_position02():
"minimum position 2"
for type in types:
input = np.array([[5, 4, 2, 5],
[3, 7, 0, 2],
[1, 5, 1, 1]], type)
output = ndimage.minimum_position(input)
assert_equal(output, (1, 2))
def test_minimum_position03():
"minimum position 3"
input = np.array([[5, 4, 2, 5],
[3, 7, 0, 2],
[1, 5, 1, 1]], bool)
output = ndimage.minimum_position(input)
assert_equal(output, (1, 2))
def test_minimum_position04():
"minimum position 4"
input = np.array([[5, 4, 2, 5],
[3, 7, 1, 2],
[1, 5, 1, 1]], bool)
output = ndimage.minimum_position(input)
assert_equal(output, (0, 0))
def test_minimum_position05():
"minimum position 5"
labels = [1, 2, 0, 4]
for type in types:
input = np.array([[5, 4, 2, 5],
[3, 7, 0, 2],
[1, 5, 2, 3]], type)
output = ndimage.minimum_position(input, labels)
assert_equal(output, (2, 0))
def test_minimum_position06():
"minimum position 6"
labels = [1, 2, 3, 4]
for type in types:
input = np.array([[5, 4, 2, 5],
[3, 7, 0, 2],
[1, 5, 1, 1]], type)
output = ndimage.minimum_position(input, labels, 2)
assert_equal(output, (0, 1))
def test_minimum_position07():
"minimum position 7"
labels = [1, 2, 3, 4]
for type in types:
input = np.array([[5, 4, 2, 5],
[3, 7, 0, 2],
[1, 5, 1, 1]], type)
output = ndimage.minimum_position(input, labels,
[2, 3])
assert_equal(output[0], (0, 1))
assert_equal(output[1], (1, 2))
def test_maximum_position01():
"maximum position 1"
labels = np.array([1, 0], bool)
for type in types:
input = np.array([[1, 2], [3, 4]], type)
output = ndimage.maximum_position(input,
labels=labels)
assert_equal(output, (1, 0))
def test_maximum_position02():
"maximum position 2"
for type in types:
input = np.array([[5, 4, 2, 5],
[3, 7, 8, 2],
[1, 5, 1, 1]], type)
output = ndimage.maximum_position(input)
assert_equal(output, (1, 2))
def test_maximum_position03():
"maximum position 3"
input = np.array([[5, 4, 2, 5],
[3, 7, 8, 2],
[1, 5, 1, 1]], bool)
output = ndimage.maximum_position(input)
assert_equal(output, (0, 0))
def test_maximum_position04():
"maximum position 4"
labels = [1, 2, 0, 4]
for type in types:
input = np.array([[5, 4, 2, 5],
[3, 7, 8, 2],
[1, 5, 1, 1]], type)
output = ndimage.maximum_position(input, labels)
assert_equal(output, (1, 1))
def test_maximum_position05():
"maximum position 5"
labels = [1, 2, 0, 4]
for type in types:
input = np.array([[5, 4, 2, 5],
[3, 7, 8, 2],
[1, 5, 1, 1]], type)
output = ndimage.maximum_position(input, labels, 1)
assert_equal(output, (0, 0))
def test_maximum_position06():
"maximum position 6"
labels = [1, 2, 0, 4]
for type in types:
input = np.array([[5, 4, 2, 5],
[3, 7, 8, 2],
[1, 5, 1, 1]], type)
output = ndimage.maximum_position(input, labels,
[1, 2])
assert_equal(output[0], (0, 0))
assert_equal(output[1], (1, 1))
def test_maximum_position07():
"maximum position 7 - float labels"
labels = np.array([1.0, 2.5, 0.0, 4.5])
for type in types:
input = np.array([[5, 4, 2, 5],
[3, 7, 8, 2],
[1, 5, 1, 1]], type)
output = ndimage.maximum_position(input, labels,
[1.0, 4.5])
assert_equal(output[0], (0, 0))
assert_equal(output[1], (0, 3))
def test_extrema01():
"extrema 1"
labels = np.array([1, 0], bool)
for type in types:
input = np.array([[1, 2], [3, 4]], type)
output1 = ndimage.extrema(input, labels=labels)
output2 = ndimage.minimum(input, labels=labels)
output3 = ndimage.maximum(input, labels=labels)
output4 = ndimage.minimum_position(input,
labels=labels)
output5 = ndimage.maximum_position(input,
labels=labels)
assert_equal(output1, (output2, output3, output4, output5))
def test_extrema02():
"extrema 2"
labels = np.array([1, 2])
for type in types:
input = np.array([[1, 2], [3, 4]], type)
output1 = ndimage.extrema(input, labels=labels,
index=2)
output2 = ndimage.minimum(input, labels=labels,
index=2)
output3 = ndimage.maximum(input, labels=labels,
index=2)
output4 = ndimage.minimum_position(input,
labels=labels, index=2)
output5 = ndimage.maximum_position(input,
labels=labels, index=2)
assert_equal(output1, (output2, output3, output4, output5))
def test_extrema03():
"extrema 3"
labels = np.array([[1, 2], [2, 3]])
for type in types:
input = np.array([[1, 2], [3, 4]], type)
output1 = ndimage.extrema(input, labels=labels,
index=[2, 3, 8])
output2 = ndimage.minimum(input, labels=labels,
index=[2, 3, 8])
output3 = ndimage.maximum(input, labels=labels,
index=[2, 3, 8])
output4 = ndimage.minimum_position(input,
labels=labels, index=[2, 3, 8])
output5 = ndimage.maximum_position(input,
labels=labels, index=[2, 3, 8])
assert_array_almost_equal(output1[0], output2)
assert_array_almost_equal(output1[1], output3)
assert_array_almost_equal(output1[2], output4)
assert_array_almost_equal(output1[3], output5)
def test_extrema04():
"extrema 4"
labels = [1, 2, 0, 4]
for type in types:
input = np.array([[5, 4, 2, 5],
[3, 7, 8, 2],
[1, 5, 1, 1]], type)
output1 = ndimage.extrema(input, labels, [1, 2])
output2 = ndimage.minimum(input, labels, [1, 2])
output3 = ndimage.maximum(input, labels, [1, 2])
output4 = ndimage.minimum_position(input, labels,
[1, 2])
output5 = ndimage.maximum_position(input, labels,
[1, 2])
assert_array_almost_equal(output1[0], output2)
assert_array_almost_equal(output1[1], output3)
assert_array_almost_equal(output1[2], output4)
assert_array_almost_equal(output1[3], output5)
def test_center_of_mass01():
"center of mass 1"
expected = [0.0, 0.0]
for type in types:
input = np.array([[1, 0], [0, 0]], type)
output = ndimage.center_of_mass(input)
assert_array_almost_equal(output, expected)
def test_center_of_mass02():
"center of mass 2"
expected = [1, 0]
for type in types:
input = np.array([[0, 0], [1, 0]], type)
output = ndimage.center_of_mass(input)
assert_array_almost_equal(output, expected)
def test_center_of_mass03():
"center of mass 3"
expected = [0, 1]
for type in types:
input = np.array([[0, 1], [0, 0]], type)
output = ndimage.center_of_mass(input)
assert_array_almost_equal(output, expected)
def test_center_of_mass04():
"center of mass 4"
expected = [1, 1]
for type in types:
input = np.array([[0, 0], [0, 1]], type)
output = ndimage.center_of_mass(input)
assert_array_almost_equal(output, expected)
def test_center_of_mass05():
"center of mass 5"
expected = [0.5, 0.5]
for type in types:
input = np.array([[1, 1], [1, 1]], type)
output = ndimage.center_of_mass(input)
assert_array_almost_equal(output, expected)
def test_center_of_mass06():
"center of mass 6"
expected = [0.5, 0.5]
input = np.array([[1, 2], [3, 1]], bool)
output = ndimage.center_of_mass(input)
assert_array_almost_equal(output, expected)
def test_center_of_mass07():
"center of mass 7"
labels = [1, 0]
expected = [0.5, 0.0]
input = np.array([[1, 2], [3, 1]], bool)
output = ndimage.center_of_mass(input, labels)
assert_array_almost_equal(output, expected)
def test_center_of_mass08():
"center of mass 8"
labels = [1, 2]
expected = [0.5, 1.0]
input = np.array([[5, 2], [3, 1]], bool)
output = ndimage.center_of_mass(input, labels, 2)
assert_array_almost_equal(output, expected)
def test_center_of_mass09():
"center of mass 9"
labels = [1, 2]
expected = [(0.5, 0.0), (0.5, 1.0)]
input = np.array([[1, 2], [1, 1]], bool)
output = ndimage.center_of_mass(input, labels, [1, 2])
assert_array_almost_equal(output, expected)
def test_histogram01():
"histogram 1"
expected = np.ones(10)
input = np.arange(10)
output = ndimage.histogram(input, 0, 10, 10)
assert_array_almost_equal(output, expected)
def test_histogram02():
"histogram 2"
labels = [1, 1, 1, 1, 2, 2, 2, 2]
expected = [0, 2, 0, 1, 1]
input = np.array([1, 1, 3, 4, 3, 3, 3, 3])
output = ndimage.histogram(input, 0, 4, 5, labels, 1)
assert_array_almost_equal(output, expected)
def test_histogram03():
"histogram 3"
labels = [1, 0, 1, 1, 2, 2, 2, 2]
expected1 = [0, 1, 0, 1, 1]
expected2 = [0, 0, 0, 3, 0]
input = np.array([1, 1, 3, 4, 3, 5, 3, 3])
output = ndimage.histogram(input, 0, 4, 5, labels, (1,2))
assert_array_almost_equal(output[0], expected1)
assert_array_almost_equal(output[1], expected2)
def test_stat_funcs_2d():
"""Apply the stat funcs to a 2-d array."""
a = np.array([[5,6,0,0,0], [8,9,0,0,0], [0,0,0,3,5]])
lbl = np.array([[1,1,0,0,0], [1,1,0,0,0], [0,0,0,2,2]])
mean = ndimage.mean(a, labels=lbl, index=[1, 2])
assert_array_equal(mean, [7.0, 4.0])
var = ndimage.variance(a, labels=lbl, index=[1, 2])
assert_array_equal(var, [2.5, 1.0])
std = ndimage.standard_deviation(a, labels=lbl, index=[1, 2])
assert_array_almost_equal(std, np.sqrt([2.5, 1.0]))
med = ndimage.median(a, labels=lbl, index=[1, 2])
assert_array_equal(med, [7.0, 4.0])
min = ndimage.minimum(a, labels=lbl, index=[1, 2])
assert_array_equal(min, [5, 3])
max = ndimage.maximum(a, labels=lbl, index=[1, 2])
assert_array_equal(max, [9, 5])
if __name__ == "__main__":
run_module_suite()
| gpl-3.0 | -6,009,158,939,059,965,000 | 30.467446 | 95 | 0.465112 | false |
MrNeon/qbittorrent-search-plugins | strikesearch.py | 1 | 1268 | #VERSION: 1.03
#AUTHORS: MrNeon
from novaprinter import prettyPrinter
from helpers import retrieve_url, download_file
import json
class strikesearch(object):
url = 'https://getstrike.net/'
name = 'Strike Search'
supported_categories = {'all': '', 'movies': 'Movies', 'tv': 'TV', 'anime': 'Anime', 'books': 'Books',
'music': 'Music', 'games': 'Games', 'software': 'Applications'}
def __init__(self):
pass
def download_torrent(self, info):
print(download_file(info))
def search(self, what, cat='all'):
json_data = retrieve_url("".join((self.url, 'api/v2/torrents/search/?phrase=', what,
'&category=', self.supported_categories.get(cat, ''))))
json_dict = json.loads(json_data)
if json_dict['results'] < 1:
return
for r in json_dict['torrents']:
r_dict = {'link': r['magnet_uri'],
'name': r['torrent_title'],
'size': str(r['size']) + 'B',
'seeds': r['seeds'],
'leech': r['leeches'],
'desc_link': r['page'],
'engine_url': self.url}
prettyPrinter(r_dict)
| mit | 2,467,185,917,792,230,400 | 33.27027 | 106 | 0.501577 | false |
szarroug3/X-Ray_Calibre_Plugin | lib/book.py | 1 | 48525 | # Book.py
'''Controls book functions and holds book data'''
import os
import json
import struct
from sqlite3 import connect
from datetime import datetime
from cStringIO import StringIO
from shutil import copy
from calibre.ebooks.mobi import MobiError
from calibre.ebooks.metadata.mobi import MetadataUpdater
from calibre_plugins.xray_creator.lib.utilities import LIBRARY
from calibre_plugins.xray_creator.lib.status_info import StatusInfo
from calibre_plugins.xray_creator.lib.book_parser import BookParser
from calibre_plugins.xray_creator.lib.book_settings import BookSettings
from calibre_plugins.xray_creator.lib.exceptions import PageDoesNotExist
from calibre_plugins.xray_creator.lib.xray_db_writer import XRayDBWriter
from calibre_plugins.xray_creator.lib.goodreads_parser import GoodreadsParser
class Book(object):
'''Class to hold book information and creates/sends files depending on user settings'''
def __init__(self, database, book_id, connections, settings):
self._basic_info = {'book_id': book_id, 'xray_send_fmt': None}
self._goodreads_conn = connections['goodreads']
self._settings = settings
self._xray_format_information = None
self._statuses = {'general': StatusInfo(status=StatusInfo.IN_PROGRESS),
'xray': StatusInfo(), 'xray_send': StatusInfo(),
'author_profile': StatusInfo(), 'author_profile_send': StatusInfo(),
'start_actions': StatusInfo(), 'start_actions_send': StatusInfo(),
'end_actions': StatusInfo(), 'end_actions_send': StatusInfo()}
self._goodreads_data = {}
self._book_settings = BookSettings(database, book_id, connections)
self._get_basic_information(database, settings['formats'])
if self._statuses['general'].status != StatusInfo.FAIL:
self._statuses['general'].status = StatusInfo.SUCCESS
@property
def status(self):
return self._statuses['general']
@property
def xray_status(self):
return self._statuses['xray']
@property
def xray_send_status(self):
return self._statuses['xray_send']
@property
def xray_send_fmt(self):
return self._basic_info['xray_send_fmt']
@property
def author_profile_status(self):
return self._statuses['author_profile']
@property
def author_profile_send_status(self):
return self._statuses['author_profile_send']
@property
def start_actions_status(self):
return self._statuses['start_actions']
@property
def start_actions_send_status(self):
return self._statuses['start_actions_send']
@property
def end_actions_status(self):
return self._statuses['end_actions']
@property
def end_actions_send_status(self):
return self._statuses['end_actions_send']
@property
def book_id(self):
return self._basic_info['book_id']
@property
def title(self):
return self._basic_info['title']
@property
def author(self):
return self._basic_info['author']
@property
def title_and_author(self):
return '{0} - {1}'.format(self._basic_info['title'], self._basic_info['author'])
def xray_formats_failing(self):
'''Yields x-ray formats that are failing'''
for fmt, info in self._xray_format_information.items():
if info['status'].status is StatusInfo.FAIL:
yield (fmt, info)
def xray_formats_not_failing(self):
'''Yields x-ray formats that are not failing'''
for fmt, info in self._xray_format_information.items():
if info['status'].status is not StatusInfo.FAIL:
yield (fmt, info)
def xray_formats_not_failing_exist(self):
'''Checks if any formats that aren't failing exist'''
return any(self.xray_formats_not_failing())
def _get_basic_information(self, database, formats):
'''Gets title, author, goodreads url, ASIN, and file specific info for the book'''
self._basic_info['title'] = database.field_for('title', self._basic_info['book_id'])
self._basic_info['author'] = ' & '.join(database.field_for('authors', self._basic_info['book_id']))
if self._basic_info['title'] == 'Unknown' or self._basic_info['author'] == 'Unknown':
self._statuses['general'].set(StatusInfo.FAIL, StatusInfo.F_BASIC_INFORMATION_MISSING)
return
if not self._book_settings.prefs['goodreads_url'] or self._book_settings.prefs['goodreads_url'] == '':
self._statuses['general'].set(StatusInfo.FAIL, StatusInfo.F_COULD_NOT_FIND_GOODREADS_PAGE)
return
if not self._book_settings.prefs['asin'] or self._book_settings.prefs['asin'] == '':
self._statuses['general'].set(StatusInfo.FAIL, StatusInfo.F_COULD_NOT_FIND_ASIN)
return
self._basic_info['goodreads_url'] = self._book_settings.prefs['goodreads_url']
self._basic_info['asin'] = self._book_settings.prefs['asin']
if os.path.isfile(self._book_settings.prefs['sample_xray']):
self._basic_info['sample_xray'] = self._book_settings.prefs['sample_xray']
else:
self._basic_info['sample_xray'] = None
if self._settings['create_send_xray']:
self._get_basic_xray_information(database, formats)
if (self._settings['create_send_author_profile']
or self._settings['create_send_start_actions']
or self._settings['create_send_end_actions']):
self._get_basic_non_xray_information(database)
def _get_basic_xray_information(self, database, formats):
'''Gets aliases and format information for the book and initializes x-ray variables'''
self._basic_info['aliases'] = self._book_settings.prefs['aliases']
self._xray_format_information = {}
self._statuses['xray'].status = StatusInfo.IN_PROGRESS
for fmt in formats:
info = {'status': StatusInfo(status=StatusInfo.IN_PROGRESS)}
# find local book if it exists; fail if it doesn't
local_book = database.format_abspath(self._basic_info['book_id'], fmt.upper())
if not local_book or not os.path.exists(local_book):
info['status'].set(StatusInfo.FAIL, StatusInfo.F_LOCAL_BOOK_NOT_FOUND)
else:
info['local_book'] = local_book
local_xray = '.'.join(local_book.split('.')[:-1]) + '.sdr'
if not os.path.exists(local_xray):
os.mkdir(local_xray)
info['local_xray'] = os.path.join(local_xray, fmt)
if not os.path.exists(info['local_xray']):
os.mkdir(info['local_xray'])
self._xray_format_information[fmt.lower()] = info
if not self.xray_formats_not_failing_exist():
self._statuses['xray'].set(StatusInfo.FAIL, StatusInfo.F_NO_APPROPRIATE_LOCAL_BOOK_FOUND)
def _get_basic_non_xray_information(self, database):
'''Gets local book's directory and initializes non-xray variables'''
book_path = database.field_for('path', self._basic_info['book_id']).replace('/', os.sep)
local_book_directory = os.path.join(LIBRARY, book_path)
self._basic_info['local_non_xray'] = os.path.join(local_book_directory, 'non_xray')
if not os.path.exists(self._basic_info['local_non_xray']):
os.mkdir(self._basic_info['local_non_xray'])
if self._settings['create_send_author_profile']:
self._statuses['author_profile'].status = StatusInfo.IN_PROGRESS
if self._settings['create_send_start_actions']:
self._statuses['start_actions'].status = StatusInfo.IN_PROGRESS
if self._settings['create_send_end_actions']:
self._statuses['end_actions'].status = StatusInfo.IN_PROGRESS
def create_files_event(self, create_file_params, log, notifications, abort):
'''Creates and sends files depending on user's settings'''
title_and_author = self.title_and_author
device_books, perc, total = create_file_params
# Prep
if not self._settings['overwrite_when_creating']:
notifications.put((self._calculate_percentage(perc, total),
'Checking for {0} existing files'.format(title_and_author)))
log('{0} Checking for existing files...'.format(datetime.now().strftime('%m-%d-%Y %H:%M:%S')))
self._check_for_existing_files()
perc += 1
if abort.isSet():
return
create_xray = self._settings['create_send_xray'] and self.xray_formats_not_failing_exist()
author_profile = (self._settings['create_send_author_profile'] and
self._statuses['author_profile'].status != StatusInfo.FAIL)
start_actions = (self._settings['create_send_start_actions'] and
self._statuses['start_actions'].status != StatusInfo.FAIL)
end_actions = self._settings['create_send_end_actions'] and self._statuses['end_actions'].status != StatusInfo.FAIL
if create_xray or author_profile or start_actions or end_actions:
if self._basic_info['sample_xray'] and create_xray:
notifications.put((self._calculate_percentage(perc, total),
'Parsing {0} given data'.format(title_and_author)))
log('{0} Parsing given data...'.format(datetime.now().strftime('%m-%d-%Y %H:%M:%S')))
self._parse_input_file()
self._parse_goodreads_data(create_xray=False, create_author_profile=author_profile,
create_start_actions=start_actions, create_end_actions=end_actions)
else:
notifications.put((self._calculate_percentage(perc, total),
'Parsing {0} Goodreads data'.format(title_and_author)))
log('{0} Parsing Goodreads data...'.format(datetime.now().strftime('%m-%d-%Y %H:%M:%S')))
self._parse_goodreads_data(create_xray=create_xray, create_author_profile=author_profile,
create_start_actions=start_actions, create_end_actions=end_actions)
perc += 1
if self._statuses['general'].status is StatusInfo.FAIL:
return
# Creating Files
if abort.isSet():
return
files_to_send = self._create_files(perc, total, notifications, log)
self._update_general_statuses()
# Sending Files
if self._settings['send_to_device'] and device_books is not None:
send_files = False
if self._settings['create_send_xray'] and self.xray_formats_not_failing_exist():
send_files = True
elif (self._settings['create_send_author_profile'] and
self._statuses['author_profile'].status != StatusInfo.FAIL):
send_files = True
elif (self._settings['create_send_start_actions'] and
self._statuses['start_actions'].status != StatusInfo.FAIL):
send_files = True
elif self._settings['create_send_end_actions'] and self._statuses['end_actions'].status != StatusInfo.FAIL:
send_files = True
if send_files:
notifications.put((self._calculate_percentage(perc, total),
'Sending {0} files to device'.format(self.title_and_author)))
log('{0} Sending files to device...'.format(datetime.now().strftime('%m-%d-%Y %H:%M:%S')))
self._check_fmts_for_create_event(device_books, files_to_send)
if len(files_to_send) > 0:
self._send_files(device_books, files_to_send)
perc += 1
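    # Illustrative only (not part of the plugin): create_files_event() is written to be driven from a
    # background job. A caller would look roughly like the following, where device_books, total_steps
    # and log are supplied by the job framework (assumed names and values):
    #
    #     from Queue import Queue
    #     from threading import Event
    #     notifications, abort = Queue(), Event()
    #     book.create_files_event((device_books, 1.0, total_steps), log, notifications, abort)
    #
    # notifications receives (fraction_done, message) tuples and abort.isSet() is polled between steps.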
def _create_files(self, perc, total, notifications, log):
'''Create files for create_files_event'''
files_to_send = {}
if self._settings['create_send_xray']:
if self.xray_formats_not_failing_exist() and self._statuses['xray'].status != StatusInfo.FAIL:
notifications.put((self._calculate_percentage(perc, total),
'Parsing {0} book data'.format(self.title_and_author)))
log('{0} Creating x-ray...'.format(datetime.now().strftime('%m-%d-%Y %H:%M:%S')))
log('{0} Parsing book data...'.format(datetime.now().strftime('%m-%d-%Y %H:%M:%S')))
for fmt, info in self.xray_formats_not_failing():
self._parse_book(fmt, info)
perc += 1
if self.xray_formats_not_failing_exist():
notifications.put((self._calculate_percentage(perc, total),
'Writing {0} x-ray'.format(self.title_and_author)))
log('{0} Writing x-ray...'.format(datetime.now().strftime('%m-%d-%Y %H:%M:%S')))
for fmt, info in self.xray_formats_not_failing():
self._write_xray(info)
perc += 1
if self._settings['create_send_author_profile']:
if self._statuses['author_profile'].status != StatusInfo.FAIL:
notifications.put((self._calculate_percentage(perc, total),
'Writing {0} author profile'.format(self.title_and_author)))
log('{0} Writing author profile...'.format(datetime.now().strftime('%m-%d-%Y %H:%M:%S')))
self._write_author_profile(files_to_send)
perc += 1
if self._settings['create_send_start_actions']:
if self._statuses['start_actions'].status != StatusInfo.FAIL:
notifications.put((self._calculate_percentage(perc, total),
'Writing {0} start actions'.format(self.title_and_author)))
log('{0} Writing start actions...'.format(datetime.now().strftime('%m-%d-%Y %H:%M:%S')))
self._write_start_actions(files_to_send)
perc += 1
if self._settings['create_send_end_actions']:
if self._statuses['end_actions'].status != StatusInfo.FAIL:
notifications.put((self._calculate_percentage(perc, total),
'Writing {0} end actions'.format(self.title_and_author)))
log('{0} Writing end actions...'.format(datetime.now().strftime('%m-%d-%Y %H:%M:%S')))
self._write_end_actions(files_to_send)
perc += 1
return files_to_send
def send_files_event(self, send_file_params, log, notifications, abort):
'''Sends files to device depending on user's settings'''
device_books, book_num, total = send_file_params
if abort.isSet():
return
notifications.put((self._calculate_percentage(book_num, total), self.title_and_author))
files_to_send = {}
checked_data = self._check_fmts_for_send_event(device_books, files_to_send)
create_xray_format_info, create_author_profile, create_start_actions, create_end_actions = checked_data
if create_xray_format_info or create_author_profile or create_start_actions or create_end_actions:
log('{0} Parsing {1} Goodreads data...'.format(datetime.now().strftime('%m-%d-%Y %H:%M:%S'),
self.title_and_author))
            create_xray = create_xray_format_info is not None
if create_xray and self._basic_info['sample_xray']:
self._parse_input_file()
else:
self._parse_goodreads_data(create_xray=create_xray, create_author_profile=create_author_profile,
create_start_actions=create_start_actions, create_end_actions=create_end_actions)
if self._statuses['general'].status is StatusInfo.FAIL:
return
if create_xray and self._statuses['xray'].status != StatusInfo.FAIL:
log('{0} Creating {1} x-ray...'.format(datetime.now().strftime('%m-%d-%Y %H:%M:%S'),
self.title_and_author))
self._parse_book(create_xray_format_info['format'],
self._xray_format_information[create_xray_format_info['format']])
if self._xray_format_information[create_xray_format_info['format']]['status'].status != StatusInfo.FAIL:
self._write_xray(self._xray_format_information[create_xray_format_info['format']])
if os.path.exists(create_xray_format_info['local']):
files_to_send['xray'] = create_xray_format_info
if create_author_profile and self._statuses['author_profile'].status != StatusInfo.FAIL:
log('{0} Creating {1} author profile...'.format(datetime.now().strftime('%m-%d-%Y %H:%M:%S'),
self.title_and_author))
self._write_author_profile(files_to_send)
if create_start_actions and self._statuses['start_actions'].status != StatusInfo.FAIL:
log('{0} Creating {1} start actions...'.format(datetime.now().strftime('%m-%d-%Y %H:%M:%S'),
self.title_and_author))
self._write_start_actions(files_to_send)
if create_end_actions and self._statuses['end_actions'].status != StatusInfo.FAIL:
log('{0} Creating {1} end actions...'.format(datetime.now().strftime('%m-%d-%Y %H:%M:%S'),
self.title_and_author))
self._write_end_actions(files_to_send)
self._update_general_statuses()
if len(files_to_send) > 0:
log('{0} Sending files to device...'.format(datetime.now().strftime('%m-%d-%Y %H:%M:%S')))
self._send_files(device_books, files_to_send)
def _update_general_statuses(self):
if self._settings['create_send_xray'] and self._statuses['xray'].status != StatusInfo.FAIL:
self._statuses['xray'].status = StatusInfo.SUCCESS
if self._settings['create_send_author_profile'] and self._statuses['author_profile'].status != StatusInfo.FAIL:
self._statuses['author_profile'].status = StatusInfo.SUCCESS
if self._settings['create_send_start_actions'] and self._statuses['start_actions'].status != StatusInfo.FAIL:
self._statuses['start_actions'].status = StatusInfo.SUCCESS
if self._settings['create_send_end_actions'] and self._statuses['end_actions'].status != StatusInfo.FAIL:
self._statuses['end_actions'].status = StatusInfo.SUCCESS
@staticmethod
def _calculate_percentage(amt_completed, total):
        '''Calculates the completed fraction of amt_completed over total; minimum returned is .01'''
        return max(float(amt_completed) / total, .01)  # float() guards against integer truncation under Python 2
def _parse_input_file(self):
'''Checks input file type and calls appropriate parsing function'''
filetype = os.path.splitext(self._basic_info['sample_xray'])[1][1:].lower()
if filetype == 'asc':
characters, settings = self._parse_input_asc()
quotes = []
elif filetype == 'json':
characters, settings, quotes = self._parse_input_json()
else:
return
self._process_goodreads_xray_results({'characters': characters, 'settings': settings, 'quotes': quotes})
def _parse_input_asc(self):
'''Gets character and setting information from sample x-ray file'''
cursor = connect(self._basic_info['sample_xray']).cursor()
characters = {}
settings = {}
for entity_desc in cursor.execute('SELECT * FROM entity_description').fetchall():
entity_id = entity_desc[3]
description = entity_desc[0]
entity = cursor.execute('SELECT * FROM entity WHERE id = "{0}"'.format(entity_id)).fetchall()
if not entity:
continue
entity_label = entity[0][1]
entity_type = entity[0][3]
if entity_type == 1:
aliases = self._basic_info['aliases'][entity_label] if entity_label in self._basic_info['aliases'] else []
characters[entity_id] = {'label': entity_label, 'description': description, 'aliases': aliases}
elif entity_type == 2:
settings[entity_id] = {'label': entity_label, 'description': description, 'aliases': []}
return characters, settings
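    # For reference (derived from the queries above, not from any official spec): _parse_input_asc
    # expects the sample x-ray file to be a SQLite database containing at least
    #
    #     entity(id, label, _, type, ...)           -- type 1 = character, type 2 = setting
    #     entity_description(text, _, _, entity)    -- 'text' holds the description for 'entity'
    #
    # Only the columns actually indexed above (entity[1], entity[3], entity_description[0],
    # entity_description[3]) matter here; any remaining columns are ignored.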
def _parse_input_json(self):
'''Gets characters, setting, and quote data from json file'''
entity_num = 1
characters = {}
settings = {}
data = json.load(open(self._basic_info['sample_xray']))
if 'characters' in data:
for name, char_data in data['characters'].items():
description = char_data['description'] if 'description' in char_data else 'No description found.'
aliases = self._basic_info['aliases'][name] if name in self._basic_info['aliases'] else []
characters[entity_num] = {'label': name, 'description': description, 'aliases': aliases}
entity_num += 1
if 'settings' in data:
for setting, char_data in data['settings'].items():
description = char_data['description'] if 'description' in char_data else 'No description found.'
aliases = self._basic_info['aliases'][setting] if setting in self._basic_info['aliases'] else []
settings[entity_num] = {'label': setting, 'description': description, 'aliases': aliases}
entity_num += 1
quotes = data['quotes'] if 'quotes' in data else []
return characters, settings, quotes
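    # Example of the JSON layout _parse_input_json accepts (constructed from the parsing code above;
    # all three top-level keys are optional and 'description' falls back to 'No description found.'):
    #
    #     {
    #       "characters": {"Alice": {"description": "The protagonist."}},
    #       "settings":   {"Wonderland": {"description": "Where it all happens."}},
    #       "quotes":     ["Curiouser and curiouser!"]
    #     }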
def _parse_goodreads_data(self, create_xray=None, create_author_profile=None,
create_start_actions=None, create_end_actions=None):
if create_xray is None:
create_xray = self._settings['create_send_xray']
if create_author_profile is None:
create_author_profile = self._settings['create_send_author_profile']
if create_start_actions is None:
create_start_actions = self._settings['create_send_start_actions']
if create_end_actions is None:
create_end_actions = self._settings['create_send_end_actions']
try:
goodreads_data = GoodreadsParser(self._basic_info['goodreads_url'], self._goodreads_conn,
self._basic_info['asin'])
results = goodreads_data.parse(create_xray=create_xray, create_author_profile=create_author_profile,
create_start_actions=create_start_actions, create_end_actions=create_end_actions)
compiled_xray, compiled_author_profile, compiled_start_actions, compiled_end_actions = results
except PageDoesNotExist:
self._statuses['general'].set(StatusInfo.FAIL, StatusInfo.F_COULD_NOT_PARSE_GOODREADS_DATA)
return
if create_xray:
self._process_goodreads_xray_results(compiled_xray)
if create_author_profile:
self._process_goodreads_author_profile_results(compiled_author_profile)
if create_start_actions:
self._process_goodreads_start_actions_results(compiled_start_actions)
if create_end_actions:
self._process_goodreads_end_actions_results(compiled_end_actions)
def _process_goodreads_xray_results(self, compiled_xray):
'''Sets aliases in book settings and basic info if compiled xray has data; sets status to fail if it doesn't'''
if compiled_xray:
self._goodreads_data['xray'] = compiled_xray
for char in self._goodreads_data['xray']['characters'].values():
if char['label'] not in self._basic_info['aliases'].keys():
self._basic_info['aliases'][char['label']] = char['aliases']
self._book_settings.prefs['aliases'] = self._basic_info['aliases']
else:
self._statuses['xray'].set(StatusInfo.FAIL, StatusInfo.F_UNABLE_TO_CREATE_XRAY)
def _process_goodreads_author_profile_results(self, compiled_author_profile):
'''Sets author profile in goodreads data if compiled author profile has data; sets status to fail if it doesn't'''
if compiled_author_profile:
self._goodreads_data['author_profile'] = compiled_author_profile
else:
self._statuses['author_profile'].set(StatusInfo.FAIL, StatusInfo.F_UNABLE_TO_CREATE_AUTHOR_PROFILE)
def _process_goodreads_start_actions_results(self, compiled_start_actions):
'''Sets start actions in goodreads data if compiled start actions has data; sets status to fail if it doesn't'''
if compiled_start_actions:
self._goodreads_data['start_actions'] = compiled_start_actions
else:
self._statuses['start_actions'].set(StatusInfo.FAIL, StatusInfo.F_UNABLE_TO_CREATE_START_ACTIONS)
def _process_goodreads_end_actions_results(self, compiled_end_actions):
'''Sets end actions in goodreads data if compiled end actions has data; sets status to fail if it doesn't'''
if compiled_end_actions:
self._goodreads_data['end_actions'] = compiled_end_actions
else:
self._statuses['end_actions'].set(StatusInfo.FAIL, StatusInfo.F_UNABLE_TO_CREATE_END_ACTIONS)
def _parse_book(self, fmt, info):
'''Will parse book using the format info given'''
try:
book_parser = BookParser(fmt, info['local_book'], self._goodreads_data['xray'], self._basic_info['aliases'])
info['parsed_book_data'] = book_parser.parse()
except MobiError:
info['status'].set(StatusInfo.FAIL, StatusInfo.F_UNABLE_TO_PARSE_BOOK)
def _check_for_existing_files(self):
'''Checks if files exist and fails for that type if they do'''
if self._settings['create_send_xray']:
for fmt_info in self.xray_formats_not_failing():
info = fmt_info[1]
if os.path.exists(os.path.join(info['local_xray'],
'XRAY.entities.{0}.asc'.format(self._basic_info['asin']))):
info['status'].set(StatusInfo.FAIL, StatusInfo.F_PREFS_NOT_OVERWRITE_LOCAL_XRAY)
if self._settings['create_send_author_profile']:
if os.path.exists(os.path.join(self._basic_info['local_non_xray'],
'AuthorProfile.profile.{0}.asc'.format(self._basic_info['asin']))):
self._statuses['author_profile'].set(StatusInfo.FAIL, StatusInfo.F_PREFS_NOT_OVERWRITE_LOCAL_AUTHOR_PROFILE)
if self._settings['create_send_start_actions']:
if os.path.exists(os.path.join(self._basic_info['local_non_xray'],
'StartActions.data.{0}.asc'.format(self._basic_info['asin']))):
self._statuses['start_actions'].set(StatusInfo.FAIL, StatusInfo.F_PREFS_NOT_OVERWRITE_LOCAL_START_ACTIONS)
if self._settings['create_send_end_actions']:
if os.path.exists(os.path.join(self._basic_info['local_non_xray'],
'EndActions.data.{0}.asc'.format(self._basic_info['asin']))):
self._statuses['end_actions'].set(StatusInfo.FAIL, StatusInfo.F_PREFS_NOT_OVERWRITE_LOCAL_END_ACTIONS)
def _write_xray(self, info):
'''Writes x-ray file using goodreads and parsed book data; Will save in local directory'''
try:
filename = os.path.join(info['local_xray'], 'XRAY.entities.{0}.asc'.format(self._basic_info['asin']))
if os.path.exists(filename):
os.remove(filename)
except OSError:
info['status'].set(StatusInfo.FAIL, StatusInfo.F_REMOVE_LOCAL_XRAY)
xray_db_writer = XRayDBWriter(info['local_xray'], self._basic_info['goodreads_url'],
self._basic_info['asin'], info['parsed_book_data'])
xray_db_writer.write_xray()
if not os.path.exists(os.path.join(info['local_xray'], 'XRAY.entities.{0}.asc'.format(self._basic_info['asin']))):
info['status'].set(StatusInfo.FAIL, StatusInfo.F_UNABLE_TO_WRITE_XRAY)
return
info['status'].status = StatusInfo.SUCCESS
def _write_author_profile(self, files_to_send):
'''Writes author profile file using goodreads; Will save in local directory'''
try:
filename = os.path.join(self._basic_info['local_non_xray'],
'AuthorProfile.profile.{0}.asc'.format(self._basic_info['asin']))
if os.path.exists(filename):
os.remove(filename)
except OSError:
self._statuses['author_profile'].set(StatusInfo.FAIL, StatusInfo.F_REMOVE_LOCAL_AUTHOR_PROFILE)
try:
with open(os.path.join(self._basic_info['local_non_xray'],
'AuthorProfile.profile.{0}.asc'.format(self._basic_info['asin'])),
'w+') as author_profile:
json.dump(self._goodreads_data['author_profile'], author_profile)
except OSError:
self._statuses['author_profile'].set(StatusInfo.FAIL, StatusInfo.F_UNABLE_TO_WRITE_AUTHOR_PROFILE)
return
if self._settings['send_to_device']:
filename = 'AuthorProfile.profile.{0}.asc'.format(self._basic_info['asin'])
local_file = os.path.join(self._basic_info['local_non_xray'], filename)
files_to_send['author_profile'] = {'local': local_file, 'filename': filename}
def _write_start_actions(self, files_to_send):
'''Writes start actions file using goodreads; Will save in local directory'''
try:
filename = os.path.join(self._basic_info['local_non_xray'],
'StartActions.data.{0}.asc'.format(self._basic_info['asin']))
if os.path.exists(filename):
os.remove(filename)
except OSError:
self._statuses['start_actions'].set(StatusInfo.FAIL, StatusInfo.F_REMOVE_LOCAL_START_ACTIONS)
try:
with open(os.path.join(self._basic_info['local_non_xray'],
'StartActions.data.{0}.asc'.format(self._basic_info['asin'])),
'w+') as start_actions:
json.dump(self._goodreads_data['start_actions'], start_actions)
except OSError:
self._statuses['start_actions'].set(StatusInfo.FAIL, StatusInfo.F_UNABLE_TO_WRITE_START_ACTIONS)
return
if self._settings['send_to_device']:
filename = 'StartActions.data.{0}.asc'.format(self._basic_info['asin'])
local_file = os.path.join(self._basic_info['local_non_xray'], filename)
files_to_send['start_actions'] = {'local': local_file, 'filename': filename}
def _write_end_actions(self, files_to_send):
'''Writes end actions file using goodreads; Will save in local directory'''
try:
filename = os.path.join(self._basic_info['local_non_xray'],
'EndActions.data.{0}.asc'.format(self._basic_info['asin']))
if os.path.exists(filename):
os.remove(filename)
except OSError:
self._statuses['end_actions'].set(StatusInfo.FAIL, StatusInfo.F_REMOVE_LOCAL_END_ACTIONS)
try:
with open(os.path.join(self._basic_info['local_non_xray'],
'EndActions.data.{0}.asc'.format(self._basic_info['asin'])),
'w+') as end_actions:
json.dump(self._goodreads_data['end_actions'], end_actions)
except OSError:
self._statuses['end_actions'].set(StatusInfo.FAIL, StatusInfo.F_UNABLE_TO_WRITE_END_ACTIONS)
return
if self._settings['send_to_device']:
filename = 'EndActions.data.{0}.asc'.format(self._basic_info['asin'])
local_file = os.path.join(self._basic_info['local_non_xray'], filename)
files_to_send['end_actions'] = {'local': local_file, 'filename': filename}
def _check_fmts_for_create_event(self, device_books, files_to_send):
'''Compiles dict of file type info to use when creating files'''
if len(device_books) == 0 or not device_books.has_key(self._basic_info['book_id']):
if self._settings['create_send_xray'] and self.xray_formats_not_failing_exist():
self._statuses['xray_send'].set(StatusInfo.FAIL, StatusInfo.F_BOOK_NOT_ON_DEVICE)
if (self._settings['create_send_author_profile'] and
self._statuses['author_profile'].status == StatusInfo.SUCCESS):
self._statuses['author_profile_send'].set(StatusInfo.FAIL, StatusInfo.F_BOOK_NOT_ON_DEVICE)
if files_to_send.has_key('author_profile'):
del files_to_send['author_profile']
if self._settings['create_send_start_actions'] and self._statuses['start_actions'].status == StatusInfo.SUCCESS:
self._statuses['start_actions_send'].set(StatusInfo.FAIL, StatusInfo.F_BOOK_NOT_ON_DEVICE)
if files_to_send.has_key('start_actions'):
del files_to_send['start_actions']
if self._settings['create_send_end_actions'] and self._statuses['end_actions'].status == StatusInfo.SUCCESS:
self._statuses['end_actions_send'].set(StatusInfo.FAIL, StatusInfo.F_BOOK_NOT_ON_DEVICE)
if files_to_send.has_key('end_actions'):
del files_to_send['end_actions']
return
first_fmt = device_books[self._basic_info['book_id']].keys()[0]
self._basic_info['device_sdr'] = device_books[self._basic_info['book_id']][first_fmt]['device_sdr']
if not os.path.exists(self._basic_info['device_sdr']):
os.mkdir(self._basic_info['device_sdr'])
if self._settings['create_send_xray'] and self.xray_formats_not_failing_exist():
# figure out which format to send
self._check_xray_format_to_create(device_books, files_to_send)
def _check_xray_format_to_create(self, device_books, files_to_send):
'''Compiles dict of file type to use for x-ray'''
formats_not_failing = [fmt for fmt, info in self.xray_formats_not_failing()]
formats_on_device = device_books[self._basic_info['book_id']].keys()
common_formats = list(set(formats_on_device).intersection(formats_not_failing))
if len(common_formats) == 0:
for fmt, info in self.xray_formats_not_failing():
info['status'].status = StatusInfo.SUCCESS
self._statuses['xray_send'].set(StatusInfo.FAIL, StatusInfo.F_BOOK_NOT_ON_DEVICE)
else:
format_picked = self._settings['file_preference']
if len(common_formats) == 1:
format_picked = common_formats[0]
for fmt, info in self.xray_formats_not_failing():
if fmt != format_picked:
info['status'].status = StatusInfo.SUCCESS
continue
filename = 'XRAY.entities.{0}.asc'.format(self._basic_info['asin'])
local_file = os.path.join(info['local_xray'], filename)
files_to_send['xray'] = {'local': local_file, 'filename': filename, 'format': format_picked}
def _check_fmts_for_send_event(self, device_books, files_to_send):
'''Compiles dict of file type info to use when sending files'''
create_xray = None
create_author_profile = False
create_start_actions = False
create_end_actions = False
if not device_books.has_key(self._basic_info['book_id']):
if self._settings['create_send_xray']:
self._statuses['xray_send'].set(StatusInfo.FAIL, StatusInfo.F_BOOK_NOT_ON_DEVICE)
if self._settings['create_send_author_profile']:
self._statuses['author_profile_send'].set(StatusInfo.FAIL, StatusInfo.F_BOOK_NOT_ON_DEVICE)
if self._settings['create_send_start_actions']:
self._statuses['start_actions_send'].set(StatusInfo.FAIL, StatusInfo.F_BOOK_NOT_ON_DEVICE)
if self._settings['create_send_end_actions']:
self._statuses['end_actions_send'].set(StatusInfo.FAIL, StatusInfo.F_BOOK_NOT_ON_DEVICE)
return create_xray, create_author_profile, create_start_actions, create_end_actions
first_fmt = device_books[self._basic_info['book_id']].keys()[0]
self._basic_info['device_sdr'] = device_books[self._basic_info['book_id']][first_fmt]['device_sdr']
if not os.path.exists(self._basic_info['device_sdr']):
os.mkdir(self._basic_info['device_sdr'])
if self._settings['create_send_xray']:
# figure out which format to send
create_xray = self._check_xray_fmt_for_send(device_books, files_to_send)
if self._settings['create_send_author_profile']:
create_author_profile = self._check_author_profile_for_send(files_to_send)
if self._settings['create_send_start_actions']:
create_start_actions = self._check_start_actions_for_send(files_to_send)
if self._settings['create_send_end_actions']:
create_end_actions = self._check_end_actions_for_send(files_to_send)
return create_xray, create_author_profile, create_start_actions, create_end_actions
def _check_xray_fmt_for_send(self, device_books, files_to_send):
'''Check if there's a valid x-ray to send'''
formats_not_failing = [fmt for fmt, info in self._xray_format_information.items()]
formats_on_device = device_books[self._basic_info['book_id']].keys()
common_formats = list(set(formats_on_device).intersection(formats_not_failing))
if len(common_formats) == 0:
for fmt, info in self._xray_format_information.items():
info['status'].status = StatusInfo.SUCCESS
self._statuses['xray_send'].set(StatusInfo.FAIL, StatusInfo.F_BOOK_NOT_ON_DEVICE)
else:
format_picked = self._settings['file_preference']
if len(common_formats) == 1:
format_picked = common_formats[0]
filename = 'XRAY.entities.{0}.asc'.format(self._basic_info['asin'])
local_file = os.path.join(self._xray_format_information[format_picked]['local_xray'], filename)
if (os.path.exists(os.path.join(self._basic_info['device_sdr'], filename)) and not
self._settings['overwrite_when_sending']):
self._statuses['xray_send'].set(StatusInfo.FAIL, StatusInfo.F_PREFS_NOT_OVERWRITE_DEVICE_XRAY)
else:
if os.path.exists(local_file):
files_to_send['xray'] = {'local': local_file, 'filename': filename, 'format': format_picked}
else:
if not self._settings['create_files_when_sending']:
self._statuses['xray_send'].set(StatusInfo.FAIL, StatusInfo.F_PREFS_SET_TO_NOT_CREATE_XRAY)
else:
return {'local': local_file, 'filename': filename, 'format': format_picked}
return None
def _check_author_profile_for_send(self, files_to_send):
'''Check if there's a valid author profile to send'''
filename = 'AuthorProfile.profile.{0}.asc'.format(self._basic_info['asin'])
local_file = os.path.join(self._basic_info['local_non_xray'], filename)
if (os.path.exists(os.path.join(self._basic_info['device_sdr'], filename)) and not
self._settings['overwrite_when_sending']):
self._statuses['author_profile_send'].set(StatusInfo.FAIL,
StatusInfo.F_PREFS_NOT_OVERWRITE_DEVICE_AUTHOR_PROFILE)
else:
if os.path.exists(local_file):
files_to_send['author_profile'] = {'local': local_file, 'filename': filename}
else:
if not self._settings['create_files_when_sending']:
self._statuses['author_profile_send'].set(StatusInfo.FAIL, StatusInfo.F_PREFS_SET_TO_NOT_CREATE_XRAY)
else:
return True
return False
def _check_start_actions_for_send(self, files_to_send):
'''Check if there's a valid start actions file to send'''
filename = 'StartActions.data.{0}.asc'.format(self._basic_info['asin'])
local_file = os.path.join(self._basic_info['local_non_xray'], filename)
if (os.path.exists(os.path.join(self._basic_info['device_sdr'], filename)) and not
self._settings['overwrite_when_sending']):
self._statuses['start_actions_send'].set(StatusInfo.FAIL, StatusInfo.F_PREFS_NOT_OVERWRITE_DEVICE_START_ACTIONS)
else:
if os.path.exists(local_file):
files_to_send['start_actions'] = {'local': local_file, 'filename': filename}
else:
if not self._settings['create_files_when_sending']:
self._statuses['start_actions_send'].set(StatusInfo.FAIL, StatusInfo.F_PREFS_SET_TO_NOT_CREATE_XRAY)
else:
return True
return False
def _check_end_actions_for_send(self, files_to_send):
'''Check if there's a valid end actions file to send'''
filename = 'EndActions.data.{0}.asc'.format(self._basic_info['asin'])
local_file = os.path.join(self._basic_info['local_non_xray'], filename)
if (os.path.exists(os.path.join(self._basic_info['device_sdr'], filename)) and not
self._settings['overwrite_when_sending']):
self._statuses['end_actions_send'].set(StatusInfo.FAIL, StatusInfo.F_PREFS_NOT_OVERWRITE_DEVICE_END_ACTIONS)
else:
if os.path.exists(local_file):
files_to_send['end_actions'] = {'local': local_file, 'filename': filename}
else:
if not self._settings['create_files_when_sending']:
self._statuses['end_actions_send'].set(StatusInfo.FAIL, StatusInfo.F_PREFS_SET_TO_NOT_CREATE_XRAY)
else:
return True
return False
def _send_files(self, device_books, files_to_send):
'''Sends files to device depending on list compiled in files_to_send'''
number_of_failed_asin_updates = 0
formats_on_device = device_books[self._basic_info['book_id']].keys()
try:
for fmt in formats_on_device:
with open(device_books[self._basic_info['book_id']][fmt]['device_book'], 'r+b') as stream:
mobi_updater = ASINUpdater(stream)
mobi_updater.update(self._basic_info['asin'])
except MobiError:
number_of_failed_asin_updates += 1
            # 'send_to_device' is a boolean pref; the queued files live in files_to_send
            if (self._settings['create_send_xray'] and files_to_send.has_key('xray') and
                    fmt == files_to_send['xray']['format']):
self._statuses['xray_send'].set(StatusInfo.FAIL, StatusInfo.F_UNABLE_TO_UPDATE_ASIN)
self._basic_info['xray_send_fmt'] = files_to_send['xray']['format']
if files_to_send.has_key('xray'):
del files_to_send['xray']
if number_of_failed_asin_updates == len(formats_on_device):
if self._settings['create_send_author_profile']:
self._statuses['author_profile_send'].set(StatusInfo.FAIL, StatusInfo.F_UNABLE_TO_UPDATE_ASIN)
if self._settings['create_send_start_actions']:
self._statuses['start_actions_send'].set(StatusInfo.FAIL, StatusInfo.F_UNABLE_TO_UPDATE_ASIN)
if self._settings['create_send_end_actions']:
self._statuses['end_actions_send'].set(StatusInfo.FAIL, StatusInfo.F_UNABLE_TO_UPDATE_ASIN)
return
# temporarily rename current file in case send fails
for filetype, info in files_to_send.items():
self._send_file(filetype, info)
def _send_file(self, filetype, info):
'''Send file to device and update status accordingly'''
device_filename = os.path.join(self._basic_info['device_sdr'], info['filename'])
if os.path.exists(device_filename):
os.rename(device_filename, '{0}.old'.format(device_filename))
copy(info['local'], self._basic_info['device_sdr'])
if os.path.exists(device_filename):
if os.path.exists('{0}.old'.format(device_filename)):
os.remove('{0}.old'.format(device_filename))
if filetype == 'xray':
self._statuses['xray_send'].status = StatusInfo.SUCCESS
self._basic_info['xray_send_fmt'] = info['format']
elif filetype == 'author_profile':
self._statuses['author_profile_send'].status = StatusInfo.SUCCESS
elif filetype == 'start_actions':
self._statuses['start_actions_send'].status = StatusInfo.SUCCESS
elif filetype == 'end_actions':
self._statuses['end_actions_send'].status = StatusInfo.SUCCESS
else:
os.rename('{0}.old'.format(device_filename), device_filename)
if filetype == 'xray':
self._statuses['xray_send'].set(StatusInfo.FAIL, StatusInfo.F_UNABLE_TO_SEND_XRAY)
elif filetype == 'author_profile':
self._statuses['author_profile_send'].set(StatusInfo.FAIL, StatusInfo.F_UNABLE_TO_SEND_AUTHOR_PROFILE)
elif filetype == 'start_actions':
self._statuses['start_actions_send'].set(StatusInfo.FAIL, StatusInfo.F_UNABLE_TO_SEND_START_ACTIONS)
elif filetype == 'end_actions':
self._statuses['end_actions_send'].set(StatusInfo.FAIL, StatusInfo.F_UNABLE_TO_SEND_END_ACTIONS)
class ASINUpdater(MetadataUpdater):
'''Class to modify MOBI book'''
def update(self, asin):
'''This will update ASIN'''
def update_exth_record(rec):
            '''Adds a new EXTH record and drops any original record with the same id'''
recs.append(rec)
if rec[0] in self.original_exth_records:
self.original_exth_records.pop(rec[0])
if self.type != "BOOKMOBI":
raise MobiError("Setting ASIN only supported for MOBI files of type 'BOOK'.\n"
"\tThis is a '%s' file of type '%s'" % (self.type[0:4], self.type[4:8]))
recs = []
original = None
if 113 in self.original_exth_records:
original = self.original_exth_records[113]
elif 504 in self.original_exth_records:
original = self.original_exth_records[504]
if original == asin:
return
update_exth_record((113, asin.encode(self.codec, 'replace')))
update_exth_record((504, asin.encode(self.codec, 'replace')))
# Include remaining original EXTH fields
for record_id in sorted(self.original_exth_records):
recs.append((record_id, self.original_exth_records[record_id]))
recs = sorted(recs, key=lambda x: (x[0], x[0]))
exth = StringIO()
for code, data in recs:
exth.write(struct.pack('>II', code, len(data) + 8))
exth.write(data)
exth = exth.getvalue()
trail = len(exth) % 4
pad = '\0' * (4 - trail) # Always pad w/ at least 1 byte
exth = ''.join(['EXTH', struct.pack('>II', len(exth) + 12, len(recs)), exth, pad])
if getattr(self, 'exth', None) is None:
raise MobiError('No existing EXTH record. Cannot update ASIN.')
self.create_exth(exth=exth)
return
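# A rough usage sketch for ASINUpdater (the path and ASIN below are made up,
# and a MOBI file of type 'BOOK' is assumed):
#
#     with open('/path/to/book.mobi', 'r+b') as stream:
#         ASINUpdater(stream).update('B000000000')
#
# The updater rewrites EXTH records 113 and 504 in place so the device pairs
# the sideloaded book with its sidecar files under that ASIN.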
| gpl-3.0 | 4,600,604,134,167,618,000 | 53.64527 | 124 | 0.593673 | false |
arnif/CouchPotatoServer | couchpotato/core/settings/__init__.py | 1 | 6612 | from __future__ import with_statement
from couchpotato.api import addApiView
from couchpotato.core.event import addEvent, fireEvent
from couchpotato.core.helpers.encoding import isInt, toUnicode
from couchpotato.core.helpers.request import getParams, jsonified
from couchpotato.core.helpers.variable import mergeDicts, tryInt
from couchpotato.core.settings.model import Properties
import ConfigParser
import os.path
import time
import traceback
class Settings(object):
options = {}
types = {}
def __init__(self):
addApiView('settings', self.view, docs = {
'desc': 'Return the options and its values of settings.conf. Including the default values and group ordering used on the settings page.',
'return': {'type': 'object', 'example': """{
// objects like in __init__.py of plugin
"options": {
"moovee" : {
"groups" : [{
"description" : "SD movies only",
"name" : "#alt.binaries.moovee",
"options" : [{
"default" : false,
"name" : "enabled",
"type" : "enabler"
}],
"tab" : "providers"
}],
"name" : "moovee"
}
},
// object structured like settings.conf
"values": {
"moovee": {
"enabled": false
}
}
}"""}
})
addApiView('settings.save', self.saveView, docs = {
'desc': 'Save setting to config file (settings.conf)',
'params': {
'section': {'desc': 'The section name in settings.conf'},
'option': {'desc': 'The option name'},
'value': {'desc': 'The value you want to save'},
}
})
def setFile(self, config_file):
self.file = config_file
self.p = ConfigParser.RawConfigParser()
self.p.read(config_file)
from couchpotato.core.logger import CPLog
self.log = CPLog(__name__)
self.connectEvents()
def parser(self):
return self.p
def sections(self):
return self.p.sections()
def connectEvents(self):
addEvent('settings.options', self.addOptions)
addEvent('settings.register', self.registerDefaults)
addEvent('settings.save', self.save)
def registerDefaults(self, section_name, options = {}, save = True):
self.addSection(section_name)
for option_name, option in options.iteritems():
self.setDefault(section_name, option_name, option.get('default', ''))
if option.get('type'):
self.setType(section_name, option_name, option.get('type'))
if save:
            self.save()
def set(self, section, option, value):
return self.p.set(section, option, value)
def get(self, option = '', section = 'core', default = '', type = None):
try:
try: type = self.types[section][option]
except: type = 'unicode' if not type else type
if hasattr(self, 'get%s' % type.capitalize()):
return getattr(self, 'get%s' % type.capitalize())(section, option)
else:
return self.getUnicode(section, option)
except:
return default
def getEnabler(self, section, option):
return self.getBool(section, option)
def getBool(self, section, option):
try:
return self.p.getboolean(section, option)
except:
return self.p.get(section, option)
def getInt(self, section, option):
try:
return self.p.getint(section, option)
except:
return tryInt(self.p.get(section, option))
def getFloat(self, section, option):
try:
return self.p.getfloat(section, option)
except:
return tryInt(self.p.get(section, option))
def getUnicode(self, section, option):
value = self.p.get(section, option).decode('unicode_escape')
return toUnicode(value).strip()
def getValues(self):
values = {}
for section in self.sections():
values[section] = {}
for option in self.p.items(section):
(option_name, option_value) = option
values[section][option_name] = self.get(option_name, section)
return values
def save(self):
with open(self.file, 'wb') as configfile:
self.p.write(configfile)
self.log.debug('Saved settings')
def addSection(self, section):
if not self.p.has_section(section):
self.p.add_section(section)
def setDefault(self, section, option, value):
if not self.p.has_option(section, option):
self.p.set(section, option, value)
def setType(self, section, option, type):
if not self.types.get(section):
self.types[section] = {}
self.types[section][option] = type
def addOptions(self, section_name, options):
if not self.options.get(section_name):
self.options[section_name] = options
else:
self.options[section_name] = mergeDicts(self.options[section_name], options)
def getOptions(self):
return self.options
def view(self):
return jsonified({
'options': self.getOptions(),
'values': self.getValues()
})
def saveView(self):
params = getParams()
section = params.get('section')
option = params.get('name')
value = params.get('value')
# See if a value handler is attached, use that as value
new_value = fireEvent('setting.save.%s.%s' % (section, option), value, single = True)
self.set(section, option, (new_value if new_value else value).encode('unicode_escape'))
self.save()
return jsonified({
'success': True,
})
def getProperty(self, identifier):
from couchpotato import get_session
db = get_session()
prop = None
try:
propert = db.query(Properties).filter_by(identifier = identifier).first()
prop = propert.value
except:
pass
return prop
def setProperty(self, identifier, value = ''):
from couchpotato import get_session
db = get_session()
p = db.query(Properties).filter_by(identifier = identifier).first()
if not p:
p = Properties()
db.add(p)
p.identifier = identifier
p.value = toUnicode(value)
db.commit()
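# Rough usage sketch (the section/option names below are illustrative, not part
# of the real configuration):
#
#     settings = Settings()
#     settings.setFile('./settings.conf')
#     settings.registerDefaults('core', {
#         'debug': {'default': False, 'type': 'bool'},
#     })
#     if settings.get('debug', section = 'core'):
#         ...
#
# registerDefaults() only fills in missing options via setDefault(), so values
# already present in settings.conf are kept.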
| gpl-3.0 | -3,391,935,378,621,741,600 | 28.783784 | 149 | 0.568966 | false |
gisdev-km/pyras | pyras/io/hecras/read_ras.py | 1 | 2308 | """
"""
class SimpleAttribute:
"""
"""
def __init__(self, name, options=['SI Units', 'English Units']):
pass
class NamedAttribute:
"""
"""
def __init__(self, name, type_, value=None, separator='=',
max_length=None):
pass
class TagAttribute:
"""
"""
def __init__(self, name, start_tag, end_tag, type_, value=None,
max_length=None):
pass
def _generic_reader():
""" """
def read_project(filename):
"""
Proj Title=new_project
Default Exp/Contr=0.3,0.1
SI Units
Y Axis Title=Elevation
X Axis Title(PF)=Main Channel Distance
X Axis Title(XS)=Station
BEGIN DESCRIPTION:
Example text
END DESCRIPTION:
DSS Start Date=
DSS Start Time=
DSS End Date=
DSS End Time=
DSS Export Filename=
DSS Export Rating Curves= 0
DSS Export Rating Curve Sorted= 0
DSS Export Volume Flow Curves= 0
DXF Filename=
DXF OffsetX= 0
DXF OffsetY= 0
DXF ScaleX= 1
DXF ScaleY= 10
GIS Export Profiles= 0
"""
    sep = '='
tags = {
'description': ['BEGIN DESCRIPTION:', 'END DESCRIPTION:']
}
fixed = {
        'units': ['SI Units', 'English Units']
}
keys = {
'Proj Title': '',
'Default Exp/Contr': '=0.3,0.1',
'Current Plan': '=p03',
'Geom File': '=g01',
'Flow File': '=f01',
'Plan File': '=p01',
        'Y Axis Title': '=Elevation',
'X Axis Title(PF)': '=Main Channel Distance',
'X Axis Title(XS)': '=Station',
'DSS Start Date': '=',
'DSS Start Time': '=',
'DSS End Date': '=',
'DSS End Time': '=',
'DSS Export Filename': '=',
'DSS Export Rating Curves': '= 0',
'DSS Export Rating Curve Sorted': '= 0',
'DSS Export Volume Flow Curves': '= 0',
'DXF Filename': '=',
'DXF OffsetX': '= 0',
'DXF OffsetY': '= 0',
'DXF ScaleX': '= 1',
'DXF ScaleY': '= 10',
'GIS Export Profiles': '= 0'
}
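    # A minimal parsing sketch (not wired up yet; names here are illustrative):
    # the .prj file is line oriented, so "Key=Value" pairs can be split on the
    # first separator and checked against the dicts above, e.g.
    #
    #     with open(filename) as fobj:
    #         for line in fobj:
    #             if sep in line:
    #                 key, value = line.split(sep, 1)
    #
    # Fixed flags ('SI Units') and tag blocks ('BEGIN DESCRIPTION:' ...
    # 'END DESCRIPTION:') would need separate handling.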
def read_geometry(filename):
""" """
def read_plan(filename):
""" """
def read_boundary(filename):
""" """
def test_project():
""" """
    r'D:\Users\penac1\Dropbox (Personal)\it\repos\git\pyras\temp_examples\Steady Examples'
if __name__ == '__main__':
test_project()
| mit | 6,103,303,954,101,331,000 | 18.726496 | 89 | 0.526863 | false |
kennedyshead/home-assistant | homeassistant/components/risco/__init__.py | 1 | 4766 | """The Risco integration."""
import asyncio
from datetime import timedelta
import logging
from pyrisco import CannotConnectError, OperationError, RiscoAPI, UnauthorizedError
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
CONF_PASSWORD,
CONF_PIN,
CONF_SCAN_INTERVAL,
CONF_USERNAME,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.storage import Store
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DATA_COORDINATOR, DEFAULT_SCAN_INTERVAL, DOMAIN, EVENTS_COORDINATOR
PLATFORMS = ["alarm_control_panel", "binary_sensor", "sensor"]
UNDO_UPDATE_LISTENER = "undo_update_listener"
LAST_EVENT_STORAGE_VERSION = 1
LAST_EVENT_TIMESTAMP_KEY = "last_event_timestamp"
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Risco from a config entry."""
data = entry.data
risco = RiscoAPI(data[CONF_USERNAME], data[CONF_PASSWORD], data[CONF_PIN])
try:
await risco.login(async_get_clientsession(hass))
except CannotConnectError as error:
raise ConfigEntryNotReady() from error
except UnauthorizedError:
_LOGGER.exception("Failed to login to Risco cloud")
return False
scan_interval = entry.options.get(CONF_SCAN_INTERVAL, DEFAULT_SCAN_INTERVAL)
coordinator = RiscoDataUpdateCoordinator(hass, risco, scan_interval)
await coordinator.async_config_entry_first_refresh()
events_coordinator = RiscoEventsDataUpdateCoordinator(
hass, risco, entry.entry_id, 60
)
undo_listener = entry.add_update_listener(_update_listener)
hass.data.setdefault(DOMAIN, {})
hass.data[DOMAIN][entry.entry_id] = {
DATA_COORDINATOR: coordinator,
UNDO_UPDATE_LISTENER: undo_listener,
EVENTS_COORDINATOR: events_coordinator,
}
async def start_platforms():
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_setup(entry, platform)
for platform in PLATFORMS
]
)
await events_coordinator.async_refresh()
hass.async_create_task(start_platforms())
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Unload a config entry."""
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
if unload_ok:
hass.data[DOMAIN][entry.entry_id][UNDO_UPDATE_LISTENER]()
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
async def _update_listener(hass: HomeAssistant, entry: ConfigEntry):
"""Handle options update."""
await hass.config_entries.async_reload(entry.entry_id)
class RiscoDataUpdateCoordinator(DataUpdateCoordinator):
"""Class to manage fetching risco data."""
def __init__(self, hass, risco, scan_interval):
"""Initialize global risco data updater."""
self.risco = risco
interval = timedelta(seconds=scan_interval)
super().__init__(
hass,
_LOGGER,
name=DOMAIN,
update_interval=interval,
)
async def _async_update_data(self):
"""Fetch data from risco."""
try:
return await self.risco.get_state()
except (CannotConnectError, UnauthorizedError, OperationError) as error:
raise UpdateFailed(error) from error
class RiscoEventsDataUpdateCoordinator(DataUpdateCoordinator):
"""Class to manage fetching risco data."""
def __init__(self, hass, risco, eid, scan_interval):
"""Initialize global risco data updater."""
self.risco = risco
self._store = Store(
hass, LAST_EVENT_STORAGE_VERSION, f"risco_{eid}_last_event_timestamp"
)
interval = timedelta(seconds=scan_interval)
super().__init__(
hass,
_LOGGER,
name=f"{DOMAIN}_events",
update_interval=interval,
)
async def _async_update_data(self):
"""Fetch data from risco."""
last_store = await self._store.async_load() or {}
last_timestamp = last_store.get(
LAST_EVENT_TIMESTAMP_KEY, "2020-01-01T00:00:00Z"
)
try:
events = await self.risco.get_events(last_timestamp, 10)
except (CannotConnectError, UnauthorizedError, OperationError) as error:
raise UpdateFailed(error) from error
if len(events) > 0:
await self._store.async_save({LAST_EVENT_TIMESTAMP_KEY: events[0].time})
return events
| apache-2.0 | 229,201,222,406,742,140 | 33.042857 | 88 | 0.671423 | false |
pism/pism | examples/python/ssa_tests/ssa_testj.py | 1 | 4091 | #! /usr/bin/env python3
#
# Copyright (C) 2011, 2012, 2013, 2014, 2015, 2016, 2018 Ed Bueler and Constantine Khroulev and David Maxwell
#
# This file is part of PISM.
#
# PISM is free software; you can redistribute it and/or modify it under the
# terms of the GNU General Public License as published by the Free Software
# Foundation; either version 3 of the License, or (at your option) any later
# version.
#
# PISM is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License
# along with PISM; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import PISM
from PISM.util import convert
class testj(PISM.ssa.SSAExactTestCase):
def _initGrid(self):
halfWidth = 300.0e3
Lx = halfWidth
Ly = halfWidth
ctx = PISM.Context().ctx
self.grid = PISM.IceGrid.Shallow(ctx, Lx, Ly, 0, 0,
self.Mx, self.My,
PISM.CELL_CENTER,
PISM.XY_PERIODIC)
def _initPhysics(self):
config = self.modeldata.config
config.set_flag("basal_resistance.pseudo_plastic.enabled", False)
enthalpyconverter = PISM.EnthalpyConverter(config)
config.set_string("stress_balance.ssa.flow_law", "isothermal_glen")
self.modeldata.setPhysics(enthalpyconverter)
def _initSSACoefficients(self):
self._allocStdSSACoefficients()
self._allocateBCs()
vecs = self.modeldata.vecs
vecs.tauc.set(0.0) # irrelevant for test J
        # ensures that the ice is floating (max. thickness is 770 m)
vecs.bedrock_altitude.set(-1000.0)
vecs.mask.set(PISM.MASK_FLOATING)
vecs.bc_mask.set(0) # No dirichlet data.
EC = PISM.EnthalpyConverter(PISM.Context().config)
enth0 = EC.enthalpy(273.15, 0.01, 0) # 0.01 water fraction
vecs.enthalpy.set(enth0)
ocean_rho = self.config.get_number("constants.sea_water.density")
ice_rho = self.config.get_number("constants.ice.density")
# The PISM.vec.Access object ensures that we call beginAccess for each
# variable in 'vars', and that endAccess is called for each one on exiting
# the 'with' block.
with PISM.vec.Access(comm=[vecs.land_ice_thickness,
vecs.surface_altitude,
vecs.bc_mask,
vecs.vel_bc]):
grid = self.grid
for (i, j) in grid.points():
p = PISM.exactJ(grid.x(i), grid.y(j))
vecs.land_ice_thickness[i, j] = p.H
vecs.surface_altitude[i, j] = (1.0 - ice_rho / ocean_rho) * p.H # // FIXME task #7297
# special case at center point (Dirichlet BC)
if (i == grid.Mx() // 2) and (j == grid.My() // 2):
vecs.bc_mask[i, j] = 1
vecs.vel_bc[i, j] = [p.u, p.v]
def _initSSA(self):
# Test J has a viscosity that is independent of velocity. So we force a
        # constant viscosity by setting the strength_extension
# thickness larger than the given ice thickness. (max = 770m).
nu0 = convert(30.0, "MPa year", "Pa s")
H0 = 500.0 # 500 m typical thickness
ssa = self.ssa
ssa.strength_extension.set_notional_strength(nu0 * H0)
ssa.strength_extension.set_min_thickness(800.)
def exactSolution(self, i, j, x, y):
p = PISM.exactJ(x, y)
return [p.u, p.v]
# The main code for a run follows:
if __name__ == '__main__':
context = PISM.Context()
config = context.config
tc = testj(int(config.get_number("grid.Mx")), int(config.get_number("grid.My")))
tc.run(config.get_string("output.file_name"))
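# A hypothetical invocation of this script (PISM reads -Mx/-My and -o from its
# option database; the exact options depend on the PISM build):
#
#     python3 ssa_testj.py -Mx 61 -My 61 -o ssa_testj.nc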
| gpl-3.0 | 8,439,416,760,993,107,000 | 37.233645 | 109 | 0.599364 | false |
dimtion/jml | inputFiles/ourIA/old_ias/dijkstra.py | 1 | 8663 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
##################################################################################################################
#################################################### PRE-DEFINED IMPORTS ####################################################
##################################################################################################################
# Imports that are necessary for the program architecture to work properly
# Do not edit this code
import ast
import sys
import os
##################################################################################################################
###################################################### YOUR IMPORTS ######################################################
##################################################################################################################
import utils as u
import algorithms as algo
#################################################### PRE-DEFINED CONSTANTS ###################################################
##################################################################################################################
# Possible characters to send to the maze application
# Any other will be ignored
# Do not edit this code
UP = 'U'
DOWN = 'D'
LEFT = 'L'
RIGHT = 'R'
##################################################################################################################
# Name of your team
# It will be displayed in the maze
# You have to edit this code
TEAM_NAME = "Your name here"
##################################################################################################################
##################################################### YOUR CONSTANTS #####################################################
##################################################################################################################
##################################################################################################################
##################################################### YOUR VARIABLES #####################################################
##################################################################################################################
route = []
##################################################################################################################
#################################################### PRE-DEFINED FUNCTIONS ###################################################
##################################################################################################################
# Writes a message to the shell
# Use for debugging your program
# Channels stdout and stdin are captured to enable communication with the maze
# Do not edit this code
def debug (text) :
# Writes to the stderr channel
sys.stderr.write(str(text) + "\n")
sys.stderr.flush()
##################################################################################################################
# Reads one line of information sent by the maze application
# This function is blocking, and will wait for a line to terminate
# The received information is automatically converted to the correct type
# Do not edit this code
def readFromPipe () :
# Reads from the stdin channel and returns the structure associated to the string
try :
text = sys.stdin.readline()
return ast.literal_eval(text.strip())
except :
os._exit(-1)
##################################################################################################################
# Sends the text to the maze application
# Do not edit this code
def writeToPipe (text) :
# Writes to the stdout channel
sys.stdout.write(text)
sys.stdout.flush()
##################################################################################################################
# Reads the initial maze information
# The function processes the text and returns the associated variables
# The dimensions of the maze are positive integers
# Maze map is a dictionary associating to a location its adjacent locations and the associated weights
# The preparation time gives the time during which 'initializationCode' can make computations before the game starts
# The turn time gives the time during which 'determineNextMove' can make computations before returning a decision
# Player locations are tuples (line, column)
# Coins are given as a list of locations where they appear
# A boolean indicates if the game is over
# Do not edit this code
def processInitialInformation () :
# We read from the pipe
data = readFromPipe()
return (data['mazeWidth'], data['mazeHeight'], data['mazeMap'], data['preparationTime'], data['turnTime'], data['playerLocation'], data['opponentLocation'], data['coins'], data['gameIsOver'])
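# For orientation, a (made-up) initial message roughly looks like:
#
#     {'mazeWidth': 5, 'mazeHeight': 5,
#      'mazeMap': {(0, 0): {(0, 1): 1, (1, 0): 1}, ...},
#      'preparationTime': 3000, 'turnTime': 100,
#      'playerLocation': (4, 0), 'opponentLocation': (0, 4),
#      'coins': [(2, 2), (1, 3)], 'gameIsOver': False}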
##################################################################################################################
# Reads the information after each player moved
# The maze map and allowed times are no longer provided since they do not change
# Do not edit this code
def processNextInformation () :
# We read from the pipe
data = readFromPipe()
return (data['playerLocation'], data['opponentLocation'], data['coins'], data['gameIsOver'])
##################################################################################################################
# This is where you should write your code to do things during the initialization delay
# This function should not return anything, but should be used for a short preprocessing
# This function takes as parameters the dimensions and map of the maze, the time it is allowed for computing, the players locations in the maze and the remaining coins locations
# Make sure to have a safety margin for the time to include processing times (communication etc.)
def initializationCode (mazeWidth, mazeHeight, mazeMap, timeAllowed, playerLocation, opponentLocation, coins) :
global route
routingTable = algo.dijkstra(mazeMap, playerLocation)
route = u.way_width(routingTable, playerLocation, (0, mazeWidth - 1))
##################################################################################################################
# This is where you should write your code to determine the next direction
# This function should return one of the directions defined in the CONSTANTS section
# This function takes as parameters the dimensions and map of the maze, the time it is allowed for computing, the players locations in the maze and the remaining coins locations
# Make sure to have a safety margin for the time to include processing times (communication etc.)
def determineNextMove (mazeWidth, mazeHeight, mazeMap, timeAllowed, playerLocation, opponentLocation, coins):
next_pos = route.pop(0)
return u.direction(playerLocation, next_pos)
##################################################################################################################
####################################################### MAIN LOOP ######################################################
##################################################################################################################
# This is the entry point when executing this file
# We first send the name of the team to the maze
# The first message we receive from the maze includes its dimensions and map, the times allowed to the various steps, and the players and coins locations
# Then, at every loop iteration, we get the maze status and determine a move
# Do not edit this code
if __name__ == "__main__" :
# We send the team name
writeToPipe(TEAM_NAME + "\n")
# We process the initial information and have a delay to compute things using it
(mazeWidth, mazeHeight, mazeMap, preparationTime, turnTime, playerLocation, opponentLocation, coins, gameIsOver) = processInitialInformation()
initializationCode(mazeWidth, mazeHeight, mazeMap, preparationTime, playerLocation, opponentLocation, coins)
# We decide how to move and wait for the next step
while not gameIsOver :
(playerLocation, opponentLocation, coins, gameIsOver) = processNextInformation()
if gameIsOver :
break
nextMove = determineNextMove(mazeWidth, mazeHeight, mazeMap, turnTime, playerLocation, opponentLocation, coins)
writeToPipe(nextMove)
##################################################################################################################
##################################################################################################################
| mit | 4,362,299,323,070,154,000 | 48.221591 | 195 | 0.461618 | false |
CuonDeveloper/cuon | cuon_client/cuon_newclient/bin/cuon/Bank/bank.py | 1 | 8104 | # -*- coding: utf-8 -*-
##Copyright (C) [2005] [Jürgen Hamel, D-32584 Löhne]
##This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as
##published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version.
##This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied
##warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
##for more details.
##You should have received a copy of the GNU General Public License along with this program; if not, write to the
##Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
import sys
import os
import os.path
from types import *
import pygtk
pygtk.require('2.0')
import gtk
import gtk.glade
import gobject
import string
import logging
from cuon.Windows.chooseWindows import chooseWindows
import cPickle
#import cuon.OpenOffice.letter
# localisation
import locale, gettext
locale.setlocale (locale.LC_NUMERIC, '')
import threading
import datetime as DateTime
import SingleBank
import cuon.Addresses.addresses
import cuon.Addresses.SingleAddress
class bankwindow(chooseWindows):
def __init__(self, allTables):
chooseWindows.__init__(self)
self.singleBank = SingleBank.SingleBank(allTables)
self.singleAddress = cuon.Addresses.SingleAddress.SingleAddress(allTables)
self.loadGlade('bank.xml')
self.win1 = self.getWidget('BankMainwindow')
#self.setStatusBar()
self.allTables = allTables
self.EntriesBank = 'bank.xml'
self.loadEntries(self.EntriesBank)
self.singleBank.setEntries(self.getDataEntries(self.EntriesBank) )
self.singleBank.setGladeXml(self.xml)
self.singleBank.setTreeFields( ['address.lastname as address_name', \
'address.city as city','bcn'] )
self.singleBank.setStore( gtk.ListStore(gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_STRING, gobject.TYPE_UINT) )
self.singleBank.setTreeOrder('bcn')
self.singleBank.setListHeader([_('Lastname'), _('City'),_('BCN')])
self.singleBank.setTree(self.xml.get_widget('tree1') )
self.singleBank.sWhere = 'where address.id = address_id '
# set values for comboBox
# Menu-items
self.initMenuItems()
# Close Menus for Tab
self.addEnabledMenuItems('tabs','bank11')
# seperate Menus
self.addEnabledMenuItems('address','bank1')
# enabledMenues for Address
self.addEnabledMenuItems('editAddress','mi_new1')
self.addEnabledMenuItems('editAddress','mi_clear1')
self.addEnabledMenuItems('editAddress','mi_print1')
self.addEnabledMenuItems('editAddress','mi_edit1')
# tabs from notebook
self.tabClients = 0
self.tabChanged()
def checkClient(self):
pass
#Menu File
def on_quit1_activate(self, event):
self.out( "exit clients V1")
self.closeWindow()
def on_tree1_row_activated(self, event, data1, data2):
print event
print data1
print data2
self.activateClick('bChooseClient', event, 'clicked')
def on_save1_activate(self, event):
self.out( "save addresses v2")
self.singleBank.save()
self.setEntriesEditable(self.EntriesBank, False)
self.tabChanged()
def on_new1_activate(self, event):
self.out( "new addresses v2")
self.singleBank.newRecord()
self.setEntriesEditable(self.EntriesBank, True)
def on_edit1_activate(self, event):
self.out( "edit addresses v2")
self.setEntriesEditable(self.EntriesBank, True)
def on_delete1_activate(self, event):
self.out( "delete addresses v2")
self.singleBank.deleteRecord()
# Button choose address
def on_bChooseAddressOfBank_clicked(self, event):
adr = cuon.Addresses.addresses.addresswindow(self.allTables)
adr.setChooseEntry('chooseAddress', self.getWidget( 'eAddressID'))
# signals from entry eAddressNumber
def on_eAddressID_changed(self, event):
print 'eAdrnbr changed'
iAdrNumber = self.getChangedValue('eAddressID')
eAdrField = self.getWidget('tvAddress')
liAdr = self.singleAddress.getAddress(iAdrNumber)
self.setTextbuffer(eAdrField,liAdr)
# search button
def on_bSearch_clicked(self, event):
self.out( 'Searching ....', self.ERROR)
sName = self.getWidget('eFindName').get_text()
sCity = self.getWidget('eFindCity').get_text()
self.out('Name and City = ' + sName + ', ' + sCity, self.ERROR)
self.singleBank.sWhere = 'where lastname ~* \'.*' + sName + '.*\' and city ~* \'.*' + sCity + '.*\''
self.out(self.singleBank.sWhere, self.ERROR)
self.refreshTree()
def refreshTree(self):
self.singleBank.disconnectTree()
if self.tabOption == self.tabClients:
self.singleBank.connectTree()
self.singleBank.refreshTree()
elif self.tabOption == self.tabMisc:
self.singleMisc.sWhere ='where address_id = ' + `int(self.singleBank.ID)`
self.singleMisc.fillEntries(self.singleMisc.findSingleId())
elif self.tabOption == self.tabPartner:
self.singlePartner.sWhere ='where addressid = ' + `int(self.singleBank.ID)`
self.singlePartner.connectTree()
self.singlePartner.refreshTree()
elif self.tabOption == self.tabSchedul:
self.singleSchedul.sWhere ='where partnerid = ' + `int(self.singlePartner.ID)`
self.singleSchedul.connectTree()
self.singleSchedul.refreshTree()
def tabChanged(self):
self.out( 'tab changed to :' + str(self.tabOption))
if self.tabOption == self.tabClients:
#Address
self.disableMenuItem('tabs')
self.enableMenuItem('address')
self.actualEntries = self.singleBank.getEntries()
self.editAction = 'editAddress'
#self.setStatusbarText([''])
self.setTreeVisible(True)
            self.out( 'Page 0')
elif self.tabOption == self.tabBank:
            self.out( 'Page 2')
self.disableMenuItem('tabs')
self.enableMenuItem('bank')
self.editAction = 'editBank'
self.setTreeVisible(False)
#self.setStatusbarText([self.singleBank.sStatus])
elif self.tabOption == self.tabMisc:
            self.out( 'Page 3')
self.disableMenuItem('tabs')
self.enableMenuItem('misc')
self.editAction = 'editMisc'
self.setTreeVisible(False)
#self.setStatusbarText([self.singleBank.sStatus])
elif self.tabOption == self.tabPartner:
#Partner
self.disableMenuItem('tabs')
self.enableMenuItem('partner')
            self.out( 'Page 1')
self.editAction = 'editPartner'
self.setTreeVisible(True)
#self.setStatusbarText([self.singleBank.sStatus])
elif self.tabOption == self.tabSchedul:
#Scheduling
self.disableMenuItem('tabs')
self.enableMenuItem('schedul')
            self.out( 'Page 4')
self.editAction = 'editSchedul'
self.setTreeVisible(True)
self.setStatusbarText([self.singlePartner.sStatus])
# refresh the Tree
self.refreshTree()
self.enableMenuItem(self.editAction)
self.editEntries = False
| gpl-3.0 | 6,315,108,617,176,841,000 | 28.786765 | 134 | 0.613182 | false |
YAtOff/python0-reloaded | week4/homework/tests.py | 1 | 1359 | import unittest
import logging
def warn_if_not_implemented(func):
def wrapper(*args, **kwargs):
try:
func(*args, **kwargs)
except Exception as e:
if e.args[0] == 'Not implemented':
logging.warning('%s is not implemented' % func.__name__[len('test_'):])
return
raise
return wrapper
class TestTasks(unittest.TestCase):
@warn_if_not_implemented
def test_number_to_text(self):
from number_to_text import number_to_text
self.assertEqual(number_to_text(0), 'zero')
self.assertEqual(number_to_text(1), 'one')
self.assertEqual(number_to_text(2), 'two')
self.assertEqual(number_to_text(3), 'three')
self.assertEqual(number_to_text(4), 'four')
self.assertEqual(number_to_text(5), 'five')
self.assertEqual(number_to_text(6), 'six')
self.assertEqual(number_to_text(7), 'seven')
self.assertEqual(number_to_text(8), 'eight')
self.assertEqual(number_to_text(9), 'nine')
@warn_if_not_implemented
def test_product_sign(self):
from product_sign import product_sign
self.assertEqual(product_sign(1, 1), '+')
self.assertEqual(product_sign(1, -1), '-')
self.assertEqual(product_sign(-1, -1), '+')
if __name__ == '__main__':
unittest.main()
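# For reference, the homework modules exercised above are expected to expose
# roughly these functions (signatures inferred from the assertions, not from
# the homework files themselves):
#
#     number_to_text(n)   # 0..9 -> 'zero' .. 'nine'
#     product_sign(a, b)  # '+' or '-' depending on the sign of a * b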
| mit | -2,111,466,889,436,704,000 | 31.357143 | 87 | 0.594555 | false |
rtucker-mozilla/mozilla_inventory | api_v1/keyvalue_handler.py | 1 | 24912 | from piston.handler import BaseHandler, rc
from systems.models import System, SystemRack,SystemStatus,NetworkAdapter,KeyValue
from truth.models import Truth, KeyValue as TruthKeyValue
from dhcp.DHCP import DHCP as DHCPInterface
from dhcp.models import DHCP
from MacroExpansion import MacroExpansion
from KeyValueTree import KeyValueTree
import re
try:
import json
except:
from django.utils import simplejson as json
from django.test.client import Client
from settings import API_ACCESS
class KeyValueHandler(BaseHandler):
allowed_methods = API_ACCESS
def create(self, request, key_value_id=None):
if 'system_id' in request.POST:
n = KeyValue()
system = System.objects.get(id=request.POST['system_id'])
n.system = system
if 'key' in request.POST:
n.key = request.POST['key']
if 'value' in request.POST:
n.value = request.POST['value']
try:
n.save()
resp = rc.ALL_OK
resp.write('json = {"id":%i}' % (n.id))
except:
resp = rc.NOT_FOUND
resp.write('Unable to Create Key/Value Pair')
return resp
elif 'truth_name' in request.POST:
n = TruthKeyValue()
truth = Truth.objects.get(name=request.POST['truth_name'])
n.truth = truth
if 'key' in request.POST:
n.key = request.POST['key']
if 'value' in request.POST:
n.value = request.POST['value']
try:
n.save()
resp = rc.ALL_OK
resp.write('json = {"id":%i}' % (n.id))
except:
resp = rc.NOT_FOUND
resp.write('Unable to Create Key/Value Pair')
return resp
else:
resp = rc.NOT_FOUND
            resp.write('system_id or truth_name required')
            return resp
def build_validation_array(self):
input_regex_array = []
output_regex_array = []
error_message_array = []
ipv4_regex = re.compile(r'((2[0-5]|1[0-9]|[0-9])?[0-9]\.){3}((2[0-5]|1[0-9]|[0-9])?[0-9])')
true_false_regex = re.compile('(^True$|^False$)')
input_regex_array.append(re.compile('nic\.\d+\.ipv4_address\.\d+'))
output_regex_array.append(ipv4_regex)
error_message_array.append('Requires IP Address')
input_regex_array.append(re.compile('^dhcp\.scope\.netmask$'))
output_regex_array.append(ipv4_regex)
error_message_array.append('Requires Subnet Mask')
input_regex_array.append(re.compile('^is_dhcp_scope$'))
output_regex_array.append(re.compile(true_false_regex))
error_message_array.append('Requires True|False')
input_regex_array.append(re.compile('^dhcp\.scope\.start$'))
output_regex_array.append(re.compile(ipv4_regex))
error_message_array.append('Requires IP Address')
input_regex_array.append(re.compile('^dhcp\.scope\.end$'))
output_regex_array.append(re.compile(ipv4_regex))
error_message_array.append('Requires IP Address')
input_regex_array.append(re.compile('^dhcp\.pool\.start$'))
output_regex_array.append(re.compile(ipv4_regex))
error_message_array.append('Requires IP Address')
input_regex_array.append(re.compile('^dhcp\.pool\.end$'))
output_regex_array.append(re.compile(ipv4_regex))
error_message_array.append('Requires IP Address')
input_regex_array.append(re.compile('^dhcp\.option\.ntp_server\.\d+$'))
output_regex_array.append(re.compile(ipv4_regex))
error_message_array.append('Requires IP Address')
input_regex_array.append(re.compile('^dhcp\.dns_server\.\d+$'))
output_regex_array.append(re.compile(ipv4_regex))
error_message_array.append('Requires IP Address')
input_regex_array.append(re.compile('^dhcp\.option_router\.\d+$'))
output_regex_array.append(re.compile(ipv4_regex))
error_message_array.append('Requires IP Address')
input_regex_array.append(re.compile('^dhcp\.option\.subnet_mask\.\d+$'))
output_regex_array.append(re.compile(ipv4_regex))
error_message_array.append('Requires IP Address')
input_regex_array.append(re.compile('^dhcp\.pool\.allow_booting\.\d+$'))
output_regex_array.append(re.compile(true_false_regex))
error_message_array.append('Requires True|False')
input_regex_array.append(re.compile('^dhcp\.pool\.allow_bootp\.\d+$'))
output_regex_array.append(re.compile(true_false_regex))
error_message_array.append('Requires True|False')
input_regex_array.append(re.compile('^nic\.\d+\.mac_address\.\d+$'))
output_regex_array.append(re.compile('^([0-9a-f]{2}([:-]|$)){6}$', re.I))
error_message_array.append('Requires Mac Address XX:XX:XX:XX:XX:XX')
return input_regex_array, output_regex_array, error_message_array
def validate(self, key, passed_value):
error_message = None
return_regex = None
return_bool = True
input_regex_array, output_regex_array, error_message_array = self.build_validation_array()
        ## Here we loop through all of the possible input validation regexes. If the key matches one, then we need to validate the value for the key/value pair
for i in range(0, len(input_regex_array)):
if input_regex_array[i].match(key):
return_regex = output_regex_array[i]
error_message = error_message_array[i];
continue
## Check if we should validate the value portion of the key/value pair. No use validating it if the key doesn't require it
if return_regex is not None:
if return_regex.match(passed_value) is None:
return_bool = False
else:
error_message = None
return return_bool, error_message
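    # Illustrative behaviour of validate() (values are made up):
    #   validate('nic.0.ipv4_address.0', '10.0.0.5')  -> (True, None)
    #   validate('nic.0.ipv4_address.0', 'not-an-ip') -> (False, 'Requires IP Address')
    #   validate('some.unvalidated.key', 'anything')  -> (True, None)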
def update(self, request, key_value_id=None):
###TODO This whole method is not functioning correctly. Just for version 2. Not getting the system_id or truth_id from the poster firefox plugin
if 'system_id' in request.POST:
n = None
key_validated, validation_error_string = self.validate(request.POST['key'], request.POST['value'])
if key_validated is False:
resp = rc.FORBIDDEN
resp.write('Validation Failed for %s %s' % (request.POST['key'], validation_error_string) )
return resp
try:
n = KeyValue.objects.get(id=key_value_id,key=request.POST['key'])
system = System.objects.get(id=request.POST['system_id'])
except:
resp = rc.NOT_FOUND
resp.write('Neither system_id or truth_id found')
if n is not None:
n.system = system
if 'value' in request.POST and n is not None:
n.value = request.POST['value']
if n is not None:
try:
n.save()
resp = rc.ALL_OK
resp.write('json = {"id":%i}' % (n.id))
except:
resp = rc.NOT_FOUND
resp.write('Unable to Create Key/Value Pair')
return resp
elif 'truth_id' in request.POST:
try:
truth = Truth.objects.get(name=key_value_id)
                n = TruthKeyValue.objects.get(truth=truth,key=request.POST['key'])
if 'value' in request.POST:
n.value = request.POST['value']
except:
pass
try:
n.save()
resp = rc.ALL_OK
resp.write('json = {"id":%i}' % (n.id))
except Exception, e:
resp = rc.NOT_FOUND
resp.write('Unable to Update Key/Value Pair %s' % e)
return resp
else:
resp = rc.NOT_FOUND
resp.write('Neither system_id or truth_id found')
return resp
def read(self, request, key_value_id=None):
#if keystore get var is set return the whole keystore
if 'keystore' in request.GET:
#if key get var is set return the keystore based on the existance of this key
if 'key' in request.GET:
base = KeyValue.objects.filter(key=request.GET['keystore']).filter(keyvalue_set__contains=request.GET['key'])
                tmp_list = {}
for row in base:
matches = re.match("\$\{(.*)\}", row.value)
if matches is not None:
m = MacroExpansion(matches.group(1))
row.value = m.output()
for r in base:
key_name = 'host:%s:%s' % (r.system.hostname, r.key)
                    tmp_list[key_name] = r.value
                return tmp_list
if 'key' not in request.GET:
tree = KeyValueTree(request.GET['keystore']).final
return tree
elif 'key_type' in request.GET:
key_type = request.GET['key_type']
tmp_list = []
if key_type == 'dhcp_scopes':
#Get keystores from truth that have dhcp.is_scope = True
base = TruthKeyValue.objects.filter(key='dhcp.is_scope',value='True')
#Iterate through the list and get all of the key/value pairs
for row in base:
keyvalue = TruthKeyValue.objects.filter(truth=row.truth)
tmp_dict = {}
for kv in keyvalue:
tmp_dict[kv.key] = kv.value
tmp_list.append(tmp_dict)
return tmp_list
if key_type == 'system_by_reverse_dns_zone':
#Get keystores from truth that have dhcp.is_scope = True
keyvalue_pairs = KeyValue.objects.filter(key__contains='reverse_dns_zone',value=request.GET['zone']).filter(key__startswith='nic.')
#Iterate through the list and get all of the key/value pairs
tmp_list = []
for row in keyvalue_pairs:
keyvalue = KeyValue.objects.filter(system=row.system)
tmp_dict = {}
for kv in keyvalue:
tmp_dict[kv.key] = kv.value
tmp_dict['hostname'] = row.system.hostname
appendable = True
for the_items in tmp_list:
if 'hostname' not in the_items:
appendable = True
elif the_items['hostname'] == row.system.hostname:
appendable = False
if appendable is True:
tmp_list.append(tmp_dict)
#tmp_list = list(set(tmp_list))
return tmp_list
if key_type == 'system_by_scope':
#Get keystores from truth that have dhcp.is_scope = True
keyvalue_pairs = KeyValue.objects.filter(key__contains='dhcp_scope',value=request.GET['scope']).filter(key__startswith='nic.')
#Iterate through the list and get all of the key/value pairs
tmp_list = []
for row in keyvalue_pairs:
keyvalue = KeyValue.objects.filter(system=row.system)
tmp_dict = {}
for kv in keyvalue:
tmp_dict[kv.key] = kv.value
tmp_dict['hostname'] = row.system.hostname
appendable = True
for the_items in tmp_list:
if 'hostname' not in the_items:
appendable = True
elif the_items['hostname'] == row.system.hostname:
appendable = False
if appendable is True:
tmp_list.append(tmp_dict)
#tmp_list = list(set(tmp_list))
return tmp_list
if key_type == 'adapters_by_system':
                #Collect all nic.* keys for the requested system
system = System.objects.get(hostname=request.GET['system'])
keyvalue_pairs = KeyValue.objects.filter(key__startswith='nic.').filter(system=system).order_by('key')
#Iterate through the list and get all of the key/value pairs
tmp_dict = {}
adapter_ids = []
final_list = []
for kv in keyvalue_pairs:
tmp_dict[kv.key] = kv.value
for k in tmp_dict.iterkeys():
matches = re.match('nic\.(\d+).*',k)
                    if matches is not None:
if matches.group(1) not in adapter_ids:
adapter_ids.append(matches.group(1))
adapter_ids.sort()
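                # Build one dict per adapter from its nic.<adapter>.<field>.0 keys.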
for a in adapter_ids:
adapter_name = ''
mac_address = ''
dhcp_hostname = ''
dhcp_filename = ''
ipv4_address = ''
if 'nic.%s.ipv4_address.0' % a in tmp_dict:
ipv4_address = tmp_dict['nic.%s.ipv4_address.0' % a]
if 'nic.%s.name.0' % a in tmp_dict:
adapter_name = tmp_dict['nic.%s.name.0' % a]
if 'nic.%s.mac_address.0' % a in tmp_dict:
mac_address = tmp_dict['nic.%s.mac_address.0' % a]
if 'nic.%s.dhcp_hostname.0' % a in tmp_dict:
dhcp_hostname = tmp_dict['nic.%s.dhcp_hostname.0' % a]
if 'nic.%s.dhcp_filename.0' % a in tmp_dict:
dhcp_filename = tmp_dict['nic.%s.dhcp_filename.0' % a]
try:
final_list.append({
'system_hostname':system.hostname,
'ipv4_address':ipv4_address,
'adapter_name':adapter_name,
'mac_address':mac_address,
'dhcp_hostname':dhcp_hostname,
'dhcp_filename':dhcp_filename}
)
except:
pass
#tmp_list.append(tmp_dict)
return final_list
if key_type == 'adapters_by_system_and_zone':
                #Collect the system's nic.* keys, keeping only adapters in the requested reverse DNS zone
zone = request.GET['zone']
system = System.objects.get(hostname=request.GET['system'])
keyvalue_pairs = KeyValue.objects.filter(key__startswith='nic.').filter(system=system).order_by('key')
#Iterate through the list and get all of the key/value pairs
tmp_dict = {}
adapter_ids = []
final_list = []
for kv in keyvalue_pairs:
tmp_dict[kv.key] = kv.value
for k in tmp_dict.iterkeys():
matches = re.match('nic\.(\d+).*',k)
                    if matches is not None:
dhcp_scope_match = 'nic.%s.reverse_dns_zone.0' % matches.group(1)
if matches.group(1) not in adapter_ids and dhcp_scope_match in tmp_dict and tmp_dict[dhcp_scope_match] == zone:
#if matches.group(1) not in adapter_ids and 'nic.%s.dhcp_scope.0' % matches.group(1) in tmp_dict and tmp_dict['nic.%s.dhcp_scope.0' % matches.group(1)] == dhcp_scope:
adapter_ids.append(matches.group(1))
adapter_ids.sort()
for a in adapter_ids:
adapter_name = ''
mac_address = ''
dhcp_hostname = ''
dhcp_filename = ''
dhcp_domain_name = ''
ipv4_address = ''
if 'nic.%s.ipv4_address.0' % a in tmp_dict:
ipv4_address = tmp_dict['nic.%s.ipv4_address.0' % a]
if 'nic.%s.name.0' % a in tmp_dict:
adapter_name = tmp_dict['nic.%s.name.0' % a]
if 'nic.%s.mac_address.0' % a in tmp_dict:
mac_address = tmp_dict['nic.%s.mac_address.0' % a]
if 'nic.%s.dhcp_hostname.0' % a in tmp_dict:
dhcp_hostname = tmp_dict['nic.%s.dhcp_hostname.0' % a]
if 'nic.%s.dhcp_filename.0' % a in tmp_dict:
dhcp_filename = tmp_dict['nic.%s.dhcp_filename.0' % a]
if 'nic.%s.dhcp_domain_name.0' % a in tmp_dict:
dhcp_domain_name = tmp_dict['nic.%s.dhcp_domain_name.0' % a]
final_list.append({'system_hostname':system.hostname, 'ipv4_address':ipv4_address})
#tmp_list.append(tmp_dict)
return final_list
if key_type == 'adapters_by_system_and_scope':
                #Collect the system's nic.* keys, keeping only adapters in the requested DHCP scope
dhcp_scope = request.GET['dhcp_scope']
system = System.objects.get(hostname=request.GET['system'])
keyvalue_pairs = KeyValue.objects.filter(key__startswith='nic.').filter(system=system).order_by('key')
#Iterate through the list and get all of the key/value pairs
tmp_dict = {}
adapter_ids = []
final_list = []
for kv in keyvalue_pairs:
tmp_dict[kv.key] = kv.value
for k in tmp_dict.iterkeys():
matches = re.match('nic\.(\d+).*',k)
                    if matches is not None:
dhcp_scope_match = 'nic.%s.dhcp_scope.0' % matches.group(1)
if matches.group(1) not in adapter_ids and dhcp_scope_match in tmp_dict and tmp_dict[dhcp_scope_match] == dhcp_scope:
#if matches.group(1) not in adapter_ids and 'nic.%s.dhcp_scope.0' % matches.group(1) in tmp_dict and tmp_dict['nic.%s.dhcp_scope.0' % matches.group(1)] == dhcp_scope:
adapter_ids.append(matches.group(1))
adapter_ids.sort()
for a in adapter_ids:
adapter_name = ''
mac_address = ''
dhcp_hostname = ''
dhcp_filename = ''
dhcp_domain_name = ''
ipv4_address = ''
if 'nic.%s.ipv4_address.0' % a in tmp_dict:
ipv4_address = tmp_dict['nic.%s.ipv4_address.0' % a]
if 'nic.%s.name.0' % a in tmp_dict:
adapter_name = tmp_dict['nic.%s.name.0' % a]
if 'nic.%s.mac_address.0' % a in tmp_dict:
mac_address = tmp_dict['nic.%s.mac_address.0' % a]
if 'nic.%s.dhcp_hostname.0' % a in tmp_dict and 'nic.%s.option_hostname.0' % a not in tmp_dict:
dhcp_hostname = tmp_dict['nic.%s.dhcp_hostname.0' % a]
                    if 'nic.%s.option_hostname.0' % a in tmp_dict:
dhcp_hostname = tmp_dict['nic.%s.option_hostname.0' % a]
if 'nic.%s.dhcp_filename.0' % a in tmp_dict:
dhcp_filename = tmp_dict['nic.%s.dhcp_filename.0' % a]
if 'nic.%s.dhcp_domain_name.0' % a in tmp_dict:
dhcp_domain_name = tmp_dict['nic.%s.dhcp_domain_name.0' % a]
final_list.append({'system_hostname':system.hostname, 'ipv4_address':ipv4_address, 'adapter_name':adapter_name, 'mac_address':mac_address, 'dhcp_hostname':dhcp_hostname, 'dhcp_filename':dhcp_filename, 'dhcp_domain_name':dhcp_domain_name})
#tmp_list.append(tmp_dict)
return final_list
        elif 'key' in request.GET and request.GET['key'] != '':
tmp_list = {}
try:
base = KeyValue.objects.filter(key=request.GET['key'])
for row in base:
matches = re.match("\$\{(.*)\}", row.value)
if matches is not None:
m = MacroExpansion(matches.group(1))
row.value = m.output()
for r in base:
key_name = 'host:%s:%s' % (r.system.hostname, r.key)
tmp_list[key_name] = r.value
except Exception, e:
pass
try:
base = TruthKeyValue.objects.filter(key=request.GET['key'])
for row in base:
matches = re.match("\$\{(.*)\}", row.value)
if matches is not None:
m = MacroExpansion(matches.group(1))
row.value = m.output()
for r in base:
key_name = 'truth:%s:%s' % (r.truth.name, r.key)
tmp_list[key_name] = r.value
except Exception, e:
pass
return tmp_list
elif 'value' in request.GET:
tmp_list = {}
try:
base = KeyValue.objects.filter(value=request.GET['value'])
for row in base:
matches = re.match("\$\{(.*)\}", row.value)
if matches is not None:
m = MacroExpansion(matches.group(1))
row.value = m.output()
for r in base:
key_name = 'host:%s:%s' % (r.system.hostname, r.key)
tmp_list[key_name] = r.value
except Exception, e:
pass
try:
base = TruthKeyValue.objects.filter(value=request.GET['value'])
for row in base:
matches = re.match("\$\{(.*)\}", row.value)
if matches is not None:
m = MacroExpansion(matches.group(1))
row.value = m.output()
for r in base:
key_name = 'truth:%s:%s' % (r.truth.name, r.key)
tmp_list[key_name] = r.value
except Exception, e:
pass
return tmp_list
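    # delete() removes key/value rows: key_type=delete_all_network_adapters wipes every nic.* key
    # for a system, key_type=delete_network_adapter removes one adapter's keys, and without a
    # key_type the row identified by key_value_id is deleted from the host or truth store.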
def delete(self, request, key_value_id=None):
if 'key_type' in request.GET and request.GET['key_type'] == 'delete_all_network_adapters':
            #Delete every network adapter key (nic.*) for the given system
try:
system_hostname = request.GET['system_hostname']
system = System.objects.get(hostname=system_hostname)
KeyValue.objects.filter(key__startswith='nic', system=system).delete()
resp = rc.ALL_OK
resp.write('json = {"id":"0"}')
except:
resp = rc.NOT_FOUND
                resp.write('json = {"error_message":"Unable to Delete"}')
return resp
if 'key_type' in request.GET and request.GET['key_type'] == 'delete_network_adapter':
            #Delete the keys for a single network adapter (nic.<adapter_number>.*)
try:
adapter_number = request.GET['adapter_number']
system_hostname = request.GET['system_hostname']
system = System.objects.get(hostname=system_hostname)
KeyValue.objects.filter(key__startswith='nic.%s' % adapter_number, system=system).delete()
#KeyValue.objects.filter(key__startswith='nic.0', system=system).delete()
resp = rc.ALL_OK
resp.write('json = {"id":"14"}')
except:
resp = rc.NOT_FOUND
                resp.write('json = {"error_message":"Unable to Delete"}')
return resp
if 'key_type' not in request.GET:
if 'system_id' in request.GET:
try:
n = KeyValue.objects.get(id=key_value_id)
n.delete()
resp = rc.ALL_OK
resp.write('json = {"id":"%s"}' % str(key_value_id))
except:
resp = rc.NOT_FOUND
return resp
if 'truth_id' in request.GET:
try:
n = TruthKeyValue.objects.get(id=key_value_id)
n.delete()
resp = rc.ALL_OK
resp.write('json = {"id":"%s"}' % str(key_value_id))
except:
resp = rc.NOT_FOUND
return resp
resp = rc.ALL_OK
resp.write('json = {"id":"1"}')
return resp
| bsd-3-clause | -5,242,016,272,190,501,000 | 47.943026 | 259 | 0.504978 | false |
karstenw/nodebox-pyobjc | art/nodeboxlogo_larger.py | 1 | 2485 | size(512,512)
background(None)
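# Renders the NodeBox cube logo: an isometric block of cubes with a color gradient
# across the grid and seeded random gaps.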
def bar(x, y, w, depth, filled=1.0):
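    # Draw an isometric bar: a w-by-w front face at (x, y), extruded towards the lower left
    # by `depth`; `filled` sets how much of the depth is drawn solid, the rest as an outline.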
d1 = depth*filled
colormode(HSB)
f = fill()
s = stroke()
if f != None and f.brightness != 1:
s = color(f.hue, f.saturation+0.2, f.brightness-0.4)
nostroke()
#front
if f != None: fill(f)
rect(x, y, w, w)
#bottom
beginpath(x, y+w)
lineto(x-d1, y+w+d1)
lineto(x-d1+w, y+w+d1)
lineto(x+w, y+w)
endpath()
#left
beginpath(x, y)
lineto(x-d1, y+d1)
lineto(x-d1, y+w+d1)
lineto(x, y+w)
endpath()
#top
if f != None: fill(f.hue, f.saturation-0, f.brightness-0.15)
beginpath(x, y)
lineto(x+w, y)
lineto(x+w-d1, y+d1)
lineto(x-d1, y+d1)
endpath()
#right
if f != None: fill(f.hue, f.saturation-0, f.brightness-0.15)
beginpath(x+w, y)
lineto(x+w-d1, y+d1)
lineto(x+w-d1, y+w+d1)
lineto(x+w, y+w)
endpath()
if s != None: stroke(s)
line(x, y, x+w, y)
line(x, y, x-d1, y+d1)
line(x+w, y, x+w, y+w)
line(x+w, y+w, x+w-d1, y+w+d1)
line(x, y+w, x-d1, y+w+d1)
line(x+w, y, x+w-d1, y+d1)
#front
if f != None: fill(f)
rect(x-d1, y+d1, w, w)
x += d1
y += d1
d2 = depth*(1-filled)
if d2 != 0:
line(x, y, x+d2, y+d2)
line(x+w, y, x+w+d2, y+d2)
line(x+w, y+w, x+w+d2, y+w+d2)
line(x, y+w, x+d2, y+w+d2)
f = fill()
nofill()
rect(x+d2, y+d2, w, w)
if f != None: fill(f)
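# A cube is a bar whose extrusion depth is half the width of its face.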
def cube(x, y, w, filled=1.0):
bar(x, y, w, w*0.5, filled)
from random import seed
seed(55)
w = 112
n = 3
strokewidth(0.5)
colormode(RGB)
c = color(0.05,0.65,0.85)
c.brightness += 0.2
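# Draw the n x n x n grid of cubes; roughly half are randomly skipped to leave gaps.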
for x in range(n):
for y in range(n):
bottom = w * n
for z in range(n):
stroke(0.1)
strokewidth(2)
colormode(RGB)
dr = (1-c.r)/(n-1) * (x*0.85+y*0.15+z*0.05) * 1.1
dg = (1-c.g)/(n-1) * (x*0.85+y*0.15+z*0.05) * 1.2
db = (1-c.b)/(n-1) * (x*0.85+y*0.15+z*0.05) * 1.1
fill(1.2-dr, 1.1-dg, 1.2-db)
if random() > 0.5:
nostroke()
nofill()
dx = w*x - w/2*z
dy = bottom-w*y + w/2*z
transform(CORNER)
translate(171,-112)
scale(1.01)
cube(dx, dy, w)
reset() | mit | -5,216,147,913,592,614,000 | 19.890756 | 64 | 0.440241 | false |