repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated
---|---|---|---|---|---|---|---|---|---|---|
disqus/zumanji | src/zumanji/views.py | 1 | 6969 | from django.conf import settings
from django.core.urlresolvers import reverse
from django.db import transaction
from django.http import HttpResponseRedirect, HttpResponseForbidden
from django.shortcuts import render, get_object_or_404
from django.utils import simplejson
from django.views.decorators.csrf import csrf_protect, csrf_exempt
from functools import wraps
from zumanji.forms import UploadJsonForm
from zumanji.helpers import get_trace_data, get_changes, get_git_changes
from zumanji.models import Project, Build, BuildTag, Test
from zumanji.importer import import_build
NOTSET = object()
def api_auth(func):
@wraps(func)
def wrapped(request, *args, **kwargs):
if request.REQUEST.get('api_key'):
if request.REQUEST['api_key'] != settings.ZUMANJI_CONFIG.get('API_KEY', NOTSET):
return HttpResponseForbidden('Invalid api_key')
return func(request, *args, **kwargs)
return csrf_protect(func)(request, *args, **kwargs)
return csrf_exempt(wrapped)
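# A minimal usage sketch for the decorator above (not from this module; the
# view name is hypothetical, and it assumes ZUMANJI_CONFIG = {'API_KEY': '...'}
# in the Django settings):
#
#     @api_auth
#     def upload_results(request):
#         ...  # reached with a valid api_key, or via the CSRF-protected path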
def index(request):
build_qs = Build.objects.order_by('-revision__datetime', '-datetime').select_related('revision')
project_list = []
# lol O(N)
for project in Project.objects.all():
try:
latest_build = build_qs.filter(project=project)[0]
except IndexError:
latest_build = None
project_list.append((project, latest_build))
return render(request, 'zumanji/index.html', {
'project_list': project_list,
})
def view_project(request, project_label):
project = get_object_or_404(Project, label=project_label)
build_list = list(Build.objects
.filter(project=project)
.order_by('-revision__datetime', '-datetime')
.select_related('revision', 'project'))
return render(request, 'zumanji/project.html', {
'project': project,
'build_list': build_list,
})
def view_tag(request, project_label, tag_id):
project = get_object_or_404(Project, label=project_label)
tag = get_object_or_404(BuildTag, pk=tag_id)
build_list = list(Build.objects
.filter(project=project, tags=tag)
.order_by('-datetime')
.select_related('revision', 'project'))
return render(request, 'zumanji/tag.html', {
'project': project,
'tag': tag,
'build_list': build_list,
})
def view_build(request, project_label, build_id, tag_id=None):
filter_args = dict(project__label=project_label, id=build_id)
tag = None
if tag_id:
tag = get_object_or_404(BuildTag, id=tag_id)
filter_args["tags"] = tag
build = get_object_or_404(Build, **filter_args)
project = build.project
previous_build = build.get_previous_build(tag=tag)
next_build = build.get_next_build(tag=tag)
test_list = list(build.test_set
.filter(parent__isnull=True)
.order_by('-upper90_duration'))
compare_with = request.GET.get('compare_with')
if compare_with:
try:
compare_build = Build.objects.get(project__label=project_label, id=compare_with)
except Build.DoesNotExist:
compare_build = None
else:
compare_build = previous_build
changes = get_changes(compare_build, test_list)
if compare_build:
git_changes = get_git_changes(build, compare_build)
else:
git_changes = None
return render(request, 'zumanji/build.html', {
'project': project,
'tag': tag,
'build': build,
'previous_build': previous_build,
'compare_build': compare_build,
'next_build': next_build,
'test_list': test_list,
'changes': changes,
'git_changes': git_changes,
})
def view_test(request, project_label, build_id, test_label):
test = get_object_or_404(Test, project__label=project_label, build=build_id, label=test_label)
project = test.project
build = test.build
test_list = list(Test.objects.filter(parent=test)
.order_by('-upper90_duration')
.select_related('parent'))
# this is actually a <Test>
previous_test_by_build = test.get_test_in_previous_build()
next_test_by_build = test.get_test_in_next_build()
breadcrumbs = [
(reverse('zumanji:view_build', kwargs={'project_label': project.label, 'build_id': build.id}), 'Build #%s' % build.id)
]
last = ''
for node in test.get_context():
node_label = node.label[len(last):]
breadcrumbs.append(
(reverse('zumanji:view_test', kwargs={
'project_label': project.label,
'build_id': build.id,
'test_label': node.label,
}), node_label)
)
last = node.label + '.' # include the dot
previous_builds = test.get_previous_builds(50)
compare_with = request.GET.get('compare_with')
if compare_with:
try:
compare_build = Build.objects.get(project__label=project_label, id=compare_with)
except Build.DoesNotExist:
compare_build = None
else:
compare_build = previous_test_by_build.build if previous_test_by_build else None
if compare_build:
try:
compare_test = compare_build.test_set.get(label=test.label)
except Test.DoesNotExist:
compare_test = None
git_changes = get_git_changes(build, compare_build)
else:
compare_test = None
git_changes = None
trace_results = get_trace_data(test, compare_test)
if previous_test_by_build:
tests_to_check = test_list
changes = get_changes(compare_build, tests_to_check)
else:
changes = []
return render(request, 'zumanji/test.html', {
'breadcrumbs': breadcrumbs,
'project': project,
'build': build,
'previous_test_by_build': previous_test_by_build,
'next_test_by_build': next_test_by_build,
'previous_builds': previous_builds,
'test': test,
'test_list': test_list,
'changes': changes,
'compare_build': compare_build,
'trace_results': trace_results,
'git_changes': git_changes,
})
@api_auth
@transaction.commit_on_success
def upload_project_build(request, project_label):
project = get_object_or_404(Project, label=project_label)
form = UploadJsonForm(request.POST or None, request.FILES or None)
if form.is_valid():
data = simplejson.loads(request.FILES['json_file'].read())
try:
build = import_build(data, project=project.label, revision=form.cleaned_data.get('revision'))
        except Exception as e:
form.errors['json_file'] = unicode(e)
else:
return HttpResponseRedirect(reverse('zumanji:view_build', kwargs={
'project_label': project.label, 'build_id': build.id}))
return render(request, 'zumanji/upload_build.html', {
'project': project,
'form': form,
})
| apache-2.0 | 3,989,766,211,965,808,000 | 31.565421 | 126 | 0.627924 | false |
ZwEin27/phone-number-matcher | dig_phone_extractor.py | 1 | 23737 | # -*- coding: utf-8 -*-
# @Author: ZwEin
# @Date: 2016-06-21 12:36:47
# @Last Modified by: ZwEin
# @Last Modified time: 2016-09-29 21:54:12
import os
import re
import sys
import json
import copy
import types
import string
import collections
import phonenumbers
from datetime import datetime
from crf_tokenizer import CrfTokenizer
from urlparse import urlparse
from string import maketrans
from phonenumbers.phonenumberutil import NumberParseException
from difflib import SequenceMatcher
def is_valid_datetime(raw, date_format):
try:
datetime.strptime(raw, date_format)
return True
except ValueError:
return False
class Preprocessor():
re_prep = re.compile(r'[\(\)]')
reg_simple_format = [
r'(?:(?<=[ \A\b-\.\?])\d{3}[ \?\.-]\d{3}[ \?\.-]\d{4}(?=[ \Z\b-\.\?]))'
]
re_simple_format = re.compile(r'(?:'+r'|'.join(reg_simple_format)+r')')
datetime_regexes = [
r"(?:\d{2}[ _-]\d{2}[ _-]\d{4})",
r"(?:\d{4}[ _-]\d{2}[ _-]\d{2})"
]
datetime_regex = r"(?:" + r"|".join(datetime_regexes) + ")"
re_datetime_regex = re.compile(datetime_regex)
re_digits_regex = re.compile(r"\d+")
def prep_datetime(self, raw):
m = Preprocessor.re_datetime_regex.findall(raw)
for d in m:
dd = ''.join(Preprocessor.re_digits_regex.findall(d))
if is_valid_datetime(dd, '%Y%m%d') or is_valid_datetime(dd, '%m%d%Y'):
raw = raw.replace(d, "")
return raw
money_regex = r"(?:(?<=[\D])\$\d+(?=[\W_]))"
units = ['lbs', 'kg', 'hour', 'hr', 'hh']
unit_regex = r"(?:\d+[\s\W]*(" + r"|".join(units) + "))"
others_regexes = [
r"24/7",
r"#\d+",
r"\d+\'\d+",
r"(?<=[\W_])\d{5}[\W_]{1,}\d{5}(?=[\W_])",
r"- {1,}\d+$",
r"\d+\%"
]
other_regex = r"(?:" + "|".join(others_regexes) + ")"
all_regexes = [money_regex, unit_regex, other_regex]
all_regex = r"(" + r"|".join(all_regexes) + ")"
re_all_regex = re.compile(all_regex)
def preprocess(self, raw):
raw = raw.lower()
raw = raw.encode('ascii', 'ignore')
raw = self.prep_datetime(raw)
raw = Preprocessor.re_prep.sub(' ', raw)
raw = Preprocessor.re_all_regex.sub('', raw)
raw = Preprocessor.re_simple_format.sub('pnwrapper \g<0> pnwrapper', raw)
return raw
SOURCE_TYPE_TEXT = 'text'
SOURCE_TYPE_URL = 'url'
class Tokenizer():
re_2_digts_only_in_url_regex = re.compile(r'(?<=[-_])\d{2}(?=[_/])')
re_all_alphabet_in_url_regex = re.compile(r'\w+')
def __init__(self, source_type='text'):
self.set_source_type(source_type)
def set_source_type(self, source_type):
"""
'text' or 'url'
"""
        st = source_type.lower()
        if st not in [SOURCE_TYPE_TEXT, SOURCE_TYPE_URL]:
            raise Exception(source_type + ' is not a source type, which should be "text" or "url"')
        self.source_type = st
def remove_punctuation(self, raw):
return raw.translate(string.maketrans("",""), string.punctuation)
def tokenize(self, raw):
result = None
if self.source_type == SOURCE_TYPE_TEXT:
result = self.tokenize_text(raw)
elif self.source_type == SOURCE_TYPE_URL:
result = self.tokenize_url(raw)
return ' '.join(result.split())
def tokenize_text(self, raw):
t = CrfTokenizer()
t.setRecognizeHtmlEntities(True)
t.setRecognizeHtmlTags(True)
t.setSkipHtmlTags(True)
t.setRecognizePunctuation(True)
tokens = t.tokenize(raw)
tokens = ' '.join(tokens)
tokens = self.remove_punctuation(tokens)
return tokens
def tokenize_url(self, raw):
SEPARATOR = ' '
url_obj = urlparse(raw)
# parse netloc
netloc = url_obj.netloc.split('.')[:-2] # get rid of port numbers, ext and domain name
# parse path
path = url_obj.path
path = Tokenizer.re_2_digts_only_in_url_regex.sub('', path)
path = path.split('/')
content = netloc + path
content = [SEPARATOR.join(Tokenizer.re_all_alphabet_in_url_regex.findall(_)) for _ in content]
# parse params
# url_obj.params
# parse query
# url_obj.query
return ' sep '.join(content)
class Cleaner():
def prep_misspelled_numeral_words(self, raw):
misspelling_dict = {
"th0usand": "thousand",
"th1rteen": "thirteen",
"f0urteen": "fourteen",
"e1ghteen": "eighteen",
"n1neteen": "nineteen",
"f1fteen": "fifteen",
"s1xteen": "sixteen",
"th1rty": "thirty",
"e1ghty": "eighty",
"n1nety": "ninety",
"fourty": "forty",
"f0urty": "forty",
"e1ght": "eight",
"f0rty": "forty",
"f1fty": "fifty",
"s1xty": "sixty",
"zer0": "zero",
"for": "four",
"f0ur": "four",
"f1ve": "five",
"n1ne": "nine",
"0ne": "one",
"too": "two",
"tw0": "two",
"to": "two",
"s1x": "six"
}
for key in misspelling_dict.keys():
raw = raw.replace(key, misspelling_dict[key])
return raw
    numbers = ['zero', 'one', 'two', 'three', 'four', 'five', 'six', 'seven', 'eight', 'nine']
re_twenty_x = re.compile(r"(two|twenty[\W_]+(?=(\d|" + r"|".join(numbers) + ")))")
re_thirty_x = re.compile(r"(three|thirty[\W_]+(?=(\d|" + r"|".join(numbers) + ")))")
re_forty_x = re.compile(r"(four|forty[\W_]+(?=(\d|" + r"|".join(numbers) + ")))")
re_fifty_x = re.compile(r"(five|fifty[\W_]+(?=(\d|" + r"|".join(numbers) + ")))")
re_sixty_x = re.compile(r"(six|sixty[\W_]+(?=(\d|" + r"|".join(numbers) + ")))")
re_seventy_x = re.compile(r"(seven|seventy[\W_]+(?=(\d|" + r"|".join(numbers) + ")))")
re_eighty_x = re.compile(r"(eight|eighty[\W_]+(?=(\d|" + r"|".join(numbers) + ")))")
re_ninety_x = re.compile(r"(nine|ninety[\W_]+(?=(\d|" + r"|".join(numbers) + ")))")
re_ten = re.compile(r"(?<=[ilo0-9])ten(?=[ \b0-9])")
re_one = re.compile(r'(?:(?<=([0-9yneorxt]| ))one|(?:(?<=[ils])[i]((?=[ils])|$)))')
re_zero = re.compile(r'(?:zero|oh|(?:(?<=[0-9])(o+?))|(?:o(?=[0-9]))|(?:(?<=[o\s])o(?=[o\s])))')
def prep_replace_numeral_words(self, raw):
raw = raw.replace("hundred", "00")
raw = raw.replace("thousand", "000")
raw = raw.replace("eleven", "11")
raw = raw.replace("twelve", "12")
raw = raw.replace("thirteen", "13")
raw = raw.replace("fourteen", "14")
raw = raw.replace("fifteen", "15")
raw = raw.replace("sixteen", "16")
raw = raw.replace("seventeen", "17")
raw = raw.replace("eighteen", "18")
raw = raw.replace("nineteen", "19")
raw = Cleaner.re_twenty_x.sub("2", raw)
raw = Cleaner.re_thirty_x.sub("3", raw)
raw = Cleaner.re_forty_x.sub("4", raw)
raw = Cleaner.re_fifty_x.sub("5", raw)
raw = Cleaner.re_sixty_x.sub("6", raw)
raw = Cleaner.re_seventy_x.sub("7", raw)
raw = Cleaner.re_eighty_x.sub("8", raw)
raw = Cleaner.re_ninety_x.sub("9", raw)
raw = Cleaner.re_ten.sub("10", raw)
raw = Cleaner.re_one.sub("1", raw)
raw = Cleaner.re_zero.sub("0", raw)
raw = raw.replace("twenty", "20")
raw = raw.replace("thirty", "30")
raw = raw.replace("forty", "40")
raw = raw.replace("fifty", "50")
raw = raw.replace("sixty", "60")
raw = raw.replace("seventy", "70")
raw = raw.replace("eighty", "80")
raw = raw.replace("ninety", "90")
return raw
def clean(self, raw):
raw = self.prep_misspelled_numeral_words(raw)
raw = self.prep_replace_numeral_words(raw)
# print raw
return raw
class ZEExtractor():
def __init__(self):
pass
prefix = r'(?:(?<=[\A\b\sa-zA-Z])|^)'
# prefix = r'\b'
# prefix = r'[ ]?'
postfix = r'(?:(?=[\Z\b\sa-zA-Z])|$)'
# postfix = r'\b'
# postfix = r'[ ]?'
phone_number_format_regex = [
r'(?:'+prefix+r"\d{10,13}"+postfix+r')',
r'(?:'+prefix+r"\d{9,10}"+postfix+r')',
r'(?:'+prefix+r"\d{8}[ ]\d{3,4}"+postfix+r')',
r'(?:'+prefix+r"\d{7}[ ]\d{3,4}"+postfix+r')',
r'(?:'+prefix+r"\d{6}[ ]\d{4}"+postfix+r')',
r'(?:'+prefix+r"\d{5}[ ]\d{6}"+postfix+r')',
r'(?:'+prefix+r"\d{5}[ ]\d{4}[ ]\d{4}"+postfix+r')',
r'(?:'+prefix+r"\d{5}[ ]\d{4}"+postfix+r')',
r'(?:'+prefix+r"\d{5}[ ]\d{4}[ ]\d{2}[ ]\d{2}"+postfix+r')',
r'(?:'+prefix+r"\d{4}[ ]\d{4}[ ]\d{2}"+postfix+r')',
r'(?:'+prefix+r"\d{4}[ ]\d{2}[ ]\d{2}[ ]\d{2}[ ]\d{2}"+postfix+r')',
r'(?:'+prefix+r"\d{4}[ ]\d{3}[ ]\d{3}"+postfix+r')',
r'(?:'+prefix+r"\d{3}[ ]\d{7,8}"+postfix+r')',
r'(?:'+prefix+r"\d{3}[ ]\d{4}[ ]\d{4}"+postfix+r')',
r'(?:'+prefix+r"\d{3}[ ]\d{4}[ ]\d{3}"+postfix+r')',
r'(?:'+prefix+r"\d{3}[ ]\d{3}[ ]\d{4}"+postfix+r')',
r'(?:'+prefix+r"\d{3}[ ]\d{3}[ ]\d{3}[ ]\d{1}"+postfix+r')',
r'(?:'+prefix+r"\d{3}[ ]\d{3}[ ]\d{2}[ ]\d{1}[ ]\d{1}"+postfix+r')',
r'(?:'+prefix+r"\d{3}[ ]\d{3}[ ]\d{1}[ ]\d{3}"+postfix+r')',
r'(?:'+prefix+r"\d{3}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{4}"+postfix+r')',
r'(?:'+prefix+r"\d{2}[ ]\d{4}[ ]\d{4}"+postfix+r')',
r'(?:'+prefix+r"\d{2}[ ]\d{8}"+postfix+r')',
r'(?:'+prefix+r"\d{1}[ ]\d{8}[ ]\d{1}"+postfix+r')', # \d{2}[ ] ...
r'(?:'+prefix+r"\d{1}[ ]\d{3}[ ]\d{3}[ ]\d{3}"+postfix+r')',
r'(?:'+prefix+r"\d{2}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}"+postfix+r')',
r'(?:'+prefix+r"\d{1}[ ]\d{2}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}"+postfix+r')',
r'(?:'+prefix+r"\d{1}[ ]\d{1}[ ]\d{2}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}"+postfix+r')',
r'(?:'+prefix+r"\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{2}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}"+postfix+r')',
r'(?:'+prefix+r"\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{2}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}"+postfix+r')',
r'(?:'+prefix+r"\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{2}[ ]\d{1}[ ]\d{1}[ ]\d{1}"+postfix+r')',
r'(?:'+prefix+r"\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{2}[ ]\d{1}[ ]\d{1}"+postfix+r')',
r'(?:'+prefix+r"\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{2}[ ]\d{1}"+postfix+r')',
r'(?:'+prefix+r"\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{2}"+postfix+r')',
r'(?:'+prefix+r"\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}[ ]\d{1}"+postfix+r')'
]
# numbers_regex = r"(?:" + r"|".join(phone_number_format_regex) + r")"
numbers_regex = r"(?:" + r"|".join(phone_number_format_regex) + r")"
re_numbers_regex = re.compile(numbers_regex)
# print numbers_regex
def extract(self, raw):
raw = ZEExtractor.re_numbers_regex.findall(raw)
raw = [''.join(_.split()) for _ in raw if len(_.strip()) >= 10]
return '\t'.join(raw)
class Validator():
re_zero = re.compile(r'0{3,}')
def validate_phone_number_with_coutry_code(self, raw, country_code='US'):
try:
z = phonenumbers.parse(raw, country_code)
        except NumberParseException as e:
pass
"""
if e.error_type == NumberParseException.INVALID_COUNTRY_CODE:
# Invalid country code specified
return []
elif e.error_type == NumberParseException.NOT_A_NUMBER:
# The string passed in had fewer than 3 digits in it.
# The number failed to match the regular expression
return []
elif e.error_type == NumberParseException.TOO_SHORT_AFTER_IDD:
# The string started with an international dialing prefix
# but after this was removed, it had fewer digits than any
# valid phone number (including country code) could have.
return []
elif e.error_type == NumberParseException.TOO_SHORT_NSN:
# After any country code has been stripped, the string
# had fewer digits than any valid phone number could have.
return []
elif e.error_type == NumberParseException.TOO_LONG:
# String had more digits than any valid phone number could have
return []
"""
# print e.error_type, e._msg
else:
if phonenumbers.is_possible_number(z) and phonenumbers.is_valid_number(z):
return [raw]
else:
return []
def validate_phone_number(self, raw):
# match all countries if using area_code.get_all_country_iso_two_letter_code()
# may include too short phone numbers if use 'DE'
country_code_list = ['US', 'CN', 'IN', 'UA', 'JP', 'RU', 'IT', 'DE', 'CA', 'TR']
for country_code in country_code_list:
rtn = self.validate_phone_number_with_coutry_code(raw, country_code=country_code)
if rtn:
return rtn
def is_datetime(self, raw):
size = len(raw)
date_format = ''
if size == 14:
return is_valid_datetime(raw, '%Y%m%d%H%M%S')
elif size == 8:
return is_valid_datetime(raw, '%Y%m%d')
elif size == 6:
return is_valid_datetime(raw, '%Y%m%d') or is_valid_datetime(raw, '%H%M%S')
else:
return False
re_num_digits = [
None,
re.compile(r"\d{1}"),
re.compile(r"\d{2}"),
re.compile(r"\d{3}"),
re.compile(r"\d{4}"),
re.compile(r"\d{5}"),
re.compile(r"\d{6}")
]
def is_all_dup_digits(self, raw):
for i in range(1, 6):
rtn = Validator.re_num_digits[i].findall(raw)
if len(raw) % i != 0:
continue
if all(rtn[0] == rest for rest in rtn):
return True
return False
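    # A quick sketch of the intended behavior of is_all_dup_digits:
    #   is_all_dup_digits('121212')     -> True  ('12' repeated three times)
    #   is_all_dup_digits('7185737671') -> False (no repeating digit block)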
re_start_zero = re.compile(r'^0+')
def suggest_most_overlap(self, extracted_phone_list):
def similar(a, b):
return SequenceMatcher(None, a, b).ratio()
potential_invalid, potential_valid = [], []
for pn in extracted_phone_list:
if len(pn) == 10:
potential_valid.append(pn)
else:
potential_invalid.append(pn)
ans = list(potential_valid)
for pi in potential_invalid:
if any(similar(pi, pv) < .3 for pv in potential_valid):
ans.append(pi)
return ans
def validate(self, raw):
ans = []
for nums in raw.split('\t'):
nums = nums.strip()
nums = Validator.re_start_zero.sub('', nums)
if len(nums) > 16:
continue
if len(Validator.re_zero.findall(nums)):
continue
if self.is_all_dup_digits(nums):
continue
if self.is_datetime(nums):
continue
ans += [nums]
# valid = self.validate_phone_number(nums)
# if valid:
# ans.extend(valid)
ans = list(set(ans))
ans = self.suggest_most_overlap(ans)
return ' '.join(ans)
class Normalizer():
# try extracting from this one live escort reviews pnwrapper 754 307 7279 pnwrapper 49 91 3524432077 you won t be disappointedangel
re_digits = re.compile(r'(?:(?<=[ \s\b\Aa-zA-Z])[\d ]+(?=[ \s\b\Za-zA-Z]))')
def normalize(self, cleaned_output, uncleaned_output, output_format='list'):
# print [_.strip() for _ in Normalizer.re_digits.findall(tokenized_content) if _.strip() != '']
if output_format == 'obfuscation':
output = []
for co in cleaned_output.split():
phonenum = {}
phonenum['telephone'] = co
if co in uncleaned_output:
phonenum['obfuscation'] = 'False'
else:
phonenum['obfuscation'] = 'True'
output.append(phonenum)
return output
else:
return cleaned_output.split()
class PhoneNumberExtractor(object):
PN_OUTPUT_FORMAT_LIST = 'list'
PN_OUTPUT_FORMAT_OBFUSCATION = 'obfuscation'
def __init__(self, _output_format='list'):
self.preprocessor = Preprocessor()
self.tokenizer = Tokenizer(source_type='text')
self.extractor = ZEExtractor()
self.cleaner = Cleaner()
self.validator = Validator()
self.normalizer = Normalizer()
self.set_output_format(_output_format)
def set_output_format(self, _output_format):
# 1. list, 2. obfuscation
if _output_format not in [PhoneNumberExtractor.PN_OUTPUT_FORMAT_LIST, PhoneNumberExtractor.PN_OUTPUT_FORMAT_OBFUSCATION]:
raise Exception('output_format should be "list" or "obfuscation"')
self.output_format = _output_format
def do_process(self, content, source_type='text', do_preprocess=True, do_tokenize=True, do_clean=True, do_extract=True, do_validate=True):
if do_preprocess:
content = self.preprocessor.preprocess(content)
if do_tokenize:
self.tokenizer.set_source_type(source_type)
content = self.tokenizer.tokenize(content)
if do_clean:
content = self.cleaner.clean(content)
if do_extract:
content = self.extractor.extract(content)
if do_validate:
content = self.validator.validate(content)
return content
def match(self, content, source_type='text'):
cleaned_ans = self.do_process(content, source_type=source_type)
uncleaned_ans = self.do_process(content, source_type=source_type, do_clean=False)
return self.normalizer.normalize(cleaned_ans, uncleaned_ans, output_format=self.output_format)
########################################################################
# URLExtractor
########################################################################
import esm
import idna
import tldextract
re_dot = re.compile(r'(?:\s+?dot\s+?)', re.IGNORECASE)
reg_url_charactor = '[a-z0-9-.]'
re_url_charactor = re.compile(reg_url_charactor, re.IGNORECASE)
re_pretld = re.compile(reg_url_charactor+'+?$', re.IGNORECASE)
re_posttld = re.compile(':?[0-9]*[/[!#$&-;=?a-z_]+]?', re.IGNORECASE)
class URLExtractor(object):
def __init_tld_index():
tldindex = esm.Index()
tlds = (tldextract.TLDExtract()._get_tld_extractor().tlds)
for tld in tlds:
tldindex.enter('.' + tld.encode('idna'))
tldindex.fix()
return tldindex
tldindex = __init_tld_index()
@staticmethod
def preprocess(text):
def clean(text):
text = re_dot.sub('.', text)
return text
text = clean(text)
return text
@staticmethod
def query(text):
ans = []
exts = URLExtractor.tldindex.query(text)
for ext in exts:
pretld, posttld = None, None
url = ''
tld = ext[1]
startpt, endpt = ext[0][0], ext[0][1]
if len(text) > endpt:
nextcharacter = text[endpt]
if re_url_charactor.match(nextcharacter):
continue
posttld = re_posttld.match(text[endpt:])
pretld = re_pretld.search(text[:startpt])
if pretld:
url = pretld.group(0)
startpt -= len(pretld.group(0))
url += tld
if posttld:
url += posttld.group(0)
endpt += len(posttld.group(0))
url = url.rstrip(',.')
ans.append(url)
ans = list(set([_ for _ in ans if _]))
return ans
@staticmethod
def extract(text):
text = text.encode('ascii', 'ignore')
        text = URLExtractor.preprocess(text)
ans = URLExtractor.query(text)
return ans
# in production
# from digExtractor.extractor import Extractor
# in test
class Extractor:
    def extract(self, doc):
        raise NotImplementedError("Need to implement extract function")
    # should create a new dictionary each time
    def get_metadata(self):
        raise NotImplementedError("Need to implement get_metadata function")
    def set_metadata(self, metadata):
        raise NotImplementedError("Need to implement set_metadata function")
def get_renamed_input_fields(self):
raise NotImplementedError( "Need to implement get_renamed_input_fields function" )
def set_renamed_input_fields(self, renamed_input_fields):
if not (isinstance(renamed_input_fields, basestring) or isinstance(renamed_input_fields, types.ListType)):
raise ValueError("renamed_input_fields must be a string or a list")
self.renamed_input_fields = renamed_input_fields
return self
class PhoneExtractor(Extractor):
def __init__(self):
self.renamed_input_fields = '' # ? renamed_input_fields
def extract(self, doc):
urls = URLExtractor.extract(doc)
extractor = PhoneNumberExtractor()
extracts = []
for url in urls:
extracts += extractor.match(url, source_type='url')
doc = doc.replace(url, '')
extracts += extractor.match(doc, source_type='text')
return extracts
def get_metadata(self):
return copy.copy(self.metadata)
def set_metadata(self, metadata):
self.metadata = metadata
return self
def get_renamed_input_fields(self):
return self.renamed_input_fields
def set_renamed_input_fields(self, renamed_input_fields):
if not (isinstance(renamed_input_fields, basestring) or isinstance(renamed_input_fields, types.ListType)):
raise ValueError("renamed_input_fields must be a string or a list")
self.renamed_input_fields = renamed_input_fields
return self
if __name__ == '__main__':
doc = "71857376 71857376718 test 71857376719 718573767185 71837376718 71981090718 718573767198 719810907185 71857376150 1171857376 http://costarica.backpage.com/BodyRubs/hoy-cerramos-a-las-11-71857376/2909373 Sexy new girl in town searching for a great date wiff u Naughty fresh girl here searching 4 a great date wiff you Sweet new girl in town seeking for a good date with u for80 2sixseven one9zerofor 90hr incall or out call"
pe = PhoneExtractor()
print pe.extract(doc)
"""
# Samples
# from phone_number_extractor import PhoneNumberExtractor
extractor = PhoneNumberExtractor()
url_string = "http://costarica.backpage.com/BodyRubs/hoy-cerramos-a-las-11-71857376/2909373"
url_phone_numbers = extractor.match(url_string, source_type='url')
print url_phone_numbers
# text_string = "Sexy new girl in town searching for a great date wiff u Naughty fresh girl here searching 4 a great date wiff you Sweet new girl in town seeking for a good date with u for80 2sixseven one9zerofor 90hr incall or out call"
text_string = "71857376 71857376718 test 71857376719 718573767185 71837376718 71981090718 718573767198 719810907185 71857376150 1171857376"
text_phone_numbers = extractor.match(text_string, source_type='text')
print text_phone_numbers
"""
| apache-2.0 | -8,800,745,410,716,933,000 | 34.694737 | 433 | 0.532376 | false |
kobejean/tensorflow | tensorflow/contrib/distribute/python/tpu_strategy.py | 1 | 20404 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""TPU Distribution Strategy.
This is experimental. It's not ready for general use.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.distribute.python import cross_tower_ops as cross_tower_ops_lib
from tensorflow.contrib.distribute.python import one_device_strategy
from tensorflow.contrib.distribute.python import values
from tensorflow.contrib.tpu.python.ops import tpu_ops
from tensorflow.contrib.tpu.python.tpu import tpu
from tensorflow.contrib.tpu.python.tpu import tpu_system_metadata as tpu_system_metadata_lib
from tensorflow.contrib.tpu.python.tpu import training_loop
from tensorflow.python.eager import context
from tensorflow.python.eager import tape
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.ops import variables as variables_lib
from tensorflow.python.training import device_util
from tensorflow.python.training import distribute as distribute_lib
from tensorflow.python.util import nest
_TPU_INITIALIZE_SYSTEM_COLLECTION = "TPU_STRATEGY_INITIALIZE"
def get_tpu_system_metadata(tpu_cluster_resolver):
"""Retrieves TPU system metadata given a TPUClusterResolver."""
master = tpu_cluster_resolver.master()
# pylint: disable=protected-access
cluster_spec = tpu_cluster_resolver.cluster_spec()
cluster_def = cluster_spec.as_cluster_def() if cluster_spec else None
tpu_system_metadata = (
tpu_system_metadata_lib._query_tpu_system_metadata(
master,
cluster_def=cluster_def,
query_topology=False))
return tpu_system_metadata
# TODO(jhseu): Deduplicate with MirroredStrategy?
def _create_tpu_mirrored_variable(devices, real_mirrored_creator, *args,
**kwargs): # pylint: disable=g-missing-docstring
# Figure out what collections this variable should be added to.
# We'll add the TPUMirroredVariable to those collections instead.
collections = kwargs.pop("collections", None)
if collections is None:
collections = [ops.GraphKeys.GLOBAL_VARIABLES]
kwargs["collections"] = []
# TODO(jhseu): Should we have different behavior for different
# synchronization settings?
# Get aggregation value
# TODO(jhseu): Support aggregation in a tower context.
aggregation = kwargs.pop("aggregation", vs.VariableAggregation.NONE)
if aggregation not in [
vs.VariableAggregation.NONE,
vs.VariableAggregation.SUM,
vs.VariableAggregation.MEAN,
vs.VariableAggregation.ONLY_FIRST_TOWER,
]:
raise ValueError("Invalid variable aggregation mode: {} for variable: {}"
.format(aggregation, kwargs["name"]))
# Ignore user-specified caching device, not needed for mirrored variables.
kwargs.pop("caching_device", None)
# TODO(josh11b,apassos): It would be better if variable initialization
# was never recorded on the tape instead of having to do this manually
# here.
with tape.stop_recording():
index = real_mirrored_creator(devices, *args, **kwargs)
result = values.TPUMirroredVariable(index, index[devices[0]], aggregation)
if not context.executing_eagerly():
g = ops.get_default_graph()
# If "trainable" is True, next_creator() will add the member variables
# to the TRAINABLE_VARIABLES collection, so we manually remove
# them and replace with the MirroredVariable. We can't set
# "trainable" to False for next_creator() since that causes functions
# like implicit_gradients to skip those variables.
if kwargs.get("trainable", True):
collections.append(ops.GraphKeys.TRAINABLE_VARIABLES)
l = g.get_collection_ref(ops.GraphKeys.TRAINABLE_VARIABLES)
for v in index.values():
l.remove(v)
g.add_to_collections(collections, result)
return result
# TODO(jhseu): Stop inheriting from OneDeviceStrategy.
class TPUStrategy(one_device_strategy.OneDeviceStrategy):
"""Experimental TPU distribution strategy implementation."""
def __init__(self, tpu_cluster_resolver, steps_per_run, num_cores=None):
"""Initializes the TPUStrategy object.
Args:
tpu_cluster_resolver: A tf.contrib.cluster_resolver.TPUClusterResolver,
which provides information about the TPU cluster.
steps_per_run: Number of steps to run on device before returning to the
host. Note that this can have side-effects on performance, hooks,
metrics, summaries etc.
This parameter is only used when Distribution Strategy is used with
estimator or keras.
num_cores: Number of cores to use on the TPU. If None specified, then
auto-detect the cores and topology of the TPU system.
"""
# TODO(sourabhbajaj): OneDeviceStrategy should be initialized with the
# master node fetched from the cluster resolver.
super(TPUStrategy, self).__init__('/device:CPU:0')
self._tpu_cluster_resolver = tpu_cluster_resolver
self._tpu_metadata = get_tpu_system_metadata(self._tpu_cluster_resolver)
# TODO(sourabhbajaj): Change this from num_cores to metadata_override
self._num_cores_override = num_cores
# TODO(jhseu): Switch to DeviceAssignment to support pods and model
# parallelism.
device_map = {d.name: i for i, d in enumerate(self._tpu_metadata.devices)
if "device:TPU:" in d.name}
self._device_index = values.PerDevice(device_map)
self._tpu_devices = sorted(device_map.keys())
# Only create variables for the number of towers we're running.
self._tpu_devices = self._tpu_devices[:self.num_towers]
# TODO(sourabhbajaj): Remove this once performance of running one step
# at a time is comparable to multiple steps.
self.steps_per_run = steps_per_run
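  # A minimal construction sketch (not from this file; the TPU address is a
  # placeholder, and it assumes a reachable TPU plus the TF 1.x contrib APIs):
  #
  #   resolver = tf.contrib.cluster_resolver.TPUClusterResolver(tpu='grpc://10.0.0.2:8470')
  #   strategy = TPUStrategy(resolver, steps_per_run=100)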
def _get_enqueue_op_per_host(self, host_id, iterator, input_shapes,
iterations):
"""Create an enqueue op for a single host identified using host_id.
The while_loop op returned will run `iterations` times and in each run
enqueue batches for each shard.
Args:
host_id: integer, id of the host to run the enqueue ops on.
iterator: `tf.data` iterator to read the input data.
input_shapes: shape of inputs to be enqueue on the queue. This is same as
the value of `nest.flatten(iterator.output_shapes)`.
iterations: integer, number of iterations to be run; determines the
number of batches to be enqueued.
Returns:
while_loop_op running `iterations` times; in each run we enqueue a batch
on the infeed queue from the host with id `host_id` for each device shard.
"""
host = self.get_host_cpu_device(host_id)
def _infeed_enqueue_ops_fn():
"""Enqueue ops for one iteration."""
control_deps = []
sharded_inputs = []
enqueue_ops = []
with ops.device(host):
for _ in range(self.num_towers_per_host):
# Use control dependencies to ensure a deterministic ordering.
with ops.control_dependencies(control_deps):
inputs = nest.flatten(iterator.get_next())
control_deps.extend(inputs)
sharded_inputs.append(inputs)
for core_id, shard_input in enumerate(sharded_inputs):
enqueue_ops.append(
tpu_ops.infeed_enqueue_tuple(
inputs=shard_input,
shapes=input_shapes,
device_ordinal=core_id))
return enqueue_ops
def enqueue_ops_loop_body(i):
"""Callable for the loop body of the while_loop instantiated below."""
with ops.control_dependencies(_infeed_enqueue_ops_fn()):
return i + 1
with ops.device(host):
enqueue_op_per_host = control_flow_ops.while_loop(
lambda i: i < iterations,
enqueue_ops_loop_body,
[constant_op.constant(0)],
parallel_iterations=1)
return enqueue_op_per_host
def distribute_dataset(self, dataset_fn):
# TODO(priyag): Perhaps distribute across cores here.
return self._call_dataset_fn(dataset_fn)
# TODO(priyag): Deal with OutOfRange errors once b/111349762 is fixed.
# TODO(sourabhbajaj): Remove the initial_loop_values parameter when we have
# a mechanism to infer the outputs of `fn`. Pending b/110550782.
def _run_steps_on_dataset(self, fn, iterator, iterations,
initial_loop_values=None):
shapes = nest.flatten(iterator.output_shapes)
if any([not s.is_fully_defined() for s in shapes]):
raise ValueError(
'TPU currently requires fully defined shapes. Either use '
'set_shape() on the input tensors or use '
'dataset.batch(..., drop_remainder=True).')
types = nest.flatten(iterator.output_types)
enqueue_ops = [
self._get_enqueue_op_per_host(host_id, iterator, shapes, iterations)
for host_id in range(self.num_hosts)]
def dequeue_fn():
dequeued = tpu_ops.infeed_dequeue_tuple(dtypes=types, shapes=shapes)
return nest.pack_sequence_as(iterator.output_shapes, dequeued)
# Wrap `fn` for repeat.
if initial_loop_values is None:
initial_loop_values = {}
initial_loop_values = nest.flatten(initial_loop_values)
ctx = values.MultiStepContext()
def run_fn(*args, **kwargs):
"""Single step on the TPU device."""
del args, kwargs
fn_inputs = dequeue_fn()
if not isinstance(fn_inputs, tuple):
fn_inputs = (fn_inputs,)
fn_result = fn(ctx, *fn_inputs)
flat_last_step_outputs = nest.flatten(ctx.last_step_outputs)
if flat_last_step_outputs:
with ops.control_dependencies([fn_result]):
return [array_ops.identity(f) for f in flat_last_step_outputs]
else:
return fn_result
# TODO(sourabhbajaj): The input to while loop should be based on the output
# type of the step_fn
def iterate_on_tpu():
return training_loop.repeat(iterations, run_fn, initial_loop_values)
# We capture the control_flow_context at this point, before we run `fn`
# inside a while_loop and TPU replicate context. This is useful in cases
# where we might need to exit these contexts and get back to the outer
# context to do some things, for e.g. create an op which should be
# evaluated only once at the end of the loop on the host. One such usage
# is in creating metrics' value op.
self._outer_control_flow_context = (
ops.get_default_graph()._get_control_flow_context()) # pylint: disable=protected-access
replicate_inputs = [[]] * self.num_towers
replicate_outputs = tpu.replicate(iterate_on_tpu, replicate_inputs)
del self._outer_control_flow_context
ctx.run_op = control_flow_ops.group(replicate_outputs, enqueue_ops)
# Filter out any ops from the outputs, typically this would be the case
# when there were no tensor outputs.
last_step_tensor_outputs = [x for x in replicate_outputs
if not isinstance(x, ops.Operation)]
# Outputs are currently of the structure (grouped by device)
# [[output0_device0, output1_device0, output2_device0],
# [output0_device1, output1_device1, output2_device1]]
# Convert this to the following structure instead: (grouped by output)
# [[output0_device0, output0_device1],
# [output1_device0, output1_device1],
# [output2_device0, output2_device1]]
last_step_tensor_outputs = [list(x) for x in zip(*last_step_tensor_outputs)]
# Convert replicate_outputs to the original dict structure of
# last_step_outputs.
last_step_tensor_outputs_dict = nest.pack_sequence_as(
ctx.last_step_outputs, last_step_tensor_outputs)
for (name, aggregation) in ctx._last_step_outputs_aggregations.items(): # pylint: disable=protected-access
output = last_step_tensor_outputs_dict[name]
# For outputs that have already been aggregated, take the first value
# from the list as each value should be the same. Else return the full
# list of values.
if aggregation is not variables_lib.VariableAggregation.NONE:
# TODO(priyag): Should this return the element or a list with 1 element
last_step_tensor_outputs_dict[name] = output[0]
ctx._set_last_step_outputs(last_step_tensor_outputs_dict) # pylint: disable=protected-access
return ctx
def _call_for_each_tower(self, fn, *args, **kwargs):
# TODO(jhseu): Consider making it so call_for_each_tower implies that we're
# in a tpu.rewrite(), and update TPUMirroredVariable accordingly.
kwargs.pop('run_concurrently', None)
with one_device_strategy._OneDeviceTowerContext(self): # pylint: disable=protected-access
return fn(*args, **kwargs)
def initialize(self):
if context.executing_eagerly():
      # TODO(priyag): Add appropriate call here when eager is supported for TPUs.
raise NotImplementedError('Eager mode not supported in TPUStrategy.')
else:
# TODO(jhseu): We need this hack because DistributionStrategies must be
# pickleable for copy.deepcopy(). Remove when initialize_system goes away.
graph = ops.get_default_graph()
tpu_init = graph.get_collection(_TPU_INITIALIZE_SYSTEM_COLLECTION)
if tpu_init:
return tpu_init
graph.add_to_collection(_TPU_INITIALIZE_SYSTEM_COLLECTION,
tpu.initialize_system())
return graph.get_collection(_TPU_INITIALIZE_SYSTEM_COLLECTION)
def finalize(self):
if context.executing_eagerly():
      # TODO(priyag): Add appropriate call here when eager is supported for TPUs.
raise NotImplementedError('Eager mode not supported in TPUStrategy.')
else:
return [tpu.shutdown_system()]
def _get_devices_from(self, colocate_with=None):
# TODO(jhseu): Change this when we support model parallelism.
return self._tpu_devices
def _create_variable(self, next_creator, *args, **kwargs):
"""Create a TPUMirroredVariable. See `DistributionStrategy.scope`."""
colocate_with = kwargs.pop("colocate_with", None)
devices = self._get_devices_from(colocate_with)
def _real_mirrored_creator(devices, *args, **kwargs): # pylint: disable=g-missing-docstring
index = {}
for i, d in enumerate(devices):
with ops.device(d):
if i > 0:
# Give replicas meaningful distinct names:
var0name = index[devices[0]].name.split(":")[0]
# We append a / to variable names created on towers with id > 0 to
# ensure that we ignore the name scope and instead use the given
# name as the absolute name of the variable.
kwargs["name"] = "%s/replica_%d/" % (var0name, i)
# Initialize replicas with the same value:
if context.executing_eagerly():
kwargs["initial_value"] = array_ops.identity(
index[devices[0]].value())
else:
def initial_value_fn(device=d):
with ops.device(device):
return array_ops.identity(index[devices[0]].initial_value)
kwargs["initial_value"] = initial_value_fn
with context.context().device_policy(context.DEVICE_PLACEMENT_SILENT):
v = next_creator(*args, **kwargs)
assert not isinstance(v, values.TPUMirroredVariable)
index[d] = v
return index
return _create_tpu_mirrored_variable(devices, _real_mirrored_creator, *args,
**kwargs)
def _reduce(self, aggregation, value, destinations):
if values._enclosing_tpu_context() is not None: # pylint: disable=protected-access
if aggregation == vs.VariableAggregation.MEAN:
# TODO(jhseu): Revisit once we support model-parallelism.
value *= (1. / self.num_towers)
elif aggregation != vs.VariableAggregation.SUM:
raise NotImplementedError(
"Currently only support sum & mean in TPUStrategy.")
return tpu_ops.cross_replica_sum(value)
# Validate that the destination is same as the host device
# Note we don't do this when in replicate context as the reduction is
# performed on the TPU device itself.
devices = cross_tower_ops_lib.get_devices_from(destinations)
if len(devices) == 1:
assert device_util.canonicalize(devices[0]) == device_util.canonicalize(
self.get_host_cpu_device(0))
else:
raise ValueError('Multiple devices are not supported for TPUStrategy')
if aggregation == vs.VariableAggregation.ONLY_FIRST_TOWER:
return value[0]
output = math_ops.add_n(value)
if aggregation == vs.VariableAggregation.MEAN:
return output * (1. / len(value))
return output
def _update(self, var, fn, *args, **kwargs):
# TODO(jhseu): Consider supporting grouped==False.
assert isinstance(var, values.TPUMirroredVariable)
if values._enclosing_tpu_context() is not None: # pylint: disable=protected-access
return fn(var, *args, **kwargs)
# Otherwise, we revert to MirroredStrategy behavior and update each variable
# directly.
updates = {}
for d, v in var._index.items(): # pylint: disable=protected-access
name = "update_%d" % self._device_index.get(d)
with ops.device(d), distribute_lib.UpdateContext(d), ops.name_scope(name):
# If args and kwargs are not mirrored, the value is returned as is.
updates[d] = fn(v,
*values.select_device_mirrored(d, args),
**values.select_device_mirrored(d, kwargs))
# Make a single control dependency to keep the variables mirrored. If one
# assignment is fetched, then run all assignments.
sorted_keys = sorted(updates.keys())
update_tuple = control_flow_ops.tuple([updates[d] for d in sorted_keys])
for i, d in enumerate(sorted_keys):
updates[d] = update_tuple[i]
return values.regroup(updates, values.Mirrored)
def read_var(self, var):
assert isinstance(var, values.TPUMirroredVariable)
return var.read_value()
def _unwrap(self, value):
if isinstance(value, list):
return value
return [value]
@property
def num_towers(self):
return self._num_cores_override or self._tpu_metadata.num_cores
@property
def num_hosts(self):
return self._tpu_metadata.num_hosts
@property
def num_towers_per_host(self):
return self._tpu_metadata.num_of_cores_per_host
@property
def between_graph(self):
return False
@property
def should_init(self):
return True
@property
def should_checkpoint(self):
return True
@property
def should_save_summary(self):
return True
@property
def worker_devices(self):
return self._tpu_devices
@property
def parameter_devices(self):
return self._tpu_devices
def get_host_cpu_device(self, host_id):
if self._tpu_cluster_resolver.get_master() in ('', 'local'):
return '/replica:0/task:0/device:CPU:0'
job_name = self._tpu_cluster_resolver.get_job_name() or 'tpu_worker'
return '/job:%s/task:%d/device:CPU:0' % (job_name, host_id)
def configure(self,
session_config=None,
cluster_spec=None,
task_type=None,
task_id=None):
del cluster_spec, task_type, task_id
if session_config:
session_config.isolate_session_state = True
cluster_spec = self._tpu_cluster_resolver.cluster_spec()
if cluster_spec:
session_config.cluster_def.CopyFrom(cluster_spec.as_cluster_def())
| apache-2.0 | 6,564,893,554,403,699,000 | 40.897331 | 111 | 0.680259 | false |
ladybug-tools/honeybee | honeybee_plus/utilcol.py | 1 | 1078 | """A collection of useful utilities for Honeybee"""
import uuid
import re
def random_name(shorten=True):
"""Generate a random name as a string using uuid.
Args:
        shorten: If True the name will be the first two segments of the uuid.
"""
if shorten:
return '-'.join(str(uuid.uuid4()).split('-')[:2])
else:
return str(uuid.uuid4())
def check_name(name):
"""Check if a name is a valid honeybee name.
A valid name can only have alphabet, digits, - and _.
"""
name = name.encode('utf-8')
try:
match = re.match(b"^[.A-Za-z0-9_-]*$", name)
except TypeError:
match = re.match(r"^[.A-Za-z0-9_-]*$", name)
if match:
return True
else:
raise ValueError(
'Invalid input name: ({}).'
' Name can only contain letters, numbers,'
' dots, underscores and dashes.'.format(name)
)
if __name__ == '__main__':
check_name('should_be_fine')
# check_name('also-fine')
check_name('this.is.also.fine.1234')
# check_name('not good')
| gpl-3.0 | 1,852,447,149,315,065,000 | 24.069767 | 71 | 0.56308 | false |
zjj/trac_hack | sample-plugins/HelloWorld.py | 1 | 2140 | """Example macro."""
revision = "$Rev: 6326 $"
url = "$URL: https://svn.edgewall.org/repos/trac/tags/trac-0.12.2/sample-plugins/HelloWorld.py $"
#
# The following shows the code for macro, old-style.
#
# The `execute` function serves no purpose other than to illustrate
# the example; it is not used anymore.
#
# ---- (ignore in your own macro) ----
# --
from trac.util import escape
def execute(hdf, txt, env):
# Currently hdf is set only when the macro is called
# From a wiki page
if hdf:
hdf['wiki.macro.greeting'] = 'Hello World'
# args will be `None` if the macro is called without parenthesis.
args = txt or 'No arguments'
# then, as `txt` comes from the user, it's important to guard against
# the possibility to inject malicious HTML/Javascript, by using `escape()`:
return 'Hello World, args = ' + escape(args)
# --
# ---- (ignore in your own macro) ----
#
# The following is the converted new-style macro
#
# ---- (reuse for your own macro) ----
# --
from trac.wiki.macros import WikiMacroBase
class HelloWorldMacro(WikiMacroBase):
"""Simple HelloWorld macro.
Note that the name of the class is meaningful:
- it must end with "Macro"
- what comes before "Macro" ends up being the macro name
The documentation of the class (i.e. what you're reading)
will become the documentation of the macro, as shown by
the !MacroList macro (usually used in the TracWikiMacros page).
"""
def expand_macro(self, formatter, name, args):
"""Return some output that will be displayed in the Wiki content.
`name` is the actual name of the macro (no surprise, here it'll be
`'HelloWorld'`),
`args` is the text enclosed in parenthesis at the call of the macro.
Note that if there are ''no'' parenthesis (like in, e.g.
[[HelloWorld]]), then `args` is `None`.
"""
return 'Hello World, args = ' + unicode(args)
# Note that there's no need to HTML escape the returned data,
# as the template engine (Genshi) will do it for us.
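    # A quick usage sketch (hypothetical wiki text, per the docstring above):
    #   [[HelloWorld]]            -> "Hello World, args = None"
    #   [[HelloWorld(foo, bar)]]  -> "Hello World, args = foo, bar"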
# --
# ---- (reuse for your own macro) ----
| bsd-3-clause | 5,799,304,578,152,899,000 | 31.923077 | 97 | 0.649533 | false |
OptimalPayments/Python_SDK | src/sample_application/DirectDebitACHpurchase.py | 1 | 1780 | #!/usr/bin/env python3
'''
Created on 1-June-2016
@author: Asawari.Vaidya
'''
from PythonNetBanxSDK.CardPayments.BillingDetails import BillingDetails
from PythonNetBanxSDK.CustomerVault.ACHBankAccount import ACHBankAccount
from PythonNetBanxSDK.CustomerVault.Profile import Profile
from PythonNetBanxSDK.DirectDebit.Purchase import Purchase
from PythonNetBanxSDK.OptimalApiClient import OptimalApiClient
from utils.Utils import Utils
from Config import Config
from RandomTokenGenerator import RandomTokenGenerator
optimal_obj = OptimalApiClient(Config.api_key, Config.api_password, Config.environment, Config.account_number_ACH)
purchase_obj = Purchase(None)
purchase_obj.merchantRefNum(RandomTokenGenerator().generateToken())
purchase_obj.amount("10098")
purchase_obj.customerIp("192.0.126.111")
achbank_obj = ACHBankAccount (None)
achbank_obj.accountHolderName("XYZ Company")
achbank_obj.accountType("CHECKING")
#achbank_obj.accountNumber(RandomTokenGenerator().generateNumber())
achbank_obj.accountNumber("988948193")
achbank_obj.routingNumber("211589828")
achbank_obj.payMethod("WEB")
profile_obj = Profile(None)
profile_obj.firstName("Joe")
profile_obj.lastName("Smith")
profile_obj.email("[email protected]")
billingdetails_obj = BillingDetails(None)
billingdetails_obj.street("100 Queen Street West")
billingdetails_obj.city("Los Angeles")
billingdetails_obj.state("CA")
billingdetails_obj.country("US")
billingdetails_obj.zip("90210")
billingdetails_obj.phone("3102649010")
purchase_obj.profile(profile_obj)
purchase_obj.billingDetails(billingdetails_obj)
purchase_obj.ach(achbank_obj)
response_object = optimal_obj.direct_debit_service_handler().submit_purchase(purchase_obj)
print ("\nResponse Values ==========> ")
Utils.print_response(response_object)
| mit | -7,959,770,049,315,562,000 | 31.962963 | 114 | 0.811798 | false |
rymate1234/rymate-blog | migrations/versions/413f129e8b07_.py | 1 | 1535 | """empty message
Revision ID: 413f129e8b07
Revises: None
Create Date: 2014-05-02 08:09:09.906725
"""
# revision identifiers, used by Alembic.
revision = '413f129e8b07'
down_revision = None
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.create_table('users',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('username', sa.String(length=80), nullable=False),
sa.Column('email', sa.String(length=80), nullable=False),
sa.Column('password', sa.String(length=128), nullable=True),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('first_name', sa.String(length=30), nullable=True),
sa.Column('last_name', sa.String(length=30), nullable=True),
sa.Column('active', sa.Boolean(), nullable=True),
sa.Column('is_admin', sa.Boolean(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('email'),
sa.UniqueConstraint('username')
)
op.create_table('roles',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(length=80), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
)
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_table('roles')
op.drop_table('users')
### end Alembic commands ###
| bsd-3-clause | 5,584,822,916,619,234,000 | 30.979167 | 65 | 0.663192 | false |
zdomjus60/astrometry | tools.py | 1 | 10051 | # -*- coding: utf-8 -*-
""" helper functions for time management
"""
import math
def sin(x):
return math.sin(math.radians(x))
def cos(x):
return math.cos(math.radians(x))
def atan2(y , x):
return math.degrees(math.atan2(y, x))
def reduce360(x):
return x % 360.0
def dms2ddd(hour, minute, second):
""" from sexagesimal to decimal """
return hour+minute/60.0+second/3600.0
def ddd2dms(dec_hour):
""" from decimal to sexagesimal representation of hours and angles."""
if dec_hour < 0:
sign = -1
dec_hour *= sign
else:
sign = 1
total_seconds = int(dec_hour * 3600.0+.5)
seconds = total_seconds % 60
total_minutes = int((total_seconds - seconds)/60.0)
minutes = total_minutes % 60
hours = int((total_minutes - minutes)/60.0)
return (hours * sign, minutes * sign, seconds * sign)
def cal2jul(year, month, day, hour=0, minute=0, second=0):
""" converts calendar date to julian date
this routine and the following are built following Duffet Smith /Zwart instructions
as given in Peter Duffett-Smith-Zwart Practical Astronomy with your Calculator or Spreadsheet
Fourth Edition, Cambridge University Press, Fourth Ed. 2011
For an easier use of the function, hours minutes and seconds are defaulted to 0, so it's
not necessary to give them as parameters when the hour is 00:00:00
"""
month2 = month
year2 = year
if month2 <= 2:
year2 -= 1
month2 += 12
else:
pass
if (year*10000 + month*100 + day) >= 15821015:
a = math.trunc(year2/100.0)
b = 2 - a + math.trunc(a/4.0)
else:
a = 0
b = 0
if year < 0:
c = math.trunc((365.25 * year2)-0.75)
else:
c = math.trunc(365.25 * year2)
d = math.trunc(30.6001 *(month2 + 1))
return b + c + d + day + hour / 24.0 + minute / 1440.0 + second / 86400.0 + 1720994.5
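# A quick check against a known fixed point: the J2000.0 epoch,
# 2000 January 1 at 12:00 UT, corresponds to JD 2451545.0.
# cal2jul(2000, 1, 1, 12)   # -> 2451545.0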
def jul2cal(jd):
""" converts julian date to calendar date """
jd += 0.5
i = math.modf(jd)[1]
f = math.modf(jd)[0]
if i > 2299160:
a = math.trunc((i-1867216.25)/36524.25)
b = i + a - math.trunc(a/4)+1
else:
b = i
c = b + 1524
d = math.trunc((c-122.1)/365.25)
e = math.trunc(365.25 * d)
g = math.trunc((c-e)/30.6001)
day = c-e+f-math.trunc(30.6001*g)
if g < 13.5:
month = g - 1
else:
month = g - 13
if month > 2.5:
year = d - 4716
else:
year = d - 4715
hours_frac = math.modf(day)[0]*24
day = int(day)
hour, minute, second = ddd2dms(hours_frac)
return (year, month, day, hour, minute, second)
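# Round-tripping the same epoch recovers the calendar date:
# jul2cal(2451545.0)   # -> (2000, 1, 1, 12, 0, 0)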
def day_of_the_week(year, month, day):
""" given a calendar date, the routine returns a tuple with the Day Of The Week in number and in plaintext
0 for Sunday 1 for Monday and so on up to 6 Saturday
"""
doth = {0:'Sunday', 1:'Monday', 2:'Tuesday',
3:'Wednesday', 4:'Thursday', 5:'Friday',
6:'Saturday'}
jd = cal2jul(year, month, day, 0, 0, 0)
a = (jd+1.5)/7
f = math.trunc((a % 1)*7 +.5)
return (f,doth[f])
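# For example, 2000 January 1 fell on a Saturday:
# day_of_the_week(2000, 1, 1)   # -> (6, 'Saturday')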
def lt2ut(year, month, day, hour=0, minute=0, second=0, timezone=0, DS=0):
""" Given, for a location on the Earth,a date, a time, a timezone (East + West - in hours) and the Daylight
Savings (0 normal time 1 Daylight Savings), this routine gives back a calendar date in Universal Time
representation (year, month, day, hour, minute, second).
It aims to restore a common date and time for all places in the Earth. Timezone and
Daylight Savings can be automized knowing the location using the pytz module (Olson
database)
"""
ut = dms2ddd(hour,minute,second) - timezone - DS
greenwich_calendar_date = day + ut/24
jd = cal2jul(year, month, greenwich_calendar_date)
greenwich_calendar_date = jul2cal(jd)
return greenwich_calendar_date
def ut2lt(year, month, day, hour=0, minute=0, second=0, timezone=0, DS=0):
""" Given a date, a time for Greenwich in UT format this routine gives back a calendar date
in local time representation (year, month, day, hour, minute, second).
It's the inverse function of the previous formula
"""
lt = dms2ddd(hour,minute,second) + timezone +DS
local_calendar_date = day + lt/24
jd = cal2jul(year, month, local_calendar_date)
local_calendar_date = jul2cal(jd)
return local_calendar_date
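# A simple sanity check (pure arithmetic, no DST): noon UT in a UTC+1 zone
# is 13:00 local time.
# ut2lt(2000, 1, 1, 12, 0, 0, timezone=1)   # -> (2000, 1, 1, 13, 0, 0)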
def ut2gst(year, month, day, hour, minute, second):
""" Sidereal time is a time-keeping system astronomers use to keep track of the direction to point
their telescopes to view a given star in the night sky.
Briefly, sidereal time is a "time scale that is based on the Earth's rate of rotation measured
relative to the fixed stars." (source Wikipedia)
This routine converts Universal Time to Sidereal Time for Greenwich (Greenwich Sidereal Time)
"""
jd = cal2jul(year, month, day)
S = jd - 2451545.0
T = S/36525.0
T0 = (6.697374558 + (2400.051336 * T)+ 0.000025862 *T*T) % 24
UT = dms2ddd(hour, minute, second)*1.002737909
GST = ddd2dms((UT + T0) % 24)
return GST
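# A classic worked example from Duffett-Smith: on 1980 April 22 at
# 14h 36m 51.67s UT, Greenwich Sidereal Time is about 4h 40m 5.23s
# (here rounded to whole seconds by ddd2dms):
# ut2gst(1980, 4, 22, 14, 36, 51.67)   # -> roughly (4, 40, 5)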
def gst2ut( year, month, day, hour, minute, second):
""" Inverse of the previous function
"""
jd = cal2jul(year, month, day, 0,0,0)
S = jd - 2451545.0
T = S/36525.0
T0 = (6.697374558 + 2400.051336 * T + 0.000025862 *T*T) % 24
GST = (dms2ddd(hour, minute, second) - T0) % 24
while GST <0:
GST += 24
UT = GST * .9972695663
return ddd2dms(UT)
def gst2lst( hour, minute, second, long_degree, long_minute, long_second=0):
""" Corrects GST for a different location on the Earth
"""
GST = dms2ddd(hour,minute,second)
lg = dms2ddd(long_degree, long_minute, long_second)/15.0
lst = ddd2dms((GST + lg) % 24)
return lst
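# Continuing the same textbook example at longitude 64 degrees West
# (passed as a negative value, since this implementation simply adds
# longitude/15 hours to GST):
# gst2lst(4, 40, 5.23, -64, 0)   # -> roughly (0, 24, 5)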
def lst2gst( hour, minute, second, long_degree, long_minute, long_second=0):
""" Inverse of the previous method
"""
lst = dms2ddd(hour,minute,second)
lg = dms2ddd(long_degree, long_minute, long_second)/15.0
GST = ddd2dms((lst + lg) % 24)
return GST
def julian_centuries(year, month, day, hour=0, minute =0, second=0):
d1 = cal2jul(year, month, day, hour, minute, second)
d2 = cal2jul(2000,1,1,12)
return (d1-d2) / 36525.0
def julian_millennia(year, month, day, hour=0, minute =0, second=0):
return julian_centuries(year, month, day, hour, minute, second) / 10.0
def julian_decamillennia(year, month, day, hour=0, minute =0, second=0):
return julian_centuries(year, month, day, hour, minute, second) / 100.0
def obl_ecl_JPL(year, month, day, hour=0, minute = 0, second = 0):
t = julian_centuries(year, month, day, hour, minute, second)
""" from JPL Astronomical Almanac 2010 """
return (23 * 3600 + 26*60 + 21.406
- 46.836769 * t
- 0.0001831 * t * t
+ 0.00200340 * t * t * t
- 0.576e-6 * t * t * t * t
- 4.34e-8 * t * t * t * t * t) / 3600.0
def obl_ecl_Laskar(year, month, day, hour = 0, minute = 0, second = 0):
"""
Original work from Jay Tanner
- converted to Python code by Domenico Mustara 2015
This PHP function computes the mean obliquity of the ecliptic
given a JD argument corresponding to any given date and time.
Author: Jay Tanner - 2010
The algorithm used here is based on work published by J. Laskar
Astronomy and Astrophysics, Vol 157, p68 (1986),
New Formulas for the Precession, Valid Over 10000 years,
Table 8.
Source code provided under the provisions of the
GNU Affero General Public License (AGPL), version 3.
http://www.gnu.org/licenses/agpl.html
    In the original PHP, the time argument was computed as
    t = (JD - 2451545.0) / 3652500.0, i.e. Julian decamillennia reckoned
    from J2000; julian_decamillennia below computes the same quantity.
"""
t = julian_decamillennia(year, month, day, hour, minute, second)
w = 84381.448
w -= 4680.93 * t
w -= 1.55 * t * t
w += 1999.25 * t * t * t
w -= 51.38 * t * t * t * t
w -= 249.67 * t * t * t * t * t
w -= 39.05 * t * t * t * t * t * t
w += 7.12 * t * t * t * t * t * t * t
w += 27.87 * t * t * t * t * t * t * t * t
w += 5.79 * t * t * t * t * t * t * t * t * t
w += 2.45 * t * t * t * t * t * t * t * t * t * t
return w / 3600.0
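# Illustrative sanity check (not part of the original module): the two
# obliquity models above should agree closely near J2000, both giving
# roughly 23.439 degrees on 2000-01-01 12:00.
def _demo_compare_obliquity():
    return (obl_ecl_JPL(2000, 1, 1, 12), obl_ecl_Laskar(2000, 1, 1, 12))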
""" Some conversion utilities between various coordinate systems """
def sph_ecl2rect_ecl(r, longitude, latitude):
x = r * cos(latitude) * cos(longitude)
y = r * cos(latitude) * sin(longitude)
z = r * sin(latitude)
return (x,y,z)
def rect_ecl2sph_ecl(x,y,z):
r = math.sqrt(x*x + y*y + z*z)
longitude = atan2(y,x)
latitude = atan2(z, math.sqrt(x*x + y*y))
return (r, longitude, latitude)
def sph_equat2rect_equat(r, RA, Declination):
x = r * cos(RA) * cos(Declination)
y = r * sin(RA) * cos(Declination)
z = r * sin(Declination)
    return (x,y,z)
def rect_equat2sph_equat(x,y,z):
r = math.sqrt(x*x + y*y +z*z)
RA = atan2(y, x)
Decl = atan2(z, math.sqrt(x*x + y*y))
return (r, RA, Decl)
def rect_ecl2rect_equat(xeclip, yeclip, zeclip, year, month, day, hour = 0, minute = 0, second = 0):
oblecl = obl_ecl_JPL(year, month, day, hour, minute, second)
xequat = xeclip
yequat = yeclip * cos(oblecl) - zeclip * sin(oblecl)
zequat = yeclip * sin(oblecl) + zeclip * cos(oblecl)
return (xequat, yequat, zequat)
def rect_equat2rect_ecl(xequat, yequat, zequat, year, month, day, hour = 0, minute = 0, second = 0):
oblecl = obl_ecl_JPL(year, month, day, hour, minute, second)
xeclip = xequat
yeclip = yequat * cos(- oblecl) - zequat * sin(- oblecl)
zeclip = yequat * sin(- oblecl) + zequat * cos(- oblecl)
return (xeclip, yeclip, zeclip)
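# Illustrative sketch: an ecliptic -> equatorial -> ecliptic rectangular round
# trip should reproduce the input vector up to floating-point error. The
# sample vector and epoch are arbitrary assumptions.
def _demo_frame_roundtrip():
    x, y, z = rect_ecl2rect_equat(1.0, 0.5, 0.2, 2000, 1, 1, 12)
    return rect_equat2rect_ecl(x, y, z, 2000, 1, 1, 12)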
| cc0-1.0 | -7,099,347,639,674,084,000 | 34.641844 | 111 | 0.594369 | false |
stoeckli/iMatrixSpray | octoprint/printer.py | 1 | 20362 | # coding=utf-8
__author__ = "Gina Häußge <[email protected]>"
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
import time
import datetime
import threading
import copy
import os
#import logging, logging.config
import octoprint.util.comm as comm
import octoprint.util as util
from octoprint.settings import settings
from octoprint.events import eventManager
def getConnectionOptions():
"""
	Retrieves the available ports, baudrates, preferred port and baudrate for connecting to the printer.
"""
return {
"ports": comm.serialList(),
"baudrates": comm.baudrateList(),
"portPreference": settings().get(["serial", "port"]),
"baudratePreference": settings().getInt(["serial", "baudrate"]),
"autoconnect": settings().getBoolean(["serial", "autoconnect"])
}
class Printer():
def __init__(self, gcodeManager):
from collections import deque
self._gcodeManager = gcodeManager
self._gcodeManager.registerCallback(self)
# state
self._temp = None
self._bedTemp = None
self._targetTemp = None
self._targetBedTemp = None
self._temps = {
"actual": deque([], 300),
"target": deque([], 300),
"actualBed": deque([], 300),
"targetBed": deque([], 300)
}
self._tempBacklog = []
self._latestMessage = None
self._messages = deque([], 300)
self._messageBacklog = []
self._latestLog = None
self._log = deque([], 300)
self._logBacklog = []
self._state = None
self._currentZ = None
self._progress = None
self._printTime = None
self._printTimeLeft = None
self._printAfterSelect = False
# sd handling
self._sdPrinting = False
self._sdStreaming = False
self._selectedFile = None
# comm
self._comm = None
# callbacks
self._callbacks = []
self._lastProgressReport = None
self._stateMonitor = StateMonitor(
ratelimit=0.5,
updateCallback=self._sendCurrentDataCallbacks,
addTemperatureCallback=self._sendAddTemperatureCallbacks,
addLogCallback=self._sendAddLogCallbacks,
addMessageCallback=self._sendAddMessageCallbacks
)
self._stateMonitor.reset(
state={"state": None, "stateString": self.getStateString(), "flags": self._getStateFlags()},
			jobData={"filename": None, "filesize": None, "estimatedPrintTime": None, "filament": None},
			progress={"progress": None, "filepos": None, "printTime": None, "printTimeLeft": None},
currentZ=None
)
#~~ callback handling
def registerCallback(self, callback):
self._callbacks.append(callback)
self._sendInitialStateUpdate(callback)
def unregisterCallback(self, callback):
if callback in self._callbacks:
self._callbacks.remove(callback)
def _sendAddTemperatureCallbacks(self, data):
for callback in self._callbacks:
try: callback.addTemperature(data)
except: pass
def _sendAddLogCallbacks(self, data):
for callback in self._callbacks:
try: callback.addLog(data)
except: pass
def _sendAddMessageCallbacks(self, data):
for callback in self._callbacks:
try: callback.addMessage(data)
except: pass
def _sendCurrentDataCallbacks(self, data):
for callback in self._callbacks:
try: callback.sendCurrentData(copy.deepcopy(data))
except: pass
def _sendTriggerUpdateCallbacks(self, type):
for callback in self._callbacks:
try: callback.sendUpdateTrigger(type)
except: pass
def _sendFeedbackCommandOutput(self, name, output):
for callback in self._callbacks:
try: callback.sendFeedbackCommandOutput(name, output)
except: pass
#~~ callback from gcodemanager
def sendUpdateTrigger(self, type):
if type == "gcodeFiles" and self._selectedFile:
self._setJobData(self._selectedFile["filename"],
self._selectedFile["filesize"],
self._selectedFile["sd"])
#~~ printer commands
def connect(self, port=None, baudrate=None):
"""
Connects to the printer. If port and/or baudrate is provided, uses these settings, otherwise autodetection
will be attempted.
"""
if self._comm is not None:
self._comm.close()
self._comm = comm.MachineCom(port, baudrate, callbackObject=self)
def disconnect(self):
"""
Closes the connection to the printer.
"""
if self._comm is not None:
self._comm.close()
self._comm = None
eventManager().fire("Disconnected")
def command(self, command):
"""
Sends a single gcode command to the printer.
"""
self.commands([command])
def commands(self, commands):
"""
Sends multiple gcode commands (provided as a list) to the printer.
"""
for command in commands:
self._comm.sendCommand(command)
def selectFile(self, filename, sd, printAfterSelect=False):
if self._comm is None or (self._comm.isBusy() or self._comm.isStreaming()):
return
self._printAfterSelect = printAfterSelect
self._comm.selectFile(filename, sd)
self._setProgressData(0, None, None, None)
self._setCurrentZ(None)
def unselectFile(self):
if self._comm is not None and (self._comm.isBusy() or self._comm.isStreaming()):
return
self._comm.unselectFile()
self._setProgressData(0, None, None, None)
self._setCurrentZ(None)
def startPrint(self):
"""
Starts the currently loaded print job.
		Only starts if the printer is connected and operational, not currently printing, and a printjob is loaded.
"""
if self._comm is None or not self._comm.isOperational() or self._comm.isPrinting():
return
if self._selectedFile is None:
return
self._setCurrentZ(None)
self._comm.startPrint()
def togglePausePrint(self):
"""
Pause the current printjob.
"""
if self._comm is None:
return
self._comm.setPause(not self._comm.isPaused())
def cancelPrint(self, disableMotorsAndHeater=True):
"""
Cancel the current printjob.
"""
if self._comm is None:
return
self._comm.cancelPrint()
if disableMotorsAndHeater:
self.commands(["M84", "M104 S0", "M140 S0", "M106 S0"]) # disable motors, switch off heaters and fan
# reset progress, height, print time
self._setCurrentZ(None)
self._setProgressData(None, None, None, None)
# mark print as failure
if self._selectedFile is not None:
self._gcodeManager.printFailed(self._selectedFile["filename"])
eventManager().fire("PrintFailed", self._selectedFile["filename"])
#~~ state monitoring
def _setCurrentZ(self, currentZ):
self._currentZ = currentZ
formattedCurrentZ = None
if self._currentZ:
formattedCurrentZ = "%.2f mm" % (self._currentZ)
self._stateMonitor.setCurrentZ(formattedCurrentZ)
def _setState(self, state):
self._state = state
self._stateMonitor.setState({"state": self._state, "stateString": self.getStateString(), "flags": self._getStateFlags()})
def _addLog(self, log):
self._log.append(log)
self._stateMonitor.addLog(log)
def _addMessage(self, message):
self._messages.append(message)
self._stateMonitor.addMessage(message)
def _setProgressData(self, progress, filepos, printTime, printTimeLeft):
self._progress = progress
self._printTime = printTime
self._printTimeLeft = printTimeLeft
formattedPrintTime = None
if (self._printTime):
formattedPrintTime = util.getFormattedTimeDelta(datetime.timedelta(seconds=self._printTime))
formattedPrintTimeLeft = None
if (self._printTimeLeft):
formattedPrintTimeLeft = util.getFormattedTimeDelta(datetime.timedelta(minutes=self._printTimeLeft))
formattedFilePos = None
if (filepos):
formattedFilePos = util.getFormattedSize(filepos)
self._stateMonitor.setProgress({"progress": self._progress, "filepos": formattedFilePos, "printTime": formattedPrintTime, "printTimeLeft": formattedPrintTimeLeft})
def _addTemperatureData(self, temp, bedTemp, targetTemp, bedTargetTemp):
currentTimeUtc = int(time.time() * 1000)
self._temps["actual"].append((currentTimeUtc, temp))
self._temps["target"].append((currentTimeUtc, targetTemp))
self._temps["actualBed"].append((currentTimeUtc, bedTemp))
self._temps["targetBed"].append((currentTimeUtc, bedTargetTemp))
self._temp = temp
self._bedTemp = bedTemp
self._targetTemp = targetTemp
self._targetBedTemp = bedTargetTemp
self._stateMonitor.addTemperature({"currentTime": currentTimeUtc, "temp": self._temp, "bedTemp": self._bedTemp, "targetTemp": self._targetTemp, "targetBedTemp": self._targetBedTemp})
def _setJobData(self, filename, filesize, sd):
if filename is not None:
self._selectedFile = {
"filename": filename,
"filesize": filesize,
"sd": sd
}
else:
self._selectedFile = None
formattedFilename = None
formattedFilesize = None
estimatedPrintTime = None
fileMTime = None
filament = None
if filename:
formattedFilename = os.path.basename(filename)
# Use a string for mtime because it could be float and the
# javascript needs to exact match
if not sd:
fileMTime = str(os.stat(filename).st_mtime)
if filesize:
formattedFilesize = util.getFormattedSize(filesize)
fileData = self._gcodeManager.getFileData(filename)
if fileData is not None and "gcodeAnalysis" in fileData.keys():
if "estimatedPrintTime" in fileData["gcodeAnalysis"].keys():
estimatedPrintTime = fileData["gcodeAnalysis"]["estimatedPrintTime"]
if "filament" in fileData["gcodeAnalysis"].keys():
filament = fileData["gcodeAnalysis"]["filament"]
self._stateMonitor.setJobData({"filename": formattedFilename, "filesize": formattedFilesize, "estimatedPrintTime": estimatedPrintTime, "filament": filament, "sd": sd, "mtime": fileMTime})
def _sendInitialStateUpdate(self, callback):
try:
data = self._stateMonitor.getCurrentData()
# convert the dict of deques to a dict of lists
temps = {k: list(v) for (k,v) in self._temps.iteritems()}
data.update({
"temperatureHistory": temps,
"logHistory": list(self._log),
"messageHistory": list(self._messages)
})
callback.sendHistoryData(data)
except Exception, err:
import sys
sys.stderr.write("ERROR: %s\n" % str(err))
pass
def _getStateFlags(self):
if not settings().getBoolean(["feature", "sdSupport"]) or self._comm is None:
sdReady = False
else:
sdReady = self._comm.isSdReady()
return {
"operational": self.isOperational(),
"printing": self.isPrinting(),
"closedOrError": self.isClosedOrError(),
"error": self.isError(),
"paused": self.isPaused(),
"ready": self.isReady(),
"sdReady": sdReady
}
def getCurrentData(self):
return self._stateMonitor.getCurrentData()
#~~ callbacks triggered from self._comm
def mcLog(self, message):
"""
Callback method for the comm object, called upon log output.
"""
self._addLog(message)
def mcTempUpdate(self, temp, bedTemp, targetTemp, bedTargetTemp):
self._addTemperatureData(temp, bedTemp, targetTemp, bedTargetTemp)
def mcStateChange(self, state):
"""
Callback method for the comm object, called if the connection state changes.
"""
oldState = self._state
# forward relevant state changes to gcode manager
if self._comm is not None and oldState == self._comm.STATE_PRINTING:
if self._selectedFile is not None:
if state == self._comm.STATE_OPERATIONAL:
self._gcodeManager.printSucceeded(self._selectedFile["filename"])
elif state == self._comm.STATE_CLOSED or state == self._comm.STATE_ERROR or state == self._comm.STATE_CLOSED_WITH_ERROR:
self._gcodeManager.printFailed(self._selectedFile["filename"])
self._gcodeManager.resumeAnalysis() # printing done, put those cpu cycles to good use
elif self._comm is not None and state == self._comm.STATE_PRINTING:
self._gcodeManager.pauseAnalysis() # do not analyse gcode while printing
self._setState(state)
def mcMessage(self, message):
"""
Callback method for the comm object, called upon message exchanges via serial.
Stores the message in the message buffer, truncates buffer to the last 300 lines.
"""
self._addMessage(message)
def mcProgress(self):
"""
Callback method for the comm object, called upon any change in progress of the printjob.
Triggers storage of new values for printTime, printTimeLeft and the current progress.
"""
self._setProgressData(self._comm.getPrintProgress(), self._comm.getPrintFilepos(), self._comm.getPrintTime(), self._comm.getPrintTimeRemainingEstimate())
def mcZChange(self, newZ):
"""
Callback method for the comm object, called upon change of the z-layer.
"""
oldZ = self._currentZ
if newZ != oldZ:
			# we have to react to all z-changes, even those that might "go backward" due to a slicer's retraction or
			# anti-backlash-routines. Event subscribers should individually take care to filter out "wrong" z-changes
eventManager().fire("ZChange", newZ)
self._setCurrentZ(newZ)
def mcSdStateChange(self, sdReady):
self._stateMonitor.setState({"state": self._state, "stateString": self.getStateString(), "flags": self._getStateFlags()})
def mcSdFiles(self, files):
self._sendTriggerUpdateCallbacks("gcodeFiles")
def mcFileSelected(self, filename, filesize, sd):
self._setJobData(filename, filesize, sd)
self._stateMonitor.setState({"state": self._state, "stateString": self.getStateString(), "flags": self._getStateFlags()})
if self._printAfterSelect:
self.startPrint()
def mcPrintjobDone(self):
self._setProgressData(1.0, self._selectedFile["filesize"], self._comm.getPrintTime(), 0)
self._stateMonitor.setState({"state": self._state, "stateString": self.getStateString(), "flags": self._getStateFlags()})
def mcFileTransferStarted(self, filename, filesize):
self._sdStreaming = True
self._setJobData(filename, filesize, True)
self._setProgressData(0.0, 0, 0, None)
self._stateMonitor.setState({"state": self._state, "stateString": self.getStateString(), "flags": self._getStateFlags()})
def mcFileTransferDone(self):
self._sdStreaming = False
self._setCurrentZ(None)
self._setJobData(None, None, None)
self._setProgressData(None, None, None, None)
self._stateMonitor.setState({"state": self._state, "stateString": self.getStateString(), "flags": self._getStateFlags()})
def mcReceivedRegisteredMessage(self, command, output):
self._sendFeedbackCommandOutput(command, output)
#~~ sd file handling
def getSdFiles(self):
if self._comm is None:
return
return self._comm.getSdFiles()
def addSdFile(self, filename, path):
if not self._comm or self._comm.isBusy():
return
self._comm.startFileTransfer(path, filename[:8].lower() + ".gco")
def deleteSdFile(self, filename):
if not self._comm:
return
self._comm.deleteSdFile(filename)
def initSdCard(self):
if not self._comm:
return
self._comm.initSdCard()
def releaseSdCard(self):
if not self._comm:
return
self._comm.releaseSdCard()
def refreshSdFiles(self):
if not self._comm:
return
self._comm.refreshSdFiles()
#~~ state reports
def getStateString(self):
"""
Returns a human readable string corresponding to the current communication state.
"""
if self._comm is None:
return "Offline"
else:
return self._comm.getStateString()
def getCurrentData(self):
return self._stateMonitor.getCurrentData()
def getCurrentJob(self):
currentData = self._stateMonitor.getCurrentData()
return currentData["job"]
def getCurrentTemperatures(self):
return {
"extruder": {
"current": self._temp,
"target": self._targetTemp
},
"bed": {
"current": self._bedTemp,
"target": self._targetBedTemp
}
}
def isClosedOrError(self):
return self._comm is None or self._comm.isClosedOrError()
def isOperational(self):
return self._comm is not None and self._comm.isOperational()
def isPrinting(self):
return self._comm is not None and self._comm.isPrinting()
def isPaused(self):
return self._comm is not None and self._comm.isPaused()
def isError(self):
return self._comm is not None and self._comm.isError()
def isReady(self):
return self.isOperational() and not self._comm.isStreaming()
	def isLoading(self):
		# the loader attribute only exists while a GcodeLoader thread is active
		return getattr(self, "_gcodeLoader", None) is not None
class GcodeLoader(threading.Thread):
"""
The GcodeLoader takes care of loading a gcode-File from disk and parsing it into a gcode object in a separate
thread while constantly notifying interested listeners about the current progress.
The progress is returned as a float value between 0 and 1 which is to be interpreted as the percentage of completion.
"""
def __init__(self, filename, progressCallback, loadedCallback):
threading.Thread.__init__(self)
self._progressCallback = progressCallback
self._loadedCallback = loadedCallback
self._filename = filename
self._gcodeList = None
def run(self):
#Send an initial M110 to reset the line counter to zero.
prevLineType = lineType = "CUSTOM"
gcodeList = ["M110 N0"]
filesize = os.stat(self._filename).st_size
with open(self._filename, "r") as file:
for line in file:
if line.startswith(";TYPE:"):
lineType = line[6:].strip()
if ";" in line:
line = line[0:line.find(";")]
line = line.strip()
if len(line) > 0:
if prevLineType != lineType:
gcodeList.append((line, lineType, ))
else:
gcodeList.append(line)
prevLineType = lineType
self._onLoadingProgress(float(file.tell()) / float(filesize))
self._gcodeList = gcodeList
self._loadedCallback(self._filename, self._gcodeList)
def _onLoadingProgress(self, progress):
self._progressCallback(self._filename, progress, "loading")
def _onParsingProgress(self, progress):
self._progressCallback(self._filename, progress, "parsing")
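# Illustrative sketch only: driving a GcodeLoader with plain functions. The
# file path is an assumption; progress arrives as a float in [0, 1] together
# with a phase string ("loading" or "parsing").
def _demo_gcode_loader():
	def on_progress(filename, progress, mode):
		print("%s: %d%% (%s)" % (filename, int(progress * 100), mode))
	def on_loaded(filename, gcode_list):
		print("%s: %d lines loaded" % (filename, len(gcode_list)))
	loader = GcodeLoader("/tmp/example.gcode", on_progress, on_loaded)
	loader.start()
	loader.join()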
class SdFileStreamer(threading.Thread):
def __init__(self, comm, filename, file, progressCallback, finishCallback):
threading.Thread.__init__(self)
self._comm = comm
self._filename = filename
self._file = file
self._progressCallback = progressCallback
self._finishCallback = finishCallback
def run(self):
if self._comm.isBusy():
return
name = self._filename[:self._filename.rfind(".")]
sdFilename = name[:8].lower() + ".gco"
try:
size = os.stat(self._file).st_size
with open(self._file, "r") as f:
self._comm.startSdFileTransfer(sdFilename)
for line in f:
if ";" in line:
line = line[0:line.find(";")]
line = line.strip()
if len(line) > 0:
self._comm.sendCommand(line)
time.sleep(0.001) # do not send too fast
self._progressCallback(sdFilename, float(f.tell()) / float(size))
finally:
self._comm.endSdFileTransfer(sdFilename)
self._finishCallback(sdFilename)
class StateMonitor(object):
def __init__(self, ratelimit, updateCallback, addTemperatureCallback, addLogCallback, addMessageCallback):
self._ratelimit = ratelimit
self._updateCallback = updateCallback
self._addTemperatureCallback = addTemperatureCallback
self._addLogCallback = addLogCallback
self._addMessageCallback = addMessageCallback
self._state = None
self._jobData = None
self._gcodeData = None
self._sdUploadData = None
self._currentZ = None
self._progress = None
self._changeEvent = threading.Event()
self._lastUpdate = time.time()
self._worker = threading.Thread(target=self._work)
self._worker.daemon = True
self._worker.start()
def reset(self, state=None, jobData=None, progress=None, currentZ=None):
self.setState(state)
self.setJobData(jobData)
self.setProgress(progress)
self.setCurrentZ(currentZ)
def addTemperature(self, temperature):
self._addTemperatureCallback(temperature)
self._changeEvent.set()
def addLog(self, log):
self._addLogCallback(log)
self._changeEvent.set()
def addMessage(self, message):
self._addMessageCallback(message)
self._changeEvent.set()
def setCurrentZ(self, currentZ):
self._currentZ = currentZ
self._changeEvent.set()
def setState(self, state):
self._state = state
self._changeEvent.set()
def setJobData(self, jobData):
self._jobData = jobData
self._changeEvent.set()
def setProgress(self, progress):
self._progress = progress
self._changeEvent.set()
def _work(self):
while True:
self._changeEvent.wait()
now = time.time()
delta = now - self._lastUpdate
additionalWaitTime = self._ratelimit - delta
if additionalWaitTime > 0:
time.sleep(additionalWaitTime)
data = self.getCurrentData()
self._updateCallback(data)
self._lastUpdate = time.time()
self._changeEvent.clear()
def getCurrentData(self):
return {
"state": self._state,
"job": self._jobData,
"currentZ": self._currentZ,
"progress": self._progress
}
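# Illustrative sketch only: with a 0.5 s rate limit, rapid state updates are
# coalesced and the update callback fires at most roughly twice per second
# with the latest snapshot. All callbacks here are placeholder lambdas.
def _demo_state_monitor():
	monitor = StateMonitor(
		ratelimit=0.5,
		updateCallback=lambda data: None,
		addTemperatureCallback=lambda data: None,
		addLogCallback=lambda data: None,
		addMessageCallback=lambda data: None
	)
	for i in range(10):
		monitor.setProgress({"progress": i / 10.0})
		time.sleep(0.05)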
| agpl-3.0 | 2,885,922,597,023,972,000 | 28.379509 | 189 | 0.712525 | false |
nimasmi/wagtail | wagtail/core/blocks/struct_block.py | 1 | 8310 | import collections
from django import forms
from django.core.exceptions import ValidationError
from django.forms.utils import ErrorList
from django.template.loader import render_to_string
from django.utils.functional import cached_property
from django.utils.html import format_html, format_html_join
from django.utils.safestring import mark_safe
from wagtail.admin.staticfiles import versioned_static
from .base import Block, DeclarativeSubBlocksMetaclass
from .utils import js_dict
__all__ = ['BaseStructBlock', 'StructBlock', 'StructValue']
class StructValue(collections.OrderedDict):
    """ A class that generates a StructBlock value from provided sub-blocks """
def __init__(self, block, *args):
super().__init__(*args)
self.block = block
def __html__(self):
return self.block.render(self)
def render_as_block(self, context=None):
return self.block.render(self, context=context)
@cached_property
def bound_blocks(self):
return collections.OrderedDict([
(name, block.bind(self.get(name)))
for name, block in self.block.child_blocks.items()
])
class BaseStructBlock(Block):
def __init__(self, local_blocks=None, **kwargs):
self._constructor_kwargs = kwargs
super().__init__(**kwargs)
# create a local (shallow) copy of base_blocks so that it can be supplemented by local_blocks
self.child_blocks = self.base_blocks.copy()
if local_blocks:
for name, block in local_blocks:
block.set_name(name)
self.child_blocks[name] = block
self.child_js_initializers = {}
for name, block in self.child_blocks.items():
js_initializer = block.js_initializer()
if js_initializer is not None:
self.child_js_initializers[name] = js_initializer
self.dependencies = self.child_blocks.values()
def get_default(self):
"""
Any default value passed in the constructor or self.meta is going to be a dict
rather than a StructValue; for consistency, we need to convert it to a StructValue
for StructBlock to work with
"""
return self._to_struct_value(self.meta.default.items())
def js_initializer(self):
# skip JS setup entirely if no children have js_initializers
if not self.child_js_initializers:
return None
return "StructBlock(%s)" % js_dict(self.child_js_initializers)
@property
def media(self):
return forms.Media(js=[versioned_static('wagtailadmin/js/blocks/struct.js')])
def get_form_context(self, value, prefix='', errors=None):
if errors:
if len(errors) > 1:
# We rely on StructBlock.clean throwing a single ValidationError with a specially crafted
# 'params' attribute that we can pull apart and distribute to the child blocks
raise TypeError('StructBlock.render_form unexpectedly received multiple errors')
error_dict = errors.as_data()[0].params
else:
error_dict = {}
bound_child_blocks = collections.OrderedDict([
(
name,
block.bind(value.get(name, block.get_default()),
prefix="%s-%s" % (prefix, name), errors=error_dict.get(name))
)
for name, block in self.child_blocks.items()
])
return {
'children': bound_child_blocks,
'help_text': getattr(self.meta, 'help_text', None),
'classname': self.meta.form_classname,
'block_definition': self,
'prefix': prefix,
}
def render_form(self, value, prefix='', errors=None):
context = self.get_form_context(value, prefix=prefix, errors=errors)
return mark_safe(render_to_string(self.meta.form_template, context))
def value_from_datadict(self, data, files, prefix):
return self._to_struct_value([
(name, block.value_from_datadict(data, files, '%s-%s' % (prefix, name)))
for name, block in self.child_blocks.items()
])
def value_omitted_from_data(self, data, files, prefix):
return all(
block.value_omitted_from_data(data, files, '%s-%s' % (prefix, name))
for name, block in self.child_blocks.items()
)
def clean(self, value):
result = [] # build up a list of (name, value) tuples to be passed to the StructValue constructor
errors = {}
for name, val in value.items():
try:
result.append((name, self.child_blocks[name].clean(val)))
except ValidationError as e:
errors[name] = ErrorList([e])
if errors:
# The message here is arbitrary - StructBlock.render_form will suppress it
# and delegate the errors contained in the 'params' dict to the child blocks instead
raise ValidationError('Validation error in StructBlock', params=errors)
return self._to_struct_value(result)
def to_python(self, value):
""" Recursively call to_python on children and return as a StructValue """
return self._to_struct_value([
(
name,
(child_block.to_python(value[name]) if name in value else child_block.get_default())
# NB the result of get_default is NOT passed through to_python, as it's expected
# to be in the block's native type already
)
for name, child_block in self.child_blocks.items()
])
def _to_struct_value(self, block_items):
""" Return a Structvalue representation of the sub-blocks in this block """
return self.meta.value_class(self, block_items)
def get_prep_value(self, value):
""" Recursively call get_prep_value on children and return as a plain dict """
return dict([
(name, self.child_blocks[name].get_prep_value(val))
for name, val in value.items()
])
def get_api_representation(self, value, context=None):
""" Recursively call get_api_representation on children and return as a plain dict """
return dict([
(name, self.child_blocks[name].get_api_representation(val, context=context))
for name, val in value.items()
])
def get_searchable_content(self, value):
content = []
for name, block in self.child_blocks.items():
content.extend(block.get_searchable_content(value.get(name, block.get_default())))
return content
def deconstruct(self):
"""
Always deconstruct StructBlock instances as if they were plain StructBlocks with all of the
field definitions passed to the constructor - even if in reality this is a subclass of StructBlock
with the fields defined declaratively, or some combination of the two.
This ensures that the field definitions get frozen into migrations, rather than leaving a reference
to a custom subclass in the user's models.py that may or may not stick around.
"""
path = 'wagtail.core.blocks.StructBlock'
args = [list(self.child_blocks.items())]
kwargs = self._constructor_kwargs
return (path, args, kwargs)
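        # Illustrative sketch of the result (the block names here are
        # assumptions, not fixtures from this module):
        #
        #   class LinkBlock(StructBlock):
        #       title = CharBlock()
        #       url = URLBlock()
        #
        #   LinkBlock().deconstruct()
        #   # -> ('wagtail.core.blocks.StructBlock',
        #   #     [[('title', CharBlock(...)), ('url', URLBlock(...))]],
        #   #     {})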
def check(self, **kwargs):
errors = super().check(**kwargs)
for name, child_block in self.child_blocks.items():
errors.extend(child_block.check(**kwargs))
errors.extend(child_block._check_name(**kwargs))
return errors
def render_basic(self, value, context=None):
return format_html('<dl>\n{}\n</dl>', format_html_join(
'\n', ' <dt>{}</dt>\n <dd>{}</dd>', value.items()))
class Meta:
default = {}
form_classname = 'struct-block'
form_template = 'wagtailadmin/block_forms/struct.html'
value_class = StructValue
# No icon specified here, because that depends on the purpose that the
# block is being used for. Feel encouraged to specify an icon in your
# descendant block type
icon = "placeholder"
class StructBlock(BaseStructBlock, metaclass=DeclarativeSubBlocksMetaclass):
pass
| bsd-3-clause | 7,582,453,976,146,293,000 | 37.472222 | 107 | 0.622262 | false |
ypid/series60-remote | pc/devices/status_numbers.py | 1 | 2071 | # -*- coding: utf-8 -*-
# Copyright (c) 2008 - 2010 Lukas Hetzenecker <[email protected]>
NUM_CONNECTED = 100
NUM_HELLO_REQUEST = 110
NUM_HELLO_REPLY = 111
NUM_QUIT = 120
NUM_PARTIAL_MESSAGE = 130
NUM_CONTACTS_REQUEST_HASH_ALL = 200
NUM_CONTACTS_REQUEST_HASH_SINGLE = 201
NUM_CONTACTS_REQUEST_CONTACT = 204
NUM_CONTACTS_REQUEST_CONTACTS_ALL = 205
NUM_CONTACTS_REPLY_HASH_ALL = 210
NUM_CONTACTS_REPLY_HASH_SINGLE_START = 211
NUM_CONTACTS_REPLY_HASH_SINGLE_LINE = 212
NUM_CONTACTS_REPLY_HASH_SINGLE_END = 213
NUM_CONTACTS_REPLY_CONTACT_START = 220
NUM_CONTACTS_REPLY_CONTACT_LINE = 221
NUM_CONTACTS_REPLY_CONTACT_END = 222
NUM_CONTACTS_REPLY_CONTACTS_ALL_END = 223
NUM_CONTACTS_ADD = 230
NUM_CONTACTS_ADD_REPLY_ID = 231
NUM_CONTACTS_DELETE = 232
NUM_CONTACTS_CHANGE_ADDFIELD = 233
NUM_CONTACTS_CHANGE_REMOVEFIELD = 234
NUM_SYSINFO_REQUEST = 250
NUM_SYSINFO_REPLY_START = 260
NUM_SYSINFO_REPLY_LINE = 261
NUM_SYSINFO_REPLY_END = 262
NUM_MESSAGE_SEND_REQUEST = 300
NUM_MESSAGE_SEND_REPLY_OK = 301
NUM_MESSAGE_SEND_REPLY_STATUS = 302
NUM_MESSAGE_SEND_REPLY_FAILURE = 303
NUM_MESSAGE_SEND_REPLY_RETRY = 304
NUM_SET_READ = 320
NUM_MESSAGE_NEW = 350
NUM_MESSAGE_REQUEST = 351
NUM_MESSAGE_REPLY_LINE = 352
NUM_MESSAGE_REPLY_END = 353
NUM_MESSAGE_REQUEST_UNREAD = 370
NUM_MESSAGE_REPLY_UNREAD = 371
NUM_CALENDAR_REQUEST_HASH_ALL = 380
#NUM_CALENDAR_REQUEST_HASH_SINGLE = 381
NUM_CALENDAR_REQUEST_ENTRY = 382
NUM_CALENDAR_REQUEST_ENTRIES_ALL = 383
NUM_CALENDAR_REPLY_HASH_ALL = 384
#NUM_CALENDAR_REPLY_HASH_SINGLE_START = 385
#NUM_CALENDAR_REPLY_HASH_SINGLE_LINE = 386
#NUM_CALENDAR_REPLY_HASH_SINGLE_END = 387
NUM_CALENDAR_REPLY_ENTRIES_START = 388
NUM_CALENDAR_REPLY_ENTRY = 389
NUM_CALENDAR_REPLY_ENTRIES_END = 390
NUM_CALENDAR_ENTRY_ADD = 395
NUM_CALENDAR_ENTRY_ADD_REPLY = 396
NUM_CALENDAR_ENTRY_DELETE = 397
NUM_CALENDAR_ENTRY_CHANGE = 398
NUM_CALENDAR_ENTRY_CHANGE_REPLY_TIME = 399
NUM_INCOMING_CALL = 400
NUM_DEBUG = 999
NUM_END_HEADER = chr(0x02) # Start of Text
NUM_SEPERATOR = chr(0x1E) # Record Separator
NUM_END_TEXT = chr(0x03) # End of Text
PROTOCOL_VERSION = 1.5
| gpl-2.0 | 2,607,002,893,510,730,000 | 26.986486 | 59 | 0.759536 | false |
tspus/python-matchingPursuit | src/utils.py | 1 | 5766 | #!/usr/bin/env python
#-*- coding: utf-8 -*-
'''
# This file is part of Matching Pursuit Python program (python-MP).
#
# python-MP is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# python-MP is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with python-MP. If not, see <http://www.gnu.org/licenses/>.
author: Tomasz Spustek
e-mail: [email protected]
University of Warsaw, July 06, 2015
'''
from __future__ import division
import numpy as np
from scipy.io import savemat
from src.processing import calculateMP
def saveBookAsMat(book , data , config , nameOfFile):
# matrix2save = np.zeros([data.shape[0],data.shape[1],config['maxNumberOfIterations']] , dtype='complex') # trials x channels x iterations
results = {}
for indTrial in np.arange(data.shape[0]):
for indChannel in np.arange(data.shape[1]):
partialBook = book[indTrial,indChannel]
nameOfStruct = 'trial_' + str(indTrial) + 'channel_' + str(indChannel)
results[nameOfStruct] = {col_name : partialBook[col_name].values for col_name in partialBook.columns.values}
savemat(nameOfFile , results)
return 'ok'
def generateFinalConfig(dictionaryConfig , dataInfo , algorithmConfig):
flags = {}
flags['useAsymA'] = dictionaryConfig['useAsym']
flags['useRectA'] = dictionaryConfig['useRect']
flags['useGradientOptimization'] = algorithmConfig['useGradient']
flags['displayInfo'] = algorithmConfig['displayInfo']
config = {}
config['flags'] = flags
config['algorithm'] = algorithmConfig['algorithmType']
config['minS'] = dictionaryConfig['minS_samples']
config['maxS'] = dictionaryConfig['maxS_samples']
config['density'] = dictionaryConfig['dictionaryDensity']
config['maxNumberOfIterations'] = algorithmConfig['iterationsLimit']
config['minEnergyExplained'] = algorithmConfig['energyLimit']
config['samplingFrequency'] = dataInfo['samplingFreq']
config['minNFFT'] = algorithmConfig['nfft']
config['channels2calc'] = algorithmConfig['channelsRange']
config['trials2calc'] = algorithmConfig['trialsRange']
return config
def retranslateDictionaryConfig(dictionaryConfig):
config = {}
flags = {}
flags['useAsymA'] = dictionaryConfig['useAsym']
flags['useRectA'] = dictionaryConfig['useRect']
config['flags'] = flags
config['minS'] = dictionaryConfig['minS_samples']
config['maxS'] = dictionaryConfig['maxS_samples']
config['density'] = dictionaryConfig['dictionaryDensity']
return config
def generateRangeFromString(text):
	text = text.replace(' ' , '')
	text = text.replace(',' , ' ')
	text = text.split()
	finalRange = []
	for element in text:
		# ':', '-' and ';' all denote an inclusive range, e.g. '2:5' -> 2,3,4,5
		for separator in (':' , '-' , ';'):
			position = element.find(separator)
			if position != -1:
				start = int(element[0:position])
				end = int(element[position+1:]) + 1
				finalRange.extend(range(start , end))
				break
		else:
			# no range separator found -- a single number
			finalRange.append(int(element))
	finalRange = np.array(finalRange)
	finalRange.sort()
	finalRange = np.unique(finalRange)
	return finalRange
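# Illustrative sketch (hypothetical input): ':', '-' and ';' are equivalent
# inclusive range separators, so '1:3, 7, 9-10' parses to [ 1 2 3 7 9 10].
def _demoGenerateRangeFromString():
	return generateRangeFromString('1:3, 7, 9-10')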
def determineAlgorithmConfig(dataInfo):
config = {}
config['algorithmType'] = 'smp'
config['useGradient'] = 1
config['displayInfo'] = 0
config['nfft'] = 1 << (int(dataInfo['samplingFreq'])-1).bit_length()
config['energyLimit'] = 0.99
config['iterationsLimit'] = 20
config['channels2calc'] = '1:' + str(dataInfo['numberOfChannels'])
config['channelsRange'] = generateRangeFromString(config['channels2calc'])
config['trials2calc'] = '1:' + str(dataInfo['numberOfTrials'])
config['trialsRange'] = generateRangeFromString(config['trials2calc'])
return config
def determineDictionaryConfig(dictionaryConfig , energyLimit , dataInfo):
density = 1.0 - energyLimit
if dictionaryConfig == {}:
dictionaryConfig['useAsym'] = 0
dictionaryConfig['useRect'] = 0
dictionaryConfig['minS_samples'] = int((dataInfo['numberOfSeconds']/16)*dataInfo['samplingFreq'])
dictionaryConfig['minS_seconds'] = float(dataInfo['numberOfSeconds']/16)
dictionaryConfig['maxS_samples'] = int(dataInfo['numberOfSamples'])
dictionaryConfig['maxS_seconds'] = float(dataInfo['numberOfSeconds'])
dictionaryConfig['dictionaryDensity'] = density
else:
if dataInfo['numberOfSamples'] > dictionaryConfig['maxS_samples']:
dictionaryConfig['maxS_samples'] = int(dataInfo['numberOfSamples'])
dictionaryConfig['maxS_seconds'] = float(dataInfo['numberOfSamples'] / dataInfo['samplingFreq'])
if (dataInfo['numberOfSeconds']/8)*dataInfo['samplingFreq'] < dictionaryConfig['minS_samples']:
dictionaryConfig['minS_samples'] = int((dataInfo['numberOfSeconds']/16)*dataInfo['samplingFreq'])
dictionaryConfig['minS_seconds'] = float(dataInfo['numberOfSeconds']/16)
if dictionaryConfig['dictionaryDensity'] > density:
dictionaryConfig['dictionaryDensity'] = density
return dictionaryConfig
| gpl-3.0 | -267,186,999,534,640,670 | 36.441558 | 139 | 0.692508 | false |
ngmiller/mipsy | mipsy/encoder.py | 1 | 8100 | """
mipsy.encoder
Instruction encoder.
See README.md for usage and general information.
"""
# system imports
import bitstring
# application imports
from mipsy.arch import MIPS
from mipsy.util import LabelCache, ParseInfo
class Encoder(object):
"""
Responsible for encoding individual instructions and querying the label cache.
"""
class tokenizer(object):
"""
Defines a 'list' of tokenizing functions used for varying instructions.
Each 'tokenizer' returns a dictionary mapping the specified operands to their tokens
from the instruction data (the portion of the instruction following the operation)
instruction = (operation) (instruction_data) <-- here, we're only concerned with instruction_data
"""
def map_operands(self, to_split, operands):
"""
Helper method.
Maps operands to the preprocessed instruction data string.
"""
operand_values = to_split.split()
if len(operands) != len(operand_values):
                raise RuntimeError('instruction has the wrong number of operands')
operand_map = {}
for i in range(len(operands)):
operand_map[operands[i]] = operand_values[i]
return operand_map
def RI_type(self, operands, instruction_data):
"""
The RI_type tokenizer takes instructions with the format:
(operation) [(operand1), (operand2), (operand3)]
"""
to_split = instruction_data.replace(',', ' ')
return self.map_operands(to_split, operands)
def J_type(self, operands, instruction_data):
"""
The J_type tokenizer takes jump (j, jal, jr) instructions
with the format:
(operation) [operand]
"""
return self.map_operands(instruction_data, operands)
def load_store(self, operands, instruction_data):
"""
The load_store tokenizer takes instructions with the format:
(operation) [operand1, (operand2)(operand3)]
"""
# Clear out commas and the parenthesis surrounding the base register
to_split = instruction_data.replace(',', ' ').replace('(', ' ').replace(')', ' ')
return self.map_operands(to_split, operands)
def nop(self, operands, instruction_data):
"""
The nop tokenizer simply maps all the given operands to register $zero.
"""
return {operand: '$zero' for operand in operands}
# The assembler operation table defines the parsing rules
# for a given instruction. The parsing rules are used to
# map tokens in the instruction string to register address
# and immediate value positions. (rs, rt, rd, etc)
t = tokenizer()
operations = {
'nop' : ParseInfo(['rd', 'rs', 'rt'], t.nop),
'add' : ParseInfo(['rd', 'rs', 'rt'], t.RI_type),
'addi' : ParseInfo(['rt', 'rs', 'imm'], t.RI_type),
'and' : ParseInfo(['rd', 'rs', 'rt'], t.RI_type),
'beq' : ParseInfo(['rs', 'rt', 'label'], t.RI_type),
'j' : ParseInfo(['label'], t.J_type),
'jal' : ParseInfo(['label'], t.J_type),
'jr' : ParseInfo(['rs'], t.RI_type),
'lw' : ParseInfo(['rt', 'imm', 'rs'], t.load_store),
'or' : ParseInfo(['rd', 'rs', 'rt'], t.RI_type),
'slt' : ParseInfo(['rd', 'rs', 'rt'], t.RI_type),
'sll' : ParseInfo(['rd', 'rt', 'shamt'], t.RI_type),
'sw' : ParseInfo(['rt', 'imm', 'rs'], t.load_store),
'sub' : ParseInfo(['rd', 'rs', 'rt'], t.RI_type),
# TODO ...
}
def __init__(self):
# ISA definitions
self.mips = MIPS()
# Label resolution cache
self.label_cache = LabelCache()
def encode_instruction(self, pc, instr):
"""
Given an instruction string, generate the encoded bit string.
PC (instruction index is used for branch label resolution)
"""
data = instr.split()
operation = data[0]
try:
mips_op_info = MIPS.operations[operation]
except KeyError, e:
raise RuntimeError('Unknown operation: {}'.format(operation))
# Grab the parsing info from the assembler operations table
# Generate the initial operand map using the specified tokenizer
parse_info = self.operations[operation]
encoding_map = parse_info.tokenizer(parse_info.tokens, ''.join(data[1:]))
# Get the binary equivalents of the operands and MIPS operation information
self.resolve_operands(encoding_map, operation, pc)
# Pull MIPS operation info into encoding map
self.resolve_operation_info(encoding_map, mips_op_info)
instruction = self.mips.generate_instruction(mips_op_info.format)
return instruction.encode(encoding_map)
def resolve_operation_info(self, encoding_map, mips_op_info):
"""
Adds the predefined operation info (opcode, funct) to the current encoding map.
"""
encoding_map['opcode'] = mips_op_info.opcode
encoding_map['funct'] = mips_op_info.funct
def resolve_operands(self, encoding_map, operation, pc):
"""
Converts generic register references (such as $t0, $t1, etc), immediate values, and jump addresses
to their binary equivalents.
"""
convert = Encoder.to_binary
branch_replace = False
jump_replace = False
for operand, value in encoding_map.iteritems():
if (operand == 'rs' or operand == 'rt' or operand == 'rd'):
encoding_map[operand] = MIPS.registers[value]
elif (operand == 'imm'):
encoding_map[operand] = convert(int(value), MIPS.IMMEDIATE_SIZE)
elif (operand == 'addr'):
encoding_map[operand] = convert(int(value), MIPS.ADDRESS_SIZE)
elif (operand == 'shamt'):
encoding_map[operand] = convert(int(value), MIPS.SHAMT_SIZE)
elif (operand == 'label'):
label = encoding_map[operand]
hit, index = self.label_cache.query(label)
if not hit:
raise RuntimeError('No address found for label: {}'.format(label))
                if ((operation == 'beq') or (operation == 'bne')):
                    # Calculate the relative instruction offset. The MIPS ISA uses
                    # PC + 4 + (branch offset) to resolve branch targets, so the
                    # offset is (index - pc - 1) instructions: positive for forward
                    # branches, negative for backward ones, and -1 for a branch to
                    # itself. to_binary encodes negative offsets in two's complement.
                    encoding_map[operand] = convert(index - pc - 1, MIPS.IMMEDIATE_SIZE)
                    branch_replace = True
elif ((operation == 'j') or (operation == 'jal')):
# Jump addresses are absolute
encoding_map[operand] = convert(index, MIPS.ADDRESS_SIZE)
jump_replace = True
# Need to convert references to 'label' back to references the instruction
# encoding string recognizes, otherwise we end up with the default value (zero)
# This doesn't feel very clean, but working on a fix.
if branch_replace:
encoding_map['imm'] = encoding_map['label']
elif jump_replace:
encoding_map['addr'] = encoding_map['label']
@staticmethod
def to_binary(decimal, length):
"""
Given a decimal, generate the binary equivalent string of
given length.
e.g. binary(2, 5) = 00010
"""
b = bitstring.Bits(int=decimal, length=length)
return b.bin
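# Illustrative sketch only: encoding one R-type instruction at PC 0. The
# operand syntax matches the tokenizers above; the exact bit pattern depends
# on the opcode/register tables defined in mipsy.arch.
def _demo_encode():
    return Encoder().encode_instruction(0, 'add $t0, $t1, $t2')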
| mit | -3,993,751,590,257,310,700 | 38.512195 | 106 | 0.564691 | false |
Akson/RemoteConsolePlus3 | RemoteConsolePlus3/RCP3/Backends/Processors/Graphs/Plot1D.py | 1 | 2341 | #Created by Dmytro Konobrytskyi, 2014 (github.com/Akson)
import numpy as np
import matplotlib
import matplotlib.pyplot
from RCP3.Infrastructure import TmpFilesStorage
class Backend(object):
def __init__(self, parentNode):
self._parentNode = parentNode
def Delete(self):
"""
This method is called when a parent node is deleted.
"""
pass
def GetParameters(self):
"""
Returns a dictionary with object parameters, their values,
limits and ways to change them.
"""
return {}
def SetParameters(self, parameters):
"""
Gets a dictionary with parameter values and
update object parameters accordingly
"""
pass
def ProcessMessage(self, message):
"""
        This method is called when a new message arrives.
If an incoming message should be processed by following nodes, the
'self._parentNode.SendMessage(message)'
should be called with an appropriate message.
"""
dataArray = np.asarray(message["Data"])
fig = matplotlib.pyplot.figure(figsize=(6, 4), dpi=float(96))
ax=fig.add_subplot(111)
#n, bins, patches = ax.hist(dataArray, bins=50)
ax.plot(range(len(dataArray)), dataArray)
processedMessage = {"Stream":message["Stream"], "Info":message["Info"]}
filePath, link = TmpFilesStorage.NewTemporaryFile("png")
fig.savefig(filePath,format='png')
matplotlib.pyplot.close(fig)
html = '<img src="http://{}" alt="Image should come here">'.format(link)
processedMessage["Data"] = html
self._parentNode.SendMessage(processedMessage)
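        # Illustrative message shape this backend expects (field values are
        # assumptions inferred from the accesses above):
        #   {"Stream": "plots/demo", "Info": {}, "Data": [0.1, 0.4, 0.2]}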
"""
print len(message["Data"])
import numpy as np
import matplotlib.pyplot as plt
x = np.array(message["Data"])
num_bins = 50
# the histogram of the data
n, bins, patches = plt.hist(x, num_bins, normed=1, facecolor='green', alpha=0.5)
plt.subplots_adjust(left=0.15)
plt.show()
"""
def AppendContextMenuItems(self, menu):
"""
        Append backend-specific menu items to the context menu that the user
        sees when clicking on a node.
"""
pass | lgpl-3.0 | -487,449,994,099,500,860 | 29.415584 | 88 | 0.5912 | false |
BarusXXX/K-Tree | TreeLogic.py | 1 | 3884 | import os
from copy import deepcopy
class RecursiveTree:
def __init__(self, dir_name):
self.dir_name = dir_name
self.files = []
self.folders = [] #Tuple Absolute address, branch, level
self.branches = []
self.children_n = []
self.currentlevel = 0
self.level=[] #len(self.branches)
self.level.append(0)
self.folder_n = len(self.folders)
self.parentIndex = []
self.parentbranch = []
self.iterator = 0
self.reversead = 0
self.parentIndex.append(None)
self.branches.append([0])
self.folders.append((dir_name, "{0}", 0))
RecursiveTree.get_immediate_subdirectories(self, self.dir_name, 0)
self.level_max = max(self.level)
def Branch(self):
pass
def PrintTree(self):
print("#Folders#")
for x in self.folders:
print(x)
print("#Branches#")
for x in self.branches:
print(x)
print("#Parent Branches#")
for x in self.parentbranch:
print(x)
print("#Files#")
for x in self.files:
print(x)
def subdir(self):
return self.folders
def filedir(self):
return self.files
def sortedbranches(self):
STree = []
CountX = 0
for x in self.branches:
STree.append([])
for y in x:
STree[CountX].append(int(y))
CountX += 1
SSum = []
CountX = 0
TTree = deepcopy(STree)
for x in TTree:
CountY = 0
for y in x:
TTree[CountX][CountY] = y + 1
CountY += 1
CountX += 1
SSum.append(sum(x))
        SortedTree = [x for y, x in sorted(list(zip(SSum, STree)))]
        return SortedTree
def get_immediate_subdirectories(self, a_dir, curadd):
nextadd = 0
relocator = 0
cancleNo = self.reversead
for name in os.listdir(a_dir):
if os.path.isdir(os.path.join(a_dir, name)):
curaddstr = str(curadd) + ";" + str(nextadd)
relocator += 1
self.iterator += 1
self.currentlevel += 1
ContainsSub = False
ContainsNo = 0
for x in os.listdir(a_dir + "/" + name):
if os.path.isdir(a_dir + "/" + name + "/" + x):
ContainsSub = True
ContainsNo += 1
self.children_n.append(ContainsNo)
PathConstructor = "{" + str(curadd) + ";" + str(nextadd) + "}" + ":" + os.path.join(a_dir, name)
AbsAddressConstructor = (PathConstructor.split(":")[1]), (PathConstructor.split(":")[2])
self.folders.append((":".join(AbsAddressConstructor), PathConstructor.split(":")[0], self.currentlevel))
self.branches.append((((((PathConstructor.split(":")[0]).split("{")[1])).split("}")[0]).split(";")))
self.parentbranch.append(str(curadd).split(";"))
self.level.append(self.currentlevel)
self.parentIndex.append(self.iterator - relocator - self.reversead + cancleNo) #Cannot negate 1
RecursiveTree.get_immediate_subdirectories(self, (a_dir + "/" + name), curaddstr)
self.currentlevel -= 1
if ContainsSub == True:
self.reversead += ContainsNo
nextadd += 1
else:
self.files.append((self.iterator - relocator - self.reversead + cancleNo, os.path.join(a_dir, name))) #index of parent, direct links to file
#print("file found:", self.iterator - relocator - self.reversead + cancleNo, name)
#print("{"+str(curadd) + ";" + str(nextadd) + "}" + ":" + os.path.join(a_dir, name))
| mit | 4,737,420,698,815,880,000 | 29.582677 | 156 | 0.511843 | false |
ndparker/wolfe | wolfe/scheduler/_job_queue.py | 1 | 4458 | # -*- coding: ascii -*-
r"""
:Copyright:
Copyright 2014 - 2016
Andr\xe9 Malo or his licensors, as applicable
:License:
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
===========
Job Queue
===========
Job Queue. The queue is implemented as priority queue using a heap.
"""
if __doc__: # pragma: no cover
# pylint: disable = redefined-builtin
__doc__ = __doc__.encode('ascii').decode('unicode_escape')
__author__ = r"Andr\xe9 Malo".encode('ascii').decode('unicode_escape')
__docformat__ = "restructuredtext en"
import heapq as _heapq
class JobQueue(object):
"""
Job queue
This container utilizes a heap structure to implement a more or less
generic priority queue (see below). The sorting order of the items is
defined by a wrapper class passed to the constructor.
The queue is made for jobs. That's why wrapper classes have to provide a
job attribute for unwrapping and items passed into the queue are expected
to provide a valid ``id`` attribute.
Additionally the queue implements boolean operations (it's false if it's
empty) and a __contains__ operation based on job IDs.
>>> class Wrapper(object):
... def __init__(self, job):
... self.job = job
... def __lt__(self, other):
... return self.job.id > other.job.id
>>> class Job(object):
... def __init__(self, job_id):
... self.id = job_id
>>> queue = JobQueue(Wrapper)
>>> queue.put(Job(2))
>>> bool(queue)
True
>>> 1 in queue
False
>>> 2 in queue
True
>>> len(queue)
1
:IVariables:
`_queue` : ``list``
actual heap containing wrapped jobs
`_wrapper` : callable
Wrapper class factory
`_ids` : ``set``
Set of job IDs currently queued
"""
def __init__(self, wrapper_class):
"""
Initialization
:Parameters:
`wrapper_class` : any
class factory expected to take a job and represent it inside the
queue. The object should be comparable with other instances
(``__lt__`` is the proper method) and should provide a ``job``
attribute pointing to the original object.
"""
self._queue = []
self._wrapper = wrapper_class
self._ids = set()
def __nonzero__(self):
"""
Return false if the queue is empty, true otherwise
:Return: Is there something in the queue?
:Rtype: ``bool``
"""
return bool(self._queue)
def __contains__(self, job_id):
"""
Check if the passed job_id is currently enqueued
:Return: Is it?
:Rtype: ``bool``
"""
return job_id in self._ids
def __len__(self):
""" Find queue length """
return len(self._queue)
def __iter__(self):
""" Iterate over the queue until it's exhausted """
try:
while True:
yield self.get()
except IndexError:
pass
def put(self, job):
"""
Put a job into the queue
:Parameters:
`job` : any
The job to put in. The object must have an ``id`` attribute,
which must be hashable.
"""
self._ids.add(job.id)
_heapq.heappush(self._queue, self._wrapper(job))
def get(self):
"""
Get the next job from the queue
:Return: A job
:Rtype: any
:Exceptions:
- `IndexError` : Queue was empty
"""
job = _heapq.heappop(self._queue).job
self._ids.remove(job.id)
return job
def peek(self):
"""
Return the next job without removing it from the queue
The job will still be wrapped in the wrapper_class container
:Return: wrapped job
:Rtype: any
:Exceptions:
- `IndexError` : Queue was empty
"""
return self._queue[0]
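# Illustrative sketch reusing the minimal Wrapper/Job pair from the class
# docstring above: peek inspects the head without removing it, and iterating
# drains the queue in priority order.
def _demo_job_queue():
    class Wrapper(object):
        def __init__(self, job):
            self.job = job
        def __lt__(self, other):
            return self.job.id < other.job.id

    class Job(object):
        def __init__(self, job_id):
            self.id = job_id

    queue = JobQueue(Wrapper)
    for job_id in (3, 1, 2):
        queue.put(Job(job_id))
    head = queue.peek().job.id           # 1 (smallest id first in this order)
    drained = [job.id for job in queue]  # [1, 2, 3]; iteration empties the queue
    return head, drained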
| apache-2.0 | -4,326,341,695,374,241,300 | 25.855422 | 77 | 0.580978 | false |
bvanrijn/debianpaste-clients | old-paste.py | 1 | 7602 | #!/usr/bin/python
# Filename: paste
# Purpose: XmlRpc interface client to paste.debian.net
# Author: Copyright (C) 2007-2011 Michael Gebetsroither <[email protected]>
# License: This file is licensed under the GPL v2+. Full license text in LICENSE
# Modified original: No modifications have been made
#
# This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
# You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
################################################################################
import sys
import xmlrpclib
import optparse
import inspect
import getpass
# program defaults
DEFAULT_SERVER='http://paste.debian.net/server.pl'
class ActionFailedException(Exception):
'''Thrown if server returned an error'''
def __init__(self, errormsg, ret):
Exception.__init__(self, errormsg, ret)
def what(self):
'''Get errormessage'''
return self.args[0]
def dwhat(self):
'''Get more verbose errormessage'''
return self.args[1]
class Action(object):
def __init__(self, args, opts):
self.args_ = args
self.opts_ = opts
def _createProxy(self):
return xmlrpclib.ServerProxy(self.opts_.server, verbose=False)
def _callProxy(self, functor, server=None):
'''Wrapper for xml-rpc calls to server which throws an
ActionFailedException on error'''
if server is None:
server = self._createProxy()
ret = functor(server)
if ret['rc'] != 0:
raise ActionFailedException(ret['statusmessage'], ret)
return ret
def call(self, method_name):
'''External Interface to call the appropriate action'''
return self.__getattribute__(method_name)()
def actionAddPaste(self):
'''Add paste to the server: <1.line> <2.line> ...
default Read paste from stdin.
[text] Every argument on the commandline will be interpreted as
a seperate line of paste.
'''
server = self._createProxy()
o = self.opts_
code = self.args_
if len(self.args_) == 0:
code = [ i.rstrip() for i in sys.stdin.readlines() ]
code = '\n'.join(code)
result = self._callProxy(lambda s: s.paste.addPaste(code, o.name, o.expire * 3600, o.lang, o.private),
server)
return (result['statusmessage'], result)
def actionDelPaste(self):
'''Delete paste from server: <digest>
<digest> Digest of paste you want to remove.
'''
digest = self.args_.pop(0)
result = self._callProxy(lambda s: s.paste.deletePaste(digest))
return (result['statusmessage'], result)
def actionGetPaste(self):
'''Get paste from server: <id>
<id> Id of paste you want to receive.
'''
id = self.args_.pop(0)
result = self._callProxy(lambda s: s.paste.getPaste(id))
return (result['code'], result)
def actionGetLangs(self):
'''Get supported language highlighting types from server'''
result = self._callProxy(lambda s: s.paste.getLanguages())
return ('\n'.join(result['langs']), result)
def actionAddShortUrl(self):
'''Add short-URL: <url>
<url> Short-URL to add
'''
url = self.args_.pop(0)
result = self._callProxy(lambda s: s.paste.addShortURL(url))
return (result['url'], result)
def actionGetShortUrl(self):
'''Resolve short-URL: <url>
<url> Short-URL to get clicks of
'''
url = self.args_.pop(0)
result = self._callProxy(lambda s: s.paste.resolveShortURL(url))
return (result['url'], result)
def actionGetShortUrlClicks(self):
'''Get clicks of short-URL: <url>
<url> Short-URL to get clicks of
'''
url = self.args_.pop(0)
result = self._callProxy(lambda s: s.paste.ShortURLClicks(url))
return (result['count'], result)
def actionHelp(self):
'''Print more verbose help about specific action: <action>
<action> Topic on which you need more verbose help.
'''
if len(self.args_) < 1:
alias = "help"
else:
alias = self.args_.pop(0)
if alias in actions:
fun = actions[alias]
print inspect.getdoc(self.__getattribute__(fun))
            print "\naliases: " + " ".join([i for i in actions_r[fun] if i != alias])
else:
print "Error: No such command - %s" % (alias)
OPT_PARSER.print_usage()
sys.exit(0)
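# Illustrative command lines (hypothetical shell session, not program output):
#   echo "hello world" | paste.py add
#   paste.py get 1234
#   paste.py del <digest-printed-by-add>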
# actionAddPaste -> [add, a]
actions_r = {}
# add -> actionAddPaste
# a -> actionAddPaste
actions = {}
# option parser
OPT_PARSER = None
##
# MAIN
##
if __name__ == "__main__":
action_spec = ['actionAddPaste add a',
'actionDelPaste del d rm',
'actionGetPaste get g',
'actionGetLangs getlangs gl langs l',
'actionAddShortUrl addurl',
'actionGetShortUrl geturl',
'actionGetShortUrlClicks getclicks',
'actionHelp help']
for i in action_spec:
aliases = i.split()
cmd = aliases.pop(0)
actions_r[cmd] = aliases
for (k,v) in actions_r.items():
for i in v:
actions[i] = k
usage = "usage: %prog [options] ACTION <args>\n\n" +\
"actions:\n" +\
"\n".join(["%12s\t%s" % (v[0], inspect.getdoc(getattr(Action, k)).split('\n')[0]) \
for (k,v) in actions_r.items()])
running_user = getpass.getuser()
parser = optparse.OptionParser(usage=usage)
parser.add_option('-n', '--name', default=running_user, help="Name of poster")
parser.add_option('-e', '--expire', type=int, default=72, metavar='HOURS',
                      help='Hours after which the paste should expire')
parser.add_option('-l', '--lang', default='Plain', help='Type of language to highlight')
    parser.add_option("-p", "--private", action="count", dest="private", default=0,
                      help='Create hidden paste')
parser.add_option('-s', '--server', default=DEFAULT_SERVER,
help='Paste server')
parser.add_option('-v', '--verbose', action='count', default=0, help='More output')
(opts, args) = parser.parse_args()
OPT_PARSER = parser
if len(args) == 0:
parser.error('Please provide me with an action')
elif args[0] in actions:
cmd = args.pop(0)
action = Action(args, opts)
try:
(msg, ret) = action.call(actions[cmd])
if opts.verbose == 0:
print msg
else:
print ret
except ActionFailedException, e:
sys.stderr.write('Server Error: %s\n' % e.what())
        if opts.verbose > 0:
print e.dwhat()
sys.exit(1)
else:
parser.error('Unknown action: %s' % args[0])
| gpl-2.0 | 4,928,760,378,934,636,000 | 35.373206 | 241 | 0.578269 | false |
wjakob/layerlab | recipes/coated-gold-with-scatmedium.py | 1 | 2082 | # Creates a rough gold layer with a rough dielectric coating containing an
# anisotropic scattering medium
import sys
sys.path.append('.')
from utils.materials import gold
from utils.cie import get_rgb
import layerlab as ll
eta_top = 1.5
# This step integrates the spectral IOR against the CIE XYZ curves to obtain
# equivalent sRGB values. This may seem fairly approximate but turns out to
# yield excellent agreement with spectral reference renders
print('Computing gold IOR parameters')
eta_bot = get_rgb(gold)
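# (Reader note, inferred from usage below: get_rgb() is assumed to return a
# 3-element sequence of complex IORs, one per sRGB channel, since eta_bot
# is indexed with [0] and [channel].)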
alpha_top = 0.1 # Beckmann roughness of top layer (coating)
alpha_bot = 0.1 # Beckmann roughness of bottom layer (gold)
# Medium parameters
g = 0.5 # Scattering anisotropy
albedo = [0.25, 0.0, 0.95] # Single scattering albedo
tau = 0.5 # Optical depth
# Construct quadrature scheme suitable for the material
n_top, m_top = ll.parameterHeuristicMicrofacet(eta=eta_top, alpha=alpha_top)
n_bot, m_bot = ll.parameterHeuristicMicrofacet(eta=eta_bot[0], alpha=alpha_bot)
n_med, m_med = ll.parameterHeuristicHG(g=g)
n = max(n_top, n_bot) # Max of zenith angle discretization
m = m_top # Number of Fourier orders determined by top layer
mu, w = ll.quad.gaussLobatto(n)
print("# of nodes = %i, fourier orders = %i" % (n, m))
# Construct coating layer
print("Creating coating layer")
coating = ll.Layer(mu, w, m)
coating.setMicrofacet(eta=eta_top, alpha=alpha_top)
output = []
for channel in range(3):
# Construct diffuse bottom layer for each channel
print("Creating metal layer")
l = ll.Layer(mu, w, m)
l.setMicrofacet(eta=eta_bot[channel], alpha=alpha_bot)
# Construct medium layer
print("Creating medium layer")
l2 = ll.Layer(mu, w, m)
l2.setHenyeyGreenstein(g=g, albedo=albedo[channel])
l2.expand(tau)
# Apply medium layer
print("Applying medium ..")
l.addToTop(l2)
# Apply coating
print("Applying coating..")
l.addToTop(coating)
output.append(l)
# .. and write to disk
print("Writing to disk..")
storage = ll.BSDFStorage.fromLayerRGB("output.bsdf", *output)
storage.close()
| bsd-2-clause | -3,367,170,747,667,034,600 | 29.617647 | 79 | 0.713737 | false |
plumer/codana | projectdata.py | 1 | 5358 | class VersionDataManager:
"""Manager of all the information of files and packages in a specific version
Attributes:
packages (list of str): List of packages name
files (list of str): List of all the files in the project
packagedict (dict): Map of packages(key) and filenames(value)
filebugnum (dict): Map of filename(key) and bug numbers(value)
fileattr (dict): Map of filename(key) and the attributes of the file(value)
packageattr (dict): Map of package(key) and the attributes of the package(value)
filedepends (list of tuple): List of all the edges in the dependence graph of all files
        packagedepends (list of tuple): List of all the edges in the dependence graph of all packages
"""
def __init__(self, version='6.0.0'):
self.packagedict = {}
self.fileattr = {}
self.files = []
self.filebugnum = {}
self.packageattr = {}
self.versionArray = []
datafile = open(r'tomcat_history/tomcat' + version + r'/tomcat_pack.txt', 'r')
for packs in datafile:
packslice = packs.strip(' \t\n').split('\t')
self.packagedict[packslice[0]] = []
self.packageattr[packslice[0]] = self.packPackageAttr(packslice[1:])
filenum = 0
if int(packslice[1]) == 0:
continue
for files in datafile:
fileattr = files.strip(' \t\n').split('\t')
if not fileattr[0] in self.packagedict[packslice[0]]:
self.files.append(fileattr[0])
self.packagedict[packslice[0]].append(fileattr[0])
self.fileattr[fileattr[0]] = self.packFileAttr(fileattr[1:])
filenum = filenum + 1
if filenum >= int(packslice[1]):
break
datafile.close()
datafile = open(r'tomcat_history/tomcat' + version + r'/log.txt', 'r')
for record in datafile:
recordslice = record.strip(' \t\n').split('\t')
self.filebugnum[recordslice[0]] = int(recordslice[1])
datafile.close()
self.packages = self.packagedict.keys()
self.packagedepends = []
packdependfile = open(r'tomcat_history/tomcat' + version + r'/tomcat_pack_depends.txt', 'r')
for e in packdependfile:
vertices = e.strip(' \t\n').split(' ')
self.packagedepends.append( (vertices[0], vertices[-1]) )
packdependfile.close()
self.filedepends = []
filedependfile = open(r'tomcat_history/tomcat' + version + r'/tomcat_depends.txt', 'r')
for e in filedependfile:
vertices = e.strip(' \t\n').split('\t')
self.filedepends.append( (vertices[0], vertices[-1]) )
filedependfile.close()
def packPackageAttr(self, attrs):
return {'filenum' : attrs[0],
'codelines' : attrs[1],
'cyclomatic' : attrs[2]}
def packFileAttr(self, attrs):
return {'codelines' : attrs[0],
'cyclomatic' : attrs[1]}
def listFileAttr(self):
return ('codelines', 'cyclomatic')
def listPackageAttr(self):
return ('filenum', 'codelines' , 'cyclomatic')
def getPackages(self):
return self.packages
def getFilenames(self):
return self.files
def getFilesOfPackage(self, package):
return self.packagedict[package]
    def getPackageOfFile(self, filename):
        # `filedict` was never defined on this class, so derive the
        # reverse mapping from packagedict instead.
        for package, files in self.packagedict.items():
            if filename in files:
                return package
        return None
def getFileAttr(self, filename):
return self.fileattr[filename]
def getPackageAttr(self, package):
return self.packageattr[package]
def getFileDependence(self):
return self.filedepends
def getPackageDependence(self):
return self.packagedepends
def getFileDependenceOfPackage(self, package):
deplist = []
filelist = self.getFilesOfPackage(package)
for dep in self.filedepends:
if dep[0] in filelist and dep[1] in filelist:
deplist.append(dep)
return deplist
def getBugNumberOfFile(self, filename):
if filename in self.filebugnum:
return self.filebugnum[filename]
return 0
def getBugNumberOfPackage(self, package):
bugnum = 0
for filename in self.packagedict[package]:
if filename in self.filebugnum:
bugnum = bugnum + self.filebugnum[filename]
return bugnum
class DataManager:
'''Manage all the data in all versions
Attributes:
versionArray (list): List of all the versions
dataManages (dict): Map of the version(key) and the specified data manager(value)
'''
def __init__(self):
self.versionArray = []
datafile = open(r'tomcat_history/tomcat_list.txt', 'r')
for line in datafile:
self.versionArray.append(line.strip(' \n').strip('tomcat'))
datafile.close()
self.dataManages = {}
for version in self.versionArray:
self.dataManages[version] = VersionDataManager(version)
def getManager(self, version):
return self.dataManages[version]
def getVersionArray(self):
return self.versionArray
if __name__ == '__main__':
    dm = DataManager()
    # getFileDependenceOfPackage is defined on VersionDataManager, so a
    # version-specific manager has to be fetched first.
    manager = dm.getManager(dm.getVersionArray()[0])
    manager.getFileDependenceOfPackage('apache.catalina')
| mit | -4,992,400,439,942,177,000 | 35.69863 | 102 | 0.601904 | false |
chutsu/robotics | prototype/models/two_wheel.py | 1 | 3500 | from math import cos
from math import sin
import numpy as np
import sympy
from sympy import pprint
def two_wheel_2d_model(x, u, dt):
"""Two wheel 2D motion model
Parameters
----------
x : np.array
Two Wheel model state vector (x, y, theta)
u : np.array
Input
dt : float
Time difference
Returns
-------
np.array (x, y, theta)
"""
gdot = np.array([[u[0, 0] * cos(x[2, 0]) * dt],
[u[0, 0] * sin(x[2, 0]) * dt],
[u[1, 0] * dt]])
return x + gdot
def two_wheel_2d_linearized_model(x, u, dt):
"""Two wheel 2D linearized motion model
Parameters
----------
x : np.array
Two Wheel model state vector (x, y, theta)
u : np.array
Input
dt : float
Time difference
Returns
-------
np.array 3x3 matrix of linearized two wheel model
"""
G1 = 1.0
G2 = 0.0
G3 = -u[0, 0] * sin(x[2, 0]) * dt
G4 = 0.0
G5 = 1.0
G6 = u[0, 0] * cos(x[2, 0]) * dt
G7 = 0.0
G8 = 0.0
G9 = 1.0
return np.array([[G1, G2, G3],
[G4, G5, G6],
[G7, G8, G9]])
def two_wheel_3d_model(x, u, dt):
"""Two wheel 3D motion model
Parameters
----------
x : np.array
        Two Wheel model state vector (x, y, z, theta)
u : np.array
Input
dt : float
Time difference
Returns
-------
np.array (x, y, z, theta)
"""
g1 = x[0] + u[0] * cos(x[3]) * dt
g2 = x[1] + u[0] * sin(x[3]) * dt
g3 = x[2] + u[1] * dt
g4 = x[3] + u[2] * dt
return np.array([g1, g2, g3, g4])
def two_wheel_2d_deriv():
""" Symbolic derivation of Jacobian of the 2D two wheel motion model """
x1, x2, x3, x4, x5 = sympy.symbols("x1,x2,x3,x4,x5")
dt = sympy.symbols("dt")
# x, y, theta, v, omega
f1 = x1 + x4 * sympy.cos(x3) * dt
f2 = x2 + x4 * sympy.sin(x3) * dt
f3 = x3 + x5 * dt
f4 = x4
f5 = x5
F = sympy.Matrix([f1, f2, f3, f4, f5])
pprint(F.jacobian([x1, x2, x3, x4, x5]))
def two_wheel_3d_deriv():
""" Symbolic derivation of Jacobian of the 3D two wheel motion model """
x1, x2, x3, x4, x5, x6, x7 = sympy.symbols("x1,x2,x3,x4,x5,x6,x7")
dt = sympy.symbols("dt")
# x1 - x
# x2 - y
# x3 - z
# x4 - theta
# x5 - v
# x6 - omega
# x7 - vz
# x, y, z, theta, v, omega, vz
f1 = x1 + x5 * sympy.cos(x4) * dt
f2 = x2 + x5 * sympy.sin(x4) * dt
f3 = x3 + x7 * dt
f4 = x4 + x6 * dt
f5 = x5
f6 = x6
f7 = x7
F = sympy.Matrix([f1, f2, f3, f4, f5, f6, f7])
pprint(F.jacobian([x1, x2, x3, x4, x5, x6, x7]))
def two_wheel_3d_deriv2():
""" Symbolic derivation of Jacobian of the 3D two wheel motion model """
    variables = sympy.symbols("x1,x2,x3,x4,x5,x6,x7,x8,x9")
    x1, x2, x3, x4, x5, x6, x7, x8, x9 = variables
dt = sympy.symbols("dt")
# x1 - x
# x2 - y
# x3 - z
# x4 - theta
# x5 - v
# x6 - vz
# x7 - omega
# x8 - a
# x9 - az
f1 = x1 + x5 * sympy.cos(x4) * dt
f2 = x2 + x5 * sympy.sin(x4) * dt
f3 = x3 + x6 * dt
f4 = x4 + x7 * dt
f5 = x5 + x8 * dt
f6 = x6 + x9 * dt
f7 = x7
f8 = x8
f9 = x9
F = sympy.Matrix([f1, f2, f3, f4, f5, f6, f7, f8, f9])
pprint(F.jacobian([x1, x2, x3, x4, x5, x6, x7, x8, x9]))
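if __name__ == '__main__':
    # Minimal usage sketch (added example, not part of the original
    # module): propagate the 2D model one step from the origin with
    # v = 1 m/s and omega = 0.1 rad/s over dt = 0.1 s.
    x0 = np.array([[0.0], [0.0], [0.0]])
    u0 = np.array([[1.0], [0.1]])
    print(two_wheel_2d_model(x0, u0, 0.1))
    print(two_wheel_2d_linearized_model(x0, u0, 0.1))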
| gpl-3.0 | 2,906,790,711,327,816,000 | 19.833333 | 76 | 0.483714 | false |
lingthio/Flask-User | flask_user/user_mixin.py | 1 | 4450 | """This module implements the UserMixin class for Flask-User.
This Mixin adds required methods to User data-model.
"""
from flask import current_app
from flask_login import UserMixin as FlaskLoginUserMixin
class UserMixin(FlaskLoginUserMixin):
""" This class adds required methods to the User data-model.
Example:
class User(db.Model, UserMixin):
...
"""
def get_id(self):
"""Converts a User ID and parts of a User password hash to a token."""
# This function is used by Flask-Login to store a User ID securely as a browser cookie.
        # The last part of the password is included to invalidate tokens when the password changes.
# user_id and password_ends_with are encrypted, timestamped and signed.
# This function works in tandem with UserMixin.get_user_by_token()
user_manager = current_app.user_manager
user_id = self.id
password_ends_with = '' if user_manager.USER_ENABLE_AUTH0 else self.password[-8:]
user_token = user_manager.generate_token(
user_id, # User ID
password_ends_with, # Last 8 characters of user password
)
# print("UserMixin.get_id: ID:", self.id, "token:", user_token)
return user_token
@classmethod
def get_user_by_token(cls, token, expiration_in_seconds=None):
# This function works in tandem with UserMixin.get_id()
# Token signatures and timestamps are verified.
# user_id and password_ends_with are decrypted.
# Verifies a token and decrypts a User ID and parts of a User password hash
user_manager = current_app.user_manager
data_items = user_manager.verify_token(token, expiration_in_seconds)
# Verify password_ends_with
        token_is_valid = False
        user = None
        if data_items:
            # Load user by User ID
            user_id = data_items[0]
            password_ends_with = data_items[1]
            user = user_manager.db_manager.get_user_by_id(user_id)
            if user:
                # Guard against deleted users before touching user.password
                user_password = '' if user_manager.USER_ENABLE_AUTH0 else user.password[-8:]
                # Make sure that the last 8 characters of the user password match
                token_is_valid = user_password == password_ends_with
        return user if token_is_valid else None
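    # Round-trip sketch (hypothetical usage; assumes a configured
    # user_manager inside an application context):
    #   token = user.get_id()
    #   same_user = User.get_user_by_token(token)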
def has_roles(self, *requirements):
""" Return True if the user has all of the specified roles. Return False otherwise.
has_roles() accepts a list of requirements:
has_role(requirement1, requirement2, requirement3).
Each requirement is either a role_name, or a tuple_of_role_names.
role_name example: 'manager'
tuple_of_role_names: ('funny', 'witty', 'hilarious')
A role_name-requirement is accepted when the user has this role.
A tuple_of_role_names-requirement is accepted when the user has ONE of these roles.
has_roles() returns true if ALL of the requirements have been accepted.
For example:
has_roles('a', ('b', 'c'), d)
Translates to:
User has role 'a' AND (role 'b' OR role 'c') AND role 'd'"""
# Translates a list of role objects to a list of role_names
user_manager = current_app.user_manager
role_names = user_manager.db_manager.get_user_roles(self)
# has_role() accepts a list of requirements
for requirement in requirements:
if isinstance(requirement, (list, tuple)):
# this is a tuple_of_role_names requirement
tuple_of_role_names = requirement
authorized = False
for role_name in tuple_of_role_names:
if role_name in role_names:
# tuple_of_role_names requirement was met: break out of loop
authorized = True
break
if not authorized:
return False # tuple_of_role_names requirement failed: return False
else:
# this is a role_name requirement
role_name = requirement
# the user must have this role
                if role_name not in role_names:
return False # role_name requirement failed: return False
# All requirements have been met: return True
return True
| mit | 2,653,800,167,023,835,600 | 42.203883 | 106 | 0.602921 | false |
mathcamp/steward_web | steward_web/__init__.py | 1 | 1965 | """ Steward extension providing framework for web interface """
import re
import pyramid.renderers
from pyramid.request import Request
from pyramid.settings import asbool
def to_json(value):
""" A json filter for jinja2 """
return pyramid.renderers.render('json', value)
def do_index(request):
""" Render the index page """
return {}
def _add_steward_web_app(config, title, name):
""" Add a route to the list of steward web apps """
config.registry.steward_web_apps.append((title, name))
def _web_apps(request):
""" Get the list of steward web apps """
return tuple(request.registry.steward_web_apps)
def _route_names(request, pattern=r'.*'):
""" Get a list of route names that match the pattern """
pattern = re.compile('^' + pattern + '$')
introspector = request.registry.introspector
routes = introspector.get_category('routes')
names = []
for route in routes:
name = route['introspectable']['name']
if pattern.match(name):
names.append(name)
return names
def _route_map(request, pattern=r'.*'):
""" Get a dict of route names to route urls """
return {name: request.route_url(name) for name in
request.route_names(pattern)}
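# Example (hypothetical route names): inside a view or template helper,
#   request.route_map(r'steward_.*')
# would return a dict such as {'steward_home': 'http://host/steward/'}.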
def includeme(config):
""" Configure the app """
settings = config.get_settings()
config.add_route('root', '/')
config.add_view('steward_web.do_index', route_name='root',
renderer='index.jinja2')
config.add_route('login', '/login')
config.add_route('logout', '/logout')
config.registry.steward_web_apps = []
config.add_directive('add_steward_web_app', _add_steward_web_app)
config.add_request_method(_web_apps, name='steward_web_apps', reify=True)
config.add_request_method(_route_names, name='route_names')
config.add_request_method(_route_map, name='route_map')
if asbool(settings.get('steward.web.basic_login', True)):
config.scan()
| mit | 7,871,287,402,392,848,000 | 29.230769 | 77 | 0.653944 | false |
abrt/faf | src/pyfaf/storage/migrations/versions/168c63b81f85_report_history_default_value.py | 1 | 1945 | # Copyright (C) 2014 ABRT Team
# Copyright (C) 2014 Red Hat, Inc.
#
# This file is part of faf.
#
# faf is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# faf is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with faf. If not, see <http://www.gnu.org/licenses/>.
"""
Report history default value
Revision ID: 168c63b81f85
Revises: 1c4d6317721a
Create Date: 2016-12-13 15:49:32.883743
"""
from alembic.op import alter_column, execute
# revision identifiers, used by Alembic.
revision = '168c63b81f85'
down_revision = '1c4d6317721a'
def upgrade() -> None:
alter_column('reporthistorydaily', 'unique', server_default="0")
alter_column('reporthistoryweekly', 'unique', server_default="0")
alter_column('reporthistorymonthly', 'unique', server_default="0")
execute('UPDATE reporthistorydaily SET "unique" = 0 WHERE "unique" IS NULL')
execute('UPDATE reporthistoryweekly SET "unique" = 0 WHERE "unique" IS NULL')
execute('UPDATE reporthistorymonthly SET "unique" = 0 WHERE "unique" IS NULL')
def downgrade() -> None:
alter_column('reporthistorydaily', 'unique', server_default=None)
alter_column('reporthistoryweekly', 'unique', server_default=None)
alter_column('reporthistorymonthly', 'unique', server_default=None)
execute('UPDATE reporthistorydaily SET "unique" = NULL WHERE "unique" = 0')
execute('UPDATE reporthistoryweekly SET "unique" = NULL WHERE "unique" = 0')
execute('UPDATE reporthistorymonthly SET "unique" = NULL WHERE "unique" = 0')
| gpl-3.0 | 7,853,489,964,225,810,000 | 37.137255 | 82 | 0.731105 | false |
sradevski/homeAutomate | scripts/laptop_on_network.py | 1 | 1994 | #!/usr/bin/python
import remote_core as core
import os
import sys
import nmap
import datetime
import time
import re
import go_to_sleep
try:
nm = nmap.PortScanner() # instance of nmap.PortScanner
except nmap.PortScannerError:
print('Nmap not found', sys.exc_info()[0])
sys.exit(0)
except:
print("Unexpected error:", sys.exc_info()[0])
sys.exit(0)
macAddressToSearch = '64:76:BA:A3:43:B0'
laptopHasBeenTurnedOn = False
disconnectedCounter = 0
def checkIfLaptopOn():
global macAddressToSearch, laptopHasBeenTurnedOn, disconnectedCounter
curHosts = []
# nm.scan(hosts = '192.168.11.1-8', arguments = '-n -sP -PS 7,22,88,443,80,660,2195 -PA 80,22,443 -PU -T3')
nm.scan(hosts = '192.168.11.1-8', arguments = '-n -sn -PR')
for host in nm.all_hosts():
try:
mac = nm[host]['addresses']['mac']
vendor = nm[host]['vendor'][mac]
except:
vendor = mac = 'unknown'
curHosts.append(mac)
localtime = time.asctime(time.localtime(time.time()))
print('============ {0} ============'.format(localtime))
for host in curHosts:
print(host)
    config = core.load_config()
if config['location']['am_home']:
if macAddressToSearch not in curHosts:
if laptopHasBeenTurnedOn:
if disconnectedCounter > 3:
wentToSleepScript()
laptopHasBeenTurnedOn = False
disconnectedCounter += 1
        else:
            laptopHasBeenTurnedOn = True
            # Reset the miss counter once the laptop is seen again.
            disconnectedCounter = 0
def wentToSleepScript():
time.sleep(10)
go_to_sleep.go_to_sleep()
# print("SLEEPING")
if __name__ == '__main__':
start_at_hour = 22
stop_at_hour = 2
    while True:
        localtime = time.localtime(time.time())
        if stop_at_hour < localtime.tm_hour < start_at_hour:
            # Between stop_at_hour and start_at_hour there is nothing to
            # monitor, so sleep until the start hour comes around.
            time.sleep(60 * 60 * (start_at_hour - localtime.tm_hour))
time.sleep(10)
checkIfLaptopOn()
| mit | 6,664,738,618,122,529,000 | 25.586667 | 110 | 0.61986 | false |
JordanReiter/django-notification | notification/views.py | 1 | 6596 | from django.core.urlresolvers import reverse
from django.shortcuts import render_to_response, get_object_or_404
from django.http import HttpResponseRedirect, Http404
from django.template import RequestContext
from django.contrib.auth.decorators import login_required
try:
from django.contrib.syndication.views import Feed
except ImportError:
from django.contrib.syndication.views import feed as Feed
from notification.models import *
from notification.decorators import basic_auth_required, simple_basic_auth_callback
from notification.feeds import NoticeUserFeed
@basic_auth_required(realm="Notices Feed", callback_func=simple_basic_auth_callback)
def feed_for_user(request):
"""
An atom feed for all unarchived :model:`notification.Notice`s for a user.
"""
url = "feed/%s" % request.user.username
return Feed(request, url, {
"feed": NoticeUserFeed,
})
@login_required
def notices(request):
"""
The main notices index view.
Template: :template:`notification/notices.html`
Context:
notices
A list of :model:`notification.Notice` objects that are not archived
and to be displayed on the site.
"""
notices = Notice.objects.notices_for(request.user, on_site=True)
return render_to_response("notification/notices.html", {
"notices": notices,
}, context_instance=RequestContext(request))
@login_required
def notice_settings(request):
"""
The notice settings view.
Template: :template:`notification/notice_settings.html`
Context:
notice_types
A list of all :model:`notification.NoticeType` objects.
notice_settings
A dictionary containing ``column_headers`` for each ``NOTICE_MEDIA``
and ``rows`` containing a list of dictionaries: ``notice_type``, a
:model:`notification.NoticeType` object and ``cells``, a list of
tuples whose first value is suitable for use in forms and the second
value is ``True`` or ``False`` depending on a ``request.POST``
variable called ``form_label``, whose valid value is ``on``.
"""
notice_types = NoticeType.objects.all()
settings_table = []
for notice_type in notice_types:
settings_row = []
for medium_id, medium_display in NOTICE_MEDIA:
form_label = "%s_%s" % (notice_type.label, medium_id)
setting = get_notification_setting(request.user, notice_type, medium_id)
if request.method == "POST":
if request.POST.get(form_label) == "on":
if not setting.send:
setting.send = True
setting.save()
else:
if setting.send:
setting.send = False
setting.save()
settings_row.append((form_label, setting.send))
settings_table.append({"notice_type": notice_type, "cells": settings_row})
if request.method == "POST":
next_page = request.POST.get("next_page", ".")
return HttpResponseRedirect(next_page)
notice_settings = {
"column_headers": [medium_display for medium_id, medium_display in NOTICE_MEDIA],
"rows": settings_table,
}
return render_to_response("notification/notice_settings.html", {
"notice_types": notice_types,
"notice_settings": notice_settings,
}, context_instance=RequestContext(request))
@login_required
def single(request, id, mark_seen=True):
"""
Detail view for a single :model:`notification.Notice`.
Template: :template:`notification/single.html`
Context:
notice
The :model:`notification.Notice` being viewed
Optional arguments:
mark_seen
If ``True``, mark the notice as seen if it isn't
already. Do nothing if ``False``. Default: ``True``.
"""
notice = get_object_or_404(Notice, id=id)
if request.user == notice.recipient:
if mark_seen and notice.unseen:
notice.unseen = False
notice.save()
return render_to_response("notification/single.html", {
"notice": notice,
}, context_instance=RequestContext(request))
raise Http404
@login_required
def archive(request, noticeid=None, next_page=None):
"""
Archive a :model:`notices.Notice` if the requesting user is the
recipient or if the user is a superuser. Returns a
``HttpResponseRedirect`` when complete.
Optional arguments:
noticeid
The ID of the :model:`notices.Notice` to be archived.
next_page
The page to redirect to when done.
"""
if noticeid:
try:
notice = Notice.objects.get(id=noticeid)
if request.user == notice.recipient or request.user.is_superuser:
notice.archive()
else: # you can archive other users' notices
# only if you are superuser.
return HttpResponseRedirect(next_page)
except Notice.DoesNotExist:
return HttpResponseRedirect(next_page)
return HttpResponseRedirect(next_page)
@login_required
def delete(request, noticeid=None, next_page=None):
"""
Delete a :model:`notices.Notice` if the requesting user is the recipient
or if the user is a superuser. Returns a ``HttpResponseRedirect`` when
complete.
Optional arguments:
noticeid
The ID of the :model:`notices.Notice` to be archived.
next_page
The page to redirect to when done.
"""
if noticeid:
try:
notice = Notice.objects.get(id=noticeid)
if request.user == notice.recipient or request.user.is_superuser:
notice.delete()
else: # you can delete other users' notices
# only if you are superuser.
return HttpResponseRedirect(next_page)
except Notice.DoesNotExist:
return HttpResponseRedirect(next_page)
return HttpResponseRedirect(next_page)
@login_required
def mark_all_seen(request):
"""
Mark all unseen notices for the requesting user as seen. Returns a
``HttpResponseRedirect`` when complete.
"""
for notice in Notice.objects.notices_for(request.user, unseen=True):
notice.unseen = False
notice.save()
return HttpResponseRedirect(reverse("notification_notices"))
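# URLconf sketch (hypothetical; only the "notification_notices" name is
# grounded in the reverse() call above, the rest is illustrative):
#
# urlpatterns = patterns("",
#     url(r"^$", "notification.views.notices", name="notification_notices"),
#     url(r"^settings/$", "notification.views.notice_settings"),
#     url(r"^(?P<id>\d+)/$", "notification.views.single"),
# )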
| mit | 8,042,785,939,941,627,000 | 32.482234 | 89 | 0.622347 | false |
alexwaters/python-readability-api | readability/models.py | 1 | 5472 | # -*- coding: utf-8 -*-
"""
readability.models
~~~~~~~~~~~~~~~~~~
This module provides the core Readability API models.
"""
from .helpers import to_python, to_api
class BaseResource(object):
"""A Base BaseResource object."""
def __init__(self):
super(BaseResource, self).__init__()
self._rdd = None
def __dir__(self):
d = self.__dict__.copy()
try:
del d['_rdd']
except KeyError:
pass
return d.keys()
class Bookmark(BaseResource):
"""Bookmark API Model."""
def __init__(self):
self.id = None
self.user_id = None
self.read_percent = None
self.date_updated = None
self.favorite = None
self.archive = None
self.date_archived = None
self.date_opened = None
self.date_added = None
self.article = None
def __repr__(self):
return '<bookmark id="%s" favorite="%s" archive="%s" read_percent="%s">' % (self.id, self.favorite, self.archive, self.read_percent)
@staticmethod
def new_from_dict(d, rdd=None):
b = to_python(
obj=Bookmark(), in_dict=d,
string_keys = (
'id', 'user_id', 'read_percent', 'favorite', 'archive',
'author',
),
date_keys = ('date_updated', 'date_archived', 'date_opened', 'date_added'),
object_map = {'article': Article},
_rdd = rdd
)
return b
def delete(self):
"""Deletes Bookmark."""
return self._rdd._delete_resource(('bookmarks', self.id))
def update(self):
"""Updates Bookmark."""
args = to_api(
dict(
favorite=self.favorite,
archive=self.archive,
read_percent=self.read_percent,
),
int_keys=('favorite', 'archive')
)
r = self._rdd._post_resource(('bookmarks', self.id), **args)
return r
class Article(BaseResource):
def __init__(self):
self.id = None
self.domain = None
self.title = None
self.url = None
self.short_url = None
self.author = None
self.word_count = None
self.content = None
self.excerpt = None
self.date_published = None
self.next_page_href = None
self.processed = None
self.content_size = None
def __repr__(self):
return '<article id="%s">' % (self.id,)
@staticmethod
def new_from_dict(d, rdd=None):
return to_python(
obj=Article(), in_dict=d,
string_keys = (
'id', 'domain', 'title', 'url', 'short_url', 'author',
'word_count', 'content', 'excerpt', 'next_page_href',
'processed', 'content_size',
),
date_keys = ('date_published',),
_rdd = rdd
)
class Domain(BaseResource):
def __init__(self):
super(Domain, self).__init__()
self.fqdn = None
self.articles_ref = None
def __repr__(self):
return '<domain fqdn="%s">' % (self.fqdn,)
@staticmethod
def new_from_dict(d, rdd=None):
return to_python(
obj=Domain(), in_dict=d,
string_keys = ('fqdn', 'articles_ref'),
_rdd = rdd
)
def articles(self, **filters):
"""Returns Article list, filtered by Domain."""
return self._rdd.get_articles(domain=self.fqdn, **filters)
def contributions(self, **filters):
"""Returns Article list, filtered by Domain."""
return self._rdd.get_contributions(domain=self.fqdn, **filters)
class Contribution(BaseResource):
def __init__(self):
super(Contribution, self).__init__()
self.date = None
self.contribution = None
self.user = None
self.domain = None
self.num_bookmarks = None
def __repr__(self):
return '<contribution domain="%s">' % (self.domain,)
@staticmethod
def new_from_dict(d, rdd=None):
return to_python(
obj=Contribution(), in_dict=d,
string_keys = ('contribution', 'user', 'domain', 'num_bookmarks'),
            date_keys = ('date',),  # one-element tuple; a bare ('date') is just a string
_rdd = rdd
)
class User(BaseResource):
"""User API Model."""
def __init__(self):
self.username = None
self.first_name = None
self.last_name = None
self.date_joined = None
def __repr__(self):
return '<user name="%s">' % (self.username,)
@staticmethod
def new_from_dict(d, rdd=None):
return to_python(
obj=User(), in_dict=d,
            string_keys = ('username', 'first_name', 'last_name'),
date_keys = ('date_joined',),
_rdd=rdd
)
def bookmarks(self, **filters):
"""Returns Bookmark list, filtered by User."""
if self.username == self._rdd.username:
return self._rdd.get_bookmarks(user=self.username, **filters)
else:
return self._rdd.get_bookmarks_by_user(self.username, **filters)
def contributions(self, **filters):
"""Returns Contributions list, filtered by User."""
if self.username == self._rdd.username:
return self._rdd.get_contributions(user=self.username, **filters)
else:
return self._rdd.get_contributions_by_user(self.username, **filters)
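# Usage sketch (hypothetical `rdd` client; new_from_dict wires it into the
# returned model so the convenience methods above can call the API):
#   user = User.new_from_dict(payload, rdd=rdd)
#   for mark in user.bookmarks():
#       print mark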
| mit | -2,055,132,855,764,576,500 | 22.088608 | 140 | 0.524671 | false |
Wikidata/StrepHit | tests/test_classification.py | 1 | 4013 | # -*- encoding: utf-8 -*-
import unittest
from treetaggerwrapper import Tag
from strephit.classification import feature_extractors
class TestFactExtractorFeatureExtractor(unittest.TestCase):
def setUp(self):
self.gazetteer = {
'sentence': ['feature1', 'feature2']
}
self.sentences_data = [
{
'sentence': u'This is the first sentence',
'fes': {
'Subject': u'this',
'Missing': u'this is not',
'Object': u'first sentence',
},
},
{
'sentence': u'This is the second sentence',
'fes': {},
}
]
def test_sorted_set(self):
s = feature_extractors.SortedSet()
for i in xrange(5):
index = s.put(i)
self.assertEqual(index, i)
for i in xrange(5):
index = s.index(i)
self.assertEqual(index, i)
def test_sentence_to_tokens(self):
extractor = feature_extractors.FactExtractorFeatureExtractor('en')
tokens = extractor.sentence_to_tokens(**self.sentences_data[0])
self.assertEqual(tokens, [[u'this', u'DT', u'this', u'Subject'],
Tag(word=u'is', pos=u'VBZ', lemma=u'be'),
Tag(word=u'the', pos=u'DT', lemma=u'the'),
[u'first sentence', 'ENT', u'first sentence', u'Object']])
def test_feature_for(self):
extractor = feature_extractors.FactExtractorFeatureExtractor('en')
self.assertEqual(extractor.feature_for('word1', 'pos', 3, True), 1)
self.assertEqual(extractor.feature_for('word2', 'lemma', -2, True), 2)
self.assertEqual(extractor.feature_for('WoRd1', 'POs', 3, True), 1)
def test_extract_features_no_window(self):
extractor = feature_extractors.FactExtractorFeatureExtractor('en', 0)
_, f1 = extractor.extract_features(add_unknown=True, gazetteer=self.gazetteer,
**self.sentences_data[0])
_, f2 = extractor.extract_features(add_unknown=True, gazetteer=self.gazetteer,
**self.sentences_data[1])
self.assertEqual(f1[0][0], f2[0][0])
self.assertEqual(f1[1][0], f2[1][0])
self.assertEqual(f1[2][0], f2[2][0])
def test_extract_features_window(self):
window = 2
extractor = feature_extractors.FactExtractorFeatureExtractor('en', window)
_, feat = extractor.extract_features(add_unknown=True, gazetteer=self.gazetteer,
**self.sentences_data[1])
self.assertEqual(len(feat[2][0]), 3 * (2 * window + 1) + 2)
def test_feature_labels(self):
extractor = feature_extractors.FactExtractorFeatureExtractor('en')
_, tokens = extractor.extract_features(add_unknown=True, gazetteer=self.gazetteer,
**self.sentences_data[0])
self.assertEqual(tokens[0][1], 0)
self.assertEqual(tokens[1][1], 1)
self.assertEqual(tokens[2][1], 1)
self.assertEqual(tokens[3][1], 2)
def test_get_training_set(self):
extractor = feature_extractors.FactExtractorFeatureExtractor('en')
extractor.process_sentence(add_unknown=True, gazetteer=self.gazetteer,
**self.sentences_data[0])
extractor.process_sentence(add_unknown=True, gazetteer=self.gazetteer,
**self.sentences_data[1])
x, y = extractor.get_features()
self.assertEqual(x.shape, (9, 70))
self.assertEqual(list(y), [0, 1, 1, 2, 1, 1, 1, 1, 1])
def test_unknown_token(self):
extractor = feature_extractors.FactExtractorFeatureExtractor('en')
self.assertEqual(extractor.feature_for('a', 'b', 12, add_unknown=False),
extractor.unk_index)
| gpl-3.0 | -8,885,127,340,239,638,000 | 40.802083 | 92 | 0.555943 | false |
kaphka/catconv | convert.py | 1 | 1091 | import argparse
import signal
from tqdm import tqdm
import catconv.operations as co
import catconv.stabi as sb
exit = False
def signal_handler(signum, frame):
    # Without `global`, the assignment below would only create a local
    # variable and the loop flag would never be set.
    global exit
    print('You pressed Ctrl+C!')
    exit = True
signal.signal(signal.SIGINT, signal_handler)
parser = argparse.ArgumentParser()
parser.add_argument("source")
parser.add_argument("target")
parser.add_argument("-u", "--update", help="overwrite previous results",
action="store_true")
args = parser.parse_args()
source = sb.op.normpath(args.source)
target = sb.op.normpath(args.target)
data_dir, target_cat_name = sb.op.split(target)
pages = list(map(sb.page_from_path, sb.catalog_pages(source, ext=".tif")))  # list() so len(pages) works on Python 3
print("Source catalog:")
print("path:", source)
print("pages:", len(pages))
conversion = {"ext": ".jpg", "remove_type": True, "to_cat": data_dir, "cat": target_cat_name}
from_to = [(page, sb.convert_page_path(page, conversion)) for page in pages]
for ft in tqdm(from_to):
if exit:
break
from_page, to_page = ft
if sb.op.isfile(to_page['path']) and not args.update:
continue
else:
co.convert_to_png(*ft)
| apache-2.0 | 5,971,109,955,525,650,000 | 24.372093 | 92 | 0.669111 | false |
alirizakeles/zato | code/zato-zmq/src/zato/zmq_/mdp/worker.py | 1 | 9531 | # -*- coding: utf-8 -*-
"""
Copyright (C) 2016 Dariusz Suchojad <dsuch at zato.io>
Licensed under LGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import logging
import time
from datetime import datetime, timedelta
# ZeroMQ
import zmq.green as zmq
# Zato
from zato.zmq_.mdp import BaseZMQConnection, const, EventWorkerDisconnect, EventWorkerHeartbeat, EventReady, EventWorkerReply
# ################################################################################################################################
logger = logging.getLogger(__name__)
# ################################################################################################################################
class Worker(BaseZMQConnection):
""" Standalone implementation of a worker for ZeroMQ Majordomo Protocol 0.1 http://rfc.zeromq.org/spec:7
"""
def __init__(self, service_name, broker_address='tcp://localhost:47047', linger=0, poll_interval=100, log_details=False,
heartbeat=3, heartbeat_mult=2, reconnect_sleep=2):
self.service_name = service_name
super(Worker, self).__init__(broker_address, linger, poll_interval, log_details)
# How often, in seconds, to send a heartbeat to the broker or expect one from the broker
self.heartbeat = heartbeat
# If self.heartbeat * self.heartbeat_mult is exceeded, we assume the broker is down
self.heartbeat_mult = heartbeat_mult
# How long, in seconds, to wait before attempting to reconnect to the broker
self.reconnect_sleep = reconnect_sleep
# When did we last hear from the broker
self.broker_last_heartbeat = None
# When did we last send our own heartbeat to the broker
self.worker_last_heartbeat = None
# Timestamp of when we started to run
self.last_connected = datetime.utcnow()
self.has_debug = logger.isEnabledFor(logging.DEBUG)
# Maps event IDs to methods that handle a given one
self.handle_event_map = {
const.v01.request_to_worker: self.on_event_request_to_worker,
const.v01.heartbeat: self.on_event_heartbeat,
const.v01.disconnect: self.on_event_disconnect,
}
# ################################################################################################################################
def connect(self):
logger.info('Connecting to broker %s', self.broker_address)
# Open ZeroMQ sockets first
# From worker to broker
self.client_socket.connect(self.broker_address)
# From broker to worker
self.worker_socket = self.ctx.socket(zmq.DEALER)
self.worker_socket.linger = self.linger
self.worker_poller = zmq.Poller()
self.worker_poller.register(self.worker_socket, zmq.POLLIN)
self.worker_socket.connect(self.broker_address)
# Ok, we are ready
self.notify_ready()
# We can assume that the broker received our message
self.last_connected = datetime.utcnow()
# ################################################################################################################################
def stop(self):
self.worker_poller.unregister(self.worker_socket)
self.worker_socket.close()
self.stop_client_socket()
self.connect_client_socket()
logger.info('Stopped worker for %s', self.broker_address)
# ################################################################################################################################
def needs_reconnect(self):
base_timestamp = self.broker_last_heartbeat if self.broker_last_heartbeat else self.last_connected
return datetime.utcnow() >= base_timestamp + timedelta(seconds=self.heartbeat * self.heartbeat_mult)
# ################################################################################################################################
def reconnect(self):
last_hb = '{} (UTC)'.format(self.broker_last_heartbeat.isoformat()) if self.broker_last_heartbeat else 'never'
logger.info('Sleeping for %ss before reconnecting to broker %s, last HB from broker: %s',
self.reconnect_sleep, self.broker_address, last_hb)
time.sleep(self.reconnect_sleep)
logger.info('Reconnecting to broker %s', self.broker_address)
self.stop()
self.connect()
# Let's give the other side a moment to reply to our ready event
time.sleep(self.reconnect_sleep)
# ################################################################################################################################
def needs_hb_to_broker(self):
return datetime.utcnow() >= self.worker_last_heartbeat + timedelta(seconds=self.heartbeat)
# ################################################################################################################################
def serve_forever(self):
# To speed up look-ups
log_details = self.log_details
# Main loop
while self.keep_running:
try:
items = self.worker_poller.poll(self.poll_interval)
except KeyboardInterrupt:
self.notify_disconnect()
break
if items:
msg = self.worker_socket.recv_multipart()
if log_details:
logger.info('Received msg at %s %s', self.broker_address, msg)
self.handle(msg)
else:
if log_details:
logger.info('No items for worker at %s', self.broker_address)
if self.needs_hb_to_broker():
self.notify_heartbeat()
if self.needs_reconnect():
self.reconnect()
# ################################################################################################################################
def on_event_request_to_worker(self, msg):
        logger.info('In on_event_request_to_worker %s', msg)
return datetime.utcnow().isoformat()
# ################################################################################################################################
def on_event_heartbeat(self, *ignored):
""" A no-op since self.handle already handles heartbeats from the broker.
"""
# ################################################################################################################################
def on_event_disconnect(self, *ignored):
""" Our broker tells us to disconnect - according to the spec we now must re-open the connection.
"""
self.reconnect()
# ################################################################################################################################
def handle(self, msg):
logger.info('Handling %s', msg)
# Since we received this message, it means the broker is up so the message,
# no matter what event it is, allows us to update the timestamp of the last HB from broker
self.broker_last_heartbeat = datetime.utcnow()
sender_id = None
body = None
command = msg[2]
if command == const.v01.request_to_worker:
sender_id = msg[3]
body = msg[4]
# Hand over the message to an actual implementation and reply if told to
response = self.handle_event_map[command](body)
if response:
self.send(EventWorkerReply(response, sender_id).serialize())
# Message handled, we are ready to handle a new one, assuming this one was a request
if command == const.v01.request_to_worker:
self.notify_ready()
# ################################################################################################################################
def send(self, data, needs_hb=True):
""" Sends data to the broker and updates an internal timer of when the last time we send a heartbeat to the broker
since sending anything in that direction should be construed by the broker as a heartbeat itself.
"""
# Send data first
self.worker_socket.send_multipart(data)
# Update the timer
if needs_hb:
self.worker_last_heartbeat = datetime.utcnow()
# ################################################################################################################################
def notify_ready(self):
""" Notify the broker that we are ready to handle a new message.
"""
self.send(EventReady(self.service_name).serialize())
# ################################################################################################################################
def notify_heartbeat(self):
""" Notify the broker that we are still around.
"""
self.send(EventWorkerHeartbeat().serialize())
# ################################################################################################################################
def notify_disconnect(self):
""" Notify the broker that we are to disconnect from it.
"""
self.send(EventWorkerDisconnect().serialize(), needs_hb=False)
# ################################################################################################################################
if __name__ == '__main__':
w = Worker(b'My service', 'tcp://localhost:47047')
w.connect()
w.serve_forever()
| gpl-3.0 | -5,260,113,745,436,168,000 | 37.587045 | 130 | 0.484 | false |
pcingola/server | ga4gh/cli.py | 1 | 32399 | """
Command line interface programs for the GA4GH reference implementation.
TODO: document how to use these for development and simple deployment.
"""
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import argparse
import logging
import unittest
import unittest.loader
import unittest.suite
import requests
import ga4gh.client as client
import ga4gh.converters as converters
import ga4gh.frontend as frontend
import ga4gh.configtest as configtest
import ga4gh.exceptions as exceptions
# the maximum value of a long type in avro = 2**63 - 1
# (64 bit signed integer)
# http://avro.apache.org/docs/1.7.7/spec.html#schema_primitive
# AVRO_LONG_MAX = (1 << 63) - 1
# TODO in the meantime, this is the max value pysam can handle
# This should be removed once pysam input sanitisation has been
# implemented.
AVRO_LONG_MAX = 2**31 - 1
##############################################################################
# Server
##############################################################################
def addServerOptions(parser):
parser.add_argument(
"--port", "-P", default=8000, type=int,
help="The port to listen on")
parser.add_argument(
"--host", "-H", default="127.0.0.1",
help="The server host string; use 0.0.0.0 to allow all connections.")
parser.add_argument(
"--config", "-c", default='DevelopmentConfig', type=str,
help="The configuration to use")
parser.add_argument(
"--config-file", "-f", type=str, default=None,
help="The configuration file to use")
parser.add_argument(
"--tls", "-t", action="store_true", default=False,
help="Start in TLS (https) mode.")
parser.add_argument(
"--dont-use-reloader", default=False, action="store_true",
help="Don't use the flask reloader")
addDisableUrllibWarningsArgument(parser)
def server_main(parser=None):
if parser is None:
parser = argparse.ArgumentParser(
description="GA4GH reference server")
addServerOptions(parser)
args = parser.parse_args()
if args.disable_urllib_warnings:
requests.packages.urllib3.disable_warnings()
frontend.configure(
args.config_file, args.config, args.port)
sslContext = None
if args.tls or ("OIDC_PROVIDER" in frontend.app.config):
sslContext = "adhoc"
frontend.app.run(
host=args.host, port=args.port,
use_reloader=not args.dont_use_reloader, ssl_context=sslContext)
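# Example invocations (sketch; `ga4gh_server` is an assumed console entry
# point for server_main, flags as defined above):
#   ga4gh_server --host 0.0.0.0 --port 8000 --config DevelopmentConfig
#   ga4gh_server --config-file /path/to/config.py --tls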
##############################################################################
# Client
##############################################################################
def verbosityToLogLevel(verbosity):
"""
    Returns the specified verbosity level interpreted as a logging level.
"""
ret = 0
if verbosity == 1:
ret = logging.INFO
elif verbosity >= 2:
ret = logging.DEBUG
return ret
class AbstractQueryRunner(object):
"""
Abstract base class for runner classes
"""
def __init__(self, args):
self._key = args.key
self._httpClient = client.HttpClient(
args.baseUrl, verbosityToLogLevel(args.verbose), self._key)
class FormattedOutputRunner(AbstractQueryRunner):
"""
Superclass of runners that support output in common formats.
"""
def __init__(self, args):
super(FormattedOutputRunner, self).__init__(args)
self._output = self._textOutput
if args.outputFormat == "json":
self._output = self._jsonOutput
def _jsonOutput(self, gaObjects):
"""
Outputs the specified protocol objects as one JSON string per
line.
"""
for gaObject in gaObjects:
print(gaObject.toJsonString())
def _textOutput(self, gaObjects):
"""
Outputs a text summary of the specified protocol objects, one
per line.
"""
for gaObject in gaObjects:
print(gaObject.id, gaObject.name, sep="\t")
class AbstractGetRunner(FormattedOutputRunner):
"""
Abstract base class for get runner classes
"""
def __init__(self, args):
super(AbstractGetRunner, self).__init__(args)
self._id = args.id
        # (self._httpClient is already created by AbstractQueryRunner)
def run(self):
response = self._method(self._id)
self._output([response])
class AbstractSearchRunner(FormattedOutputRunner):
"""
Abstract base class for search runner classes
"""
def __init__(self, args):
super(AbstractSearchRunner, self).__init__(args)
self._pageSize = args.pageSize
self._httpClient.setPageSize(self._pageSize)
def getAllDatasets(self):
"""
Returns all datasets on the server.
"""
return self._httpClient.searchDatasets()
def getAllVariantSets(self):
"""
Returns all variant sets on the server.
"""
for dataset in self.getAllDatasets():
iterator = self._httpClient.searchVariantSets(datasetId=dataset.id)
for variantSet in iterator:
yield variantSet
def getAllReadGroupSets(self):
"""
Returns all readgroup sets on the server.
"""
for dataset in self.getAllDatasets():
iterator = self._httpClient.searchReadGroupSets(
datasetId=dataset.id)
for readGroupSet in iterator:
yield readGroupSet
def getAllReferenceSets(self):
"""
Returns all reference sets on the server.
"""
return self._httpClient.searchReferenceSets()
# Runners for the various search methods
class SearchDatasetsRunner(AbstractSearchRunner):
"""
Runner class for the datasets/search method
"""
def __init__(self, args):
super(SearchDatasetsRunner, self).__init__(args)
def run(self):
iterator = self._httpClient.searchDatasets()
self._output(iterator)
class SearchReferenceSetsRunner(AbstractSearchRunner):
"""
Runner class for the referencesets/search method.
"""
def __init__(self, args):
super(SearchReferenceSetsRunner, self).__init__(args)
self._accession = args.accession
self._md5checksum = args.md5checksum
def run(self):
iterator = self._httpClient.searchReferenceSets(
accession=self._accession, md5checksum=self._md5checksum)
self._output(iterator)
class SearchReferencesRunner(AbstractSearchRunner):
"""
Runner class for the references/search method
"""
def __init__(self, args):
super(SearchReferencesRunner, self).__init__(args)
self._referenceSetId = args.referenceSetId
self._accession = args.accession
self._md5checksum = args.md5checksum
def _run(self, referenceSetId):
iterator = self._httpClient.searchReferences(
accession=self._accession, md5checksum=self._md5checksum,
referenceSetId=referenceSetId)
self._output(iterator)
def run(self):
if self._referenceSetId is None:
for referenceSet in self.getAllReferenceSets():
self._run(referenceSet.id)
else:
self._run(self._referenceSetId)
class SearchVariantSetsRunner(AbstractSearchRunner):
"""
Runner class for the variantsets/search method.
"""
def __init__(self, args):
super(SearchVariantSetsRunner, self).__init__(args)
self._datasetId = args.datasetId
def _run(self, datasetId):
iterator = self._httpClient.searchVariantSets(datasetId=datasetId)
self._output(iterator)
def run(self):
if self._datasetId is None:
for dataset in self.getAllDatasets():
self._run(dataset.id)
else:
self._run(self._datasetId)
class SearchReadGroupSetsRunner(AbstractSearchRunner):
"""
Runner class for the readgroupsets/search method
"""
def __init__(self, args):
super(SearchReadGroupSetsRunner, self).__init__(args)
self._datasetId = args.datasetId
self._name = args.name
def _run(self, datasetId):
iterator = self._httpClient.searchReadGroupSets(
datasetId=datasetId, name=self._name)
self._output(iterator)
def run(self):
if self._datasetId is None:
for dataset in self.getAllDatasets():
self._run(dataset.id)
else:
self._run(self._datasetId)
class SearchCallSetsRunner(AbstractSearchRunner):
"""
Runner class for the callsets/search method
"""
def __init__(self, args):
super(SearchCallSetsRunner, self).__init__(args)
self._variantSetId = args.variantSetId
self._name = args.name
def _run(self, variantSetId):
iterator = self._httpClient.searchCallSets(
variantSetId=variantSetId, name=self._name)
self._output(iterator)
def run(self):
if self._variantSetId is None:
for variantSet in self.getAllVariantSets():
self._run(variantSet.id)
else:
self._run(self._variantSetId)
class VariantFormatterMixin(object):
"""
Simple mixin to format variant objects.
"""
def _textOutput(self, gaObjects):
"""
Prints out the specified Variant objects in a VCF-like form.
"""
for variant in gaObjects:
print(
variant.id, variant.variantSetId, variant.names,
variant.referenceName, variant.start, variant.end,
variant.referenceBases, variant.alternateBases,
sep="\t", end="\t")
for key, value in variant.info.items():
print(key, value, sep="=", end=";")
print("\t", end="")
for c in variant.calls:
print(
c.callSetId, c.genotype, c.genotypeLikelihood, c.info,
c.phaseset, sep=":", end="\t")
print()
class SearchVariantsRunner(VariantFormatterMixin, AbstractSearchRunner):
"""
Runner class for the variants/search method.
"""
def __init__(self, args):
super(SearchVariantsRunner, self).__init__(args)
self._referenceName = args.referenceName
self._variantSetId = args.variantSetId
self._start = args.start
self._end = args.end
if args.callSetIds == []:
self._callSetIds = []
elif args.callSetIds == '*':
self._callSetIds = None
else:
self._callSetIds = args.callSetIds.split(",")
def _run(self, variantSetId):
iterator = self._httpClient.searchVariants(
start=self._start, end=self._end,
referenceName=self._referenceName,
variantSetId=variantSetId, callSetIds=self._callSetIds)
self._output(iterator)
def run(self):
if self._variantSetId is None:
for variantSet in self.getAllVariantSets():
self._run(variantSet.id)
else:
self._run(self._variantSetId)
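# Example invocation (sketch; `ga4gh_client` is an assumed console entry
# point dispatching to these runners via the parsers defined below):
#   ga4gh_client variants-search http://localhost:8000 -r 1 -s 0 -e 1000 -c '*'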
class SearchReadsRunner(AbstractSearchRunner):
"""
Runner class for the reads/search method
"""
def __init__(self, args):
super(SearchReadsRunner, self).__init__(args)
self._start = args.start
self._end = args.end
self._referenceId = args.referenceId
self._readGroupIds = None
if args.readGroupIds is not None:
self._readGroupIds = args.readGroupIds.split(",")
def run(self):
# TODO add support for looking up ReadGroupSets and References
# like we do with SearchVariants and others.
iterator = self._httpClient.searchReads(
readGroupIds=self._readGroupIds, referenceId=self._referenceId,
start=self._start, end=self._end)
self._output(iterator)
def _textOutput(self, gaObjects):
"""
        Prints out a summary of the specified Read objects, one per line.
"""
for read in gaObjects:
# TODO add in some more useful output here.
print(read.id)
# ListReferenceBases is an oddball, and doesn't fit either get or
# search patterns.
class ListReferenceBasesRunner(AbstractQueryRunner):
"""
Runner class for the references/{id}/bases method
"""
def __init__(self, args):
super(ListReferenceBasesRunner, self).__init__(args)
self._referenceId = args.id
self._start = args.start
self._end = args.end
def run(self):
iterator = self._httpClient.listReferenceBases(
self._referenceId, self._start, self._end)
# TODO add support for FASTA output.
for segment in iterator:
print(segment, end="")
print()
# Runners for the various GET methods.
class GetReferenceSetRunner(AbstractGetRunner):
"""
Runner class for the referencesets/{id} method
"""
def __init__(self, args):
super(GetReferenceSetRunner, self).__init__(args)
self._method = self._httpClient.getReferenceSet
class GetReferenceRunner(AbstractGetRunner):
"""
Runner class for the references/{id} method
"""
def __init__(self, args):
super(GetReferenceRunner, self).__init__(args)
self._method = self._httpClient.getReference
class GetReadGroupSetRunner(AbstractGetRunner):
"""
Runner class for the readgroupsets/{id} method
"""
def __init__(self, args):
super(GetReadGroupSetRunner, self).__init__(args)
self._method = self._httpClient.getReadGroupSet
class GetReadGroupRunner(AbstractGetRunner):
"""
    Runner class for the readgroups/{id} method
"""
def __init__(self, args):
super(GetReadGroupRunner, self).__init__(args)
self._method = self._httpClient.getReadGroup
class GetCallsetRunner(AbstractGetRunner):
"""
Runner class for the callsets/{id} method
"""
def __init__(self, args):
super(GetCallsetRunner, self).__init__(args)
self._method = self._httpClient.getCallset
class GetDatasetRunner(AbstractGetRunner):
"""
Runner class for the datasets/{id} method
"""
def __init__(self, args):
super(GetDatasetRunner, self).__init__(args)
self._method = self._httpClient.getDataset
class GetVariantRunner(VariantFormatterMixin, AbstractGetRunner):
"""
Runner class for the variants/{id} method
"""
def __init__(self, args):
super(GetVariantRunner, self).__init__(args)
self._method = self._httpClient.getVariant
def addDisableUrllibWarningsArgument(parser):
parser.add_argument(
"--disable-urllib-warnings", default=False, action="store_true",
help="Disable urllib3 warnings")
def addVariantSearchOptions(parser):
"""
    Adds common options to a variant search command line parser.
"""
addVariantSetIdArgument(parser)
addReferenceNameArgument(parser)
addCallSetIdsArgument(parser)
addStartArgument(parser)
addEndArgument(parser)
addPageSizeArgument(parser)
def addVariantSetIdArgument(parser):
parser.add_argument(
"--variantSetId", "-V", default=None,
help="The variant set id to search over")
def addReferenceNameArgument(parser):
parser.add_argument(
"--referenceName", "-r", default="1",
help="Only return variants on this reference.")
def addCallSetIdsArgument(parser):
parser.add_argument(
"--callSetIds", "-c", default=[],
help="""Return variant calls which belong to call sets
with these IDs. Pass in IDs as a comma separated list (no spaces).
Use '*' to request all call sets (the quotes are important!).
""")
def addStartArgument(parser):
parser.add_argument(
"--start", "-s", default=0, type=int,
help="The start of the search range (inclusive).")
def addEndArgument(parser, defaultValue=AVRO_LONG_MAX):
parser.add_argument(
"--end", "-e", default=defaultValue, type=int,
help="The end of the search range (exclusive).")
def addIdArgument(parser):
parser.add_argument("id", default=None, help="The id of the object")
def addGetArguments(parser):
addUrlArgument(parser)
addIdArgument(parser)
addOutputFormatArgument(parser)
def addUrlArgument(parser):
"""
Adds the URL endpoint argument to the specified parser.
"""
parser.add_argument("baseUrl", help="The URL of the API endpoint")
def addOutputFormatArgument(parser):
parser.add_argument(
"--outputFormat", "-O", choices=['text', 'json'], default="text",
help=(
"The format for object output. Currently supported are "
"'text' (default), which gives a short summary of the object and "
"'json', which outputs each object in line-delimited JSON"))
def addAccessionArgument(parser):
parser.add_argument(
"--accession", default=None,
help="The accession to search for")
def addMd5ChecksumArgument(parser):
parser.add_argument(
"--md5checksum", default=None,
help="The md5checksum to search for")
def addPageSizeArgument(parser):
parser.add_argument(
"--pageSize", "-m", default=None, type=int,
help=(
"The maximum number of results returned in one page. "
"The default is to let the server decide how many "
"results to return in a single page."))
def addDatasetIdArgument(parser):
parser.add_argument(
"--datasetId", default=None,
help="The datasetId to search over")
def addReferenceSetIdArgument(parser):
parser.add_argument(
"--referenceSetId", default=None,
help="The referenceSet to search over")
def addNameArgument(parser):
parser.add_argument(
"--name", default=None,
help="The name to search over")
def addClientGlobalOptions(parser):
parser.add_argument(
'--verbose', '-v', action='count', default=0,
help="Increase verbosity; can be supplied multiple times")
parser.add_argument(
"--key", "-k", default='invalid',
help="Auth Key. Found on server index page.")
addDisableUrllibWarningsArgument(parser)
def addHelpParser(subparsers):
parser = subparsers.add_parser(
"help", description="ga4gh_client help",
help="show this help message and exit")
return parser
def addVariantsSearchParser(subparsers):
parser = subparsers.add_parser(
"variants-search",
description="Search for variants",
help="Search for variants.")
parser.set_defaults(runner=SearchVariantsRunner)
addUrlArgument(parser)
addOutputFormatArgument(parser)
addVariantSearchOptions(parser)
return parser
def addVariantSetsSearchParser(subparsers):
parser = subparsers.add_parser(
"variantsets-search",
description="Search for variantSets",
help="Search for variantSets.")
parser.set_defaults(runner=SearchVariantSetsRunner)
addOutputFormatArgument(parser)
addUrlArgument(parser)
addPageSizeArgument(parser)
addDatasetIdArgument(parser)
return parser
def addReferenceSetsSearchParser(subparsers):
parser = subparsers.add_parser(
"referencesets-search",
description="Search for referenceSets",
help="Search for referenceSets")
parser.set_defaults(runner=SearchReferenceSetsRunner)
addUrlArgument(parser)
addOutputFormatArgument(parser)
addPageSizeArgument(parser)
addAccessionArgument(parser)
addMd5ChecksumArgument(parser)
parser.add_argument(
"--assemblyId",
help="The assembly id to search for")
return parser
def addReferencesSearchParser(subparsers):
parser = subparsers.add_parser(
"references-search",
description="Search for references",
help="Search for references")
parser.set_defaults(runner=SearchReferencesRunner)
addUrlArgument(parser)
addOutputFormatArgument(parser)
addPageSizeArgument(parser)
addAccessionArgument(parser)
addMd5ChecksumArgument(parser)
addReferenceSetIdArgument(parser)
return parser
def addReadGroupSetsSearchParser(subparsers):
parser = subparsers.add_parser(
"readgroupsets-search",
description="Search for readGroupSets",
help="Search for readGroupSets")
parser.set_defaults(runner=SearchReadGroupSetsRunner)
addUrlArgument(parser)
addOutputFormatArgument(parser)
addPageSizeArgument(parser)
addDatasetIdArgument(parser)
addNameArgument(parser)
return parser
def addCallsetsSearchParser(subparsers):
parser = subparsers.add_parser(
"callsets-search",
description="Search for callSets",
help="Search for callSets")
parser.set_defaults(runner=SearchCallSetsRunner)
addUrlArgument(parser)
addOutputFormatArgument(parser)
addPageSizeArgument(parser)
addNameArgument(parser)
addVariantSetIdArgument(parser)
return parser
def addReadsSearchParser(subparsers):
parser = subparsers.add_parser(
"reads-search",
description="Search for reads",
help="Search for reads")
parser.set_defaults(runner=SearchReadsRunner)
addOutputFormatArgument(parser)
addReadsSearchParserArguments(parser)
return parser
def addDatasetsGetParser(subparsers):
parser = subparsers.add_parser(
"datasets-get",
description="Get a dataset",
help="Get a dataset")
parser.set_defaults(runner=GetDatasetRunner)
addGetArguments(parser)
def addDatasetsSearchParser(subparsers):
parser = subparsers.add_parser(
"datasets-search",
description="Search for datasets",
help="Search for datasets")
parser.set_defaults(runner=SearchDatasetsRunner)
addUrlArgument(parser)
addPageSizeArgument(parser)
addOutputFormatArgument(parser)
return parser
def addReadsSearchParserArguments(parser):
addUrlArgument(parser)
addPageSizeArgument(parser)
addStartArgument(parser)
addEndArgument(parser)
parser.add_argument(
"--readGroupIds", default=None,
help="The readGroupIds to search over")
parser.add_argument(
"--referenceId", default=None,
help="The referenceId to search over")
def addReferenceSetsGetParser(subparsers):
parser = subparsers.add_parser(
"referencesets-get",
description="Get a referenceset",
help="Get a referenceset")
parser.set_defaults(runner=GetReferenceSetRunner)
addGetArguments(parser)
def addReferencesGetParser(subparsers):
parser = subparsers.add_parser(
"references-get",
description="Get a reference",
help="Get a reference")
parser.set_defaults(runner=GetReferenceRunner)
addGetArguments(parser)
def addReadGroupSetsGetParser(subparsers):
parser = subparsers.add_parser(
"readgroupsets-get",
description="Get a read group set",
help="Get a read group set")
parser.set_defaults(runner=GetReadGroupSetRunner)
addGetArguments(parser)
def addReadGroupsGetParser(subparsers):
parser = subparsers.add_parser(
"readgroups-get",
description="Get a read group",
help="Get a read group")
parser.set_defaults(runner=GetReadGroupRunner)
addGetArguments(parser)
def addCallsetsGetParser(subparsers):
parser = subparsers.add_parser(
"callsets-get",
description="Get a callset",
help="Get a callset")
parser.set_defaults(runner=GetCallsetRunner)
addGetArguments(parser)
def addVariantsGetParser(subparsers):
parser = subparsers.add_parser(
"variants-get",
description="Get a variant",
help="Get a variant")
parser.set_defaults(runner=GetVariantRunner)
addGetArguments(parser)
def addReferencesBasesListParser(subparsers):
parser = subparsers.add_parser(
"references-list-bases",
description="List bases of a reference",
help="List bases of a reference")
parser.set_defaults(runner=ListReferenceBasesRunner)
addUrlArgument(parser)
addIdArgument(parser)
addStartArgument(parser)
addEndArgument(parser, defaultValue=None)
def getClientParser():
parser = argparse.ArgumentParser(
description="GA4GH reference client")
addClientGlobalOptions(parser)
subparsers = parser.add_subparsers(title='subcommands',)
addHelpParser(subparsers)
addVariantsSearchParser(subparsers)
addVariantSetsSearchParser(subparsers)
addReferenceSetsSearchParser(subparsers)
addReferencesSearchParser(subparsers)
addReadGroupSetsSearchParser(subparsers)
addCallsetsSearchParser(subparsers)
addReadsSearchParser(subparsers)
addDatasetsSearchParser(subparsers)
addReferenceSetsGetParser(subparsers)
addReferencesGetParser(subparsers)
addReadGroupSetsGetParser(subparsers)
addReadGroupsGetParser(subparsers)
addCallsetsGetParser(subparsers)
addVariantsGetParser(subparsers)
addDatasetsGetParser(subparsers)
addReferencesBasesListParser(subparsers)
return parser
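# Illustrative usage sketch (not part of the original module; argv values are
# hypothetical). Each subcommand stores its Runner class in ``args.runner``,
# which client_main() below instantiates and runs:
#
#     parser = getClientParser()
#     args = parser.parse_args([
#         "variants-search", "http://localhost/ga4gh",
#         "--variantSetId", "vs1", "--start", "0", "--end", "1000"])
#     args.runner(args).run()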
def client_main():
parser = getClientParser()
args = parser.parse_args()
if "runner" not in args:
parser.print_help()
else:
if args.disable_urllib_warnings:
requests.packages.urllib3.disable_warnings()
try:
runner = args.runner(args)
runner.run()
except (exceptions.BaseClientException,
requests.exceptions.RequestException) as exception:
# TODO suppress exception unless debug settings are enabled
raise exception
##############################################################################
# ga2vcf
##############################################################################
class Ga2VcfRunner(SearchVariantsRunner):
"""
Runner class for the ga2vcf conversion tool
"""
def __init__(self, args):
super(Ga2VcfRunner, self).__init__(args)
self._outputFile = args.outputFile
self._binaryOutput = False
if args.outputFormat == "bcf":
self._binaryOutput = True
def run(self):
variantSet = self._httpClient.getVariantSet(self._variantSetId)
iterator = self._httpClient.searchVariants(
start=self._start, end=self._end,
referenceName=self._referenceName,
variantSetId=self._variantSetId,
callSetIds=self._callSetIds)
# do conversion
vcfConverter = converters.VcfConverter(
variantSet, iterator, self._outputFile, self._binaryOutput)
vcfConverter.convert()
def addOutputFileArgument(parser):
parser.add_argument(
"--outputFile", "-o", default=None,
help="the file to write the output to")
def getGa2VcfParser():
parser = argparse.ArgumentParser(
description=(
"GA4GH VCF conversion tool. Converts variant information "
"stored in a GA4GH repository into VCF format."))
addClientGlobalOptions(parser)
addOutputFileArgument(parser)
addUrlArgument(parser)
parser.add_argument("variantSetId", help="The variant set to convert")
parser.add_argument(
"--outputFormat", "-O", choices=['vcf', 'bcf'], default="vcf",
help=(
"The format for object output. Currently supported are "
"'vcf' (default), which is a text-based format and "
"'bcf', which is the binary equivalent"))
addReferenceNameArgument(parser)
addCallSetIdsArgument(parser)
addStartArgument(parser)
addEndArgument(parser)
addPageSizeArgument(parser)
return parser
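# Hypothetical invocation of the parser above (illustrative only; the
# executable name "ga2vcf" is assumed from the entry point below):
#
#     ga2vcf --outputFile out.vcf http://localhost/ga4gh variantSetId1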
def ga2vcf_main():
parser = getGa2VcfParser()
args = parser.parse_args()
if "baseUrl" not in args:
parser.print_help()
else:
runner = Ga2VcfRunner(args)
runner.run()
##############################################################################
# ga2sam
##############################################################################
class Ga2SamRunner(SearchReadsRunner):
"""
Runner class for the ga2sam conversion tool
"""
def __init__(self, args):
args.readGroupIds = args.readGroupId
super(Ga2SamRunner, self).__init__(args)
self._outputFile = args.outputFile
self._binaryOutput = False
if args.outputFormat == "bam":
self._binaryOutput = True
def run(self):
readGroup = self._httpClient.getReadGroup(self._readGroupIds[0])
iterator = self._httpClient.searchReads(
readGroupIds=self._readGroupIds, referenceId=self._referenceId,
start=self._start, end=self._end)
# do conversion
samConverter = converters.SamConverter(
readGroup, iterator, self._outputFile, self._binaryOutput)
samConverter.convert()
def getGa2SamParser():
parser = argparse.ArgumentParser(
description="GA4GH SAM conversion tool")
addClientGlobalOptions(parser)
addUrlArgument(parser)
parser.add_argument(
"readGroupId",
help="The ReadGroup to convert to SAM/BAM format.")
addPageSizeArgument(parser)
addStartArgument(parser)
addEndArgument(parser)
parser.add_argument(
"--referenceId", default=None,
help="The referenceId to search over")
parser.add_argument(
"--outputFormat", "-O", default="sam", choices=["sam", "bam"],
help=(
"The format for object output. Currently supported are "
"'sam' (default), which is a text-based format and "
"'bam', which is the binary equivalent"))
addOutputFileArgument(parser)
return parser
def ga2sam_main():
parser = getGa2SamParser()
args = parser.parse_args()
if "baseUrl" not in args:
parser.print_help()
else:
runner = Ga2SamRunner(args)
runner.run()
##############################################################################
# Configuration testing
##############################################################################
class SimplerResult(unittest.TestResult):
"""
The TestResult class gives formatted tracebacks as error messages, which
is not what we want. Instead we just want the error message from the
err param. Hence this subclass.
"""
def addError(self, test, err):
self.errors.append((test,
"{0}: {1}".format(err[0].__name__, err[1])))
def addFailure(self, test, err):
self.failures.append((test,
"{0}: {1}".format(err[0].__name__, err[1])))
def configtest_main(parser=None):
if parser is None:
parser = argparse.ArgumentParser(
description="GA4GH server configuration validator")
parser.add_argument(
"--config", "-c", default='DevelopmentConfig', type=str,
help="The configuration to use")
parser.add_argument(
"--config-file", "-f", type=str, default=None,
help="The configuration file to use")
args = parser.parse_args()
configStr = 'ga4gh.serverconfig:{0}'.format(args.config)
configtest.TestConfig.configStr = configStr
configtest.TestConfig.configFile = args.config_file
configtest.TestConfig.configEnv = "GA4GH_CONFIGURATION"
loader = unittest.TestLoader()
tests = loader.loadTestsFromModule(configtest)
results = SimplerResult()
tests.run(results)
logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)
log.info('{0} Tests run. {1} errors, {2} failures, {3} skipped'.
format(results.testsRun,
len(results.errors),
len(results.failures),
len(results.skipped)))
for result in results.errors:
if result is not None:
log.critical('Error: {0}: {1}'.format(result[0].id(), result[1]))
for result in results.failures:
if result is not None:
log.critical('Failure: {0}: {1}'.format(result[0].id(), result[1]))
for result in results.skipped:
if result is not None:
log.info('Skipped: {0}: {1}'.format(result[0].id(), result[1]))
| apache-2.0 | 6,806,398,057,844,227,000 | 30.212909 | 79 | 0.635112 | false |
antonygc/liblightbase | liblightbase/lbdoc/metaclass.py | 1 | 6065 | from liblightbase import lbutils
from liblightbase.lbdoc.metadata import DocumentMetadata
def generate_metaclass(struct, base=None):
"""
Generate document metaclass. The document metaclass
is an abstraction of document model defined by base
structures.
@param struct: Field or Group object.
@param base: Base object or None.
"""
build_metadata = False
if base is None:
base = struct
build_metadata = True
snames = struct.content.__snames__
rnames = struct.content.__rnames__
class MetaClass(object):
"""
Document metaclass. Describes the structures defifined by
document structure model.
"""
# @property __valreq__: Flag used to validate required
# fields or not.
__valreq__ = True
# @property __slots__: reserves space for the declared
# variables and prevents the automatic creation of
# __dict__ and __weakref__ for each instance.
__slots__ = ['_' + sname for sname in snames]
if build_metadata:
__slots__.append('__metadata__')
def __init__(self, **kwargs):
""" Document MetaClass constructor
"""
if self.__valreq__:
lbutils.validate_required(rnames, kwargs)
for arg in kwargs:
setattr(self, arg, kwargs[arg])
for childstruct in struct.content:
structname, prop = generate_property(base, childstruct)
setattr(MetaClass, structname, prop)
if build_metadata:
MetaClass._metadata = build_metadata_prop()
MetaClass.__name__ = struct.metadata.name
return MetaClass
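# Illustrative sketch (not part of the original module). ``base`` is assumed
# to be a liblightbase Base object whose structure defines a required field
# named "title":
#
#     DocumentClass = generate_metaclass(base)
#     doc = DocumentClass(title="A title")  # required fields are validated
#     doc.title  # -> "A title", unwrapped from the field metaclass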
def generate_property(base, struct):
"""
Make python's property based on structure attributes.
@param base: Base object.
@param struct: Field or Group object.
"""
if struct.is_field:
structname = struct.name
elif struct.is_group:
structname = struct.metadata.name
attr_name = '_' + structname
def getter(self):
value = getattr(self, attr_name)
if struct.is_field:
return getattr(value, '__value__')
return value
def setter(self, value):
struct_metaclass = base.metaclass(structname)
if struct.is_field:
value = struct_metaclass(value)
elif struct.is_group:
if struct.metadata.multivalued:
msg = 'object {} should be instance of {}'.format(
struct.metadata.name, list)
assert isinstance(value, list), msg
msg = '{} list elements should be instances of {}'.format(
struct.metadata.name, struct_metaclass)
assertion = all(isinstance(element, struct_metaclass) \
for element in value)
assert assertion, msg
value = generate_multimetaclass(struct,
struct_metaclass)(value)
else:
msg = '{} object should be an instance of {}'.format(
struct.metadata.name, struct_metaclass)
assert isinstance(value, struct_metaclass), msg
setattr(self, attr_name, value)
def deleter(self):
delattr(self, attr_name)
return structname, property(getter,
setter, deleter, structname)
def build_metadata_prop():
def fget(self):
return self.__metadata__
def fset(self, value):
msg = '_metadata attribute should be a DocumentMetadata object.'
assert isinstance(value, DocumentMetadata), msg
self.__metadata__ = value
def fdel(self):
del self.__metadata__
return property(fget, fset, fdel, '_metadata')
def generate_multimetaclass(struct, struct_metaclass):
"""
Generate metaclass to use with multivalued groups.
@param struct: Field or Group object
@param struct_metaclass: The struct Metaclass
"""
class MultiGroupMetaClass(list):
"""
Multivalued Group Metaclass. Metaclass used to ensure list
elements are instances of right metaclasses.
"""
def __setitem__(self, index, element):
""" x.__setitem__(y, z) <==> x[y] = z
"""
msg = '{} list elements should be instances of {}'.format(
struct.metadata.name, struct_metaclass)
assert isinstance(element, struct_metaclass), msg
return super(MultiGroupMetaClass, self).__setitem__(index,
element)
def append(self, element):
""" L.append(object) -- append object to end
"""
msg = '{} list elements should be instances of {}'.format(
struct.metadata.name, struct_metaclass)
assert isinstance(element, struct_metaclass), msg
return super(MultiGroupMetaClass, self).append(element)
return MultiGroupMetaClass
def generate_field_metaclass(field, base):
"""
Generate field metaclass. The field metaclass
validates incoming value against fields' datatype.
@param field: Field object.
@param base: Base object.
"""
class FieldMetaClass(object):
"""
Field MetaClass. validates incoming
value against fields' datatype.
"""
def __init__(self, value):
self.__value__ = value
def __setattr__(self, obj, value):
validator = field._datatype.__schema__(base, field, 0)
if field.multivalued is True:
msg = 'Expected type list for {}, but found {}'
assert isinstance(value, list), msg.format(
field.name, type(value))
value = [validator(element) for element in value]
else:
value = validator(value)
super(FieldMetaClass, self).__setattr__('__value__', value)
def __getattr__(self, obj):
return super(FieldMetaClass, self).__getattribute__('__value__')
FieldMetaClass.__name__ = field.name
return FieldMetaClass
| gpl-2.0 | 6,355,603,194,399,791,000 | 33.460227 | 76 | 0.588458 | false |
aio-libs/aiozmq | examples/core_dealer_router.py | 1 | 1579 | import asyncio
import aiozmq
import zmq
class ZmqDealerProtocol(aiozmq.ZmqProtocol):
transport = None
def __init__(self, queue, on_close):
self.queue = queue
self.on_close = on_close
def connection_made(self, transport):
self.transport = transport
def msg_received(self, msg):
self.queue.put_nowait(msg)
def connection_lost(self, exc):
self.on_close.set_result(exc)
class ZmqRouterProtocol(aiozmq.ZmqProtocol):
transport = None
def __init__(self, on_close):
self.on_close = on_close
def connection_made(self, transport):
self.transport = transport
def msg_received(self, msg):
self.transport.write(msg)
def connection_lost(self, exc):
self.on_close.set_result(exc)
async def go():
router_closed = asyncio.Future()
dealer_closed = asyncio.Future()
router, _ = await aiozmq.create_zmq_connection(
lambda: ZmqRouterProtocol(router_closed), zmq.ROUTER, bind="tcp://127.0.0.1:*"
)
addr = list(router.bindings())[0]
queue = asyncio.Queue()
dealer, _ = await aiozmq.create_zmq_connection(
lambda: ZmqDealerProtocol(queue, dealer_closed), zmq.DEALER, connect=addr
)
for i in range(10):
msg = (b"data", b"ask", str(i).encode("utf-8"))
dealer.write(msg)
answer = await queue.get()
print(answer)
dealer.close()
await dealer_closed
router.close()
await router_closed
def main():
asyncio.run(go())
print("DONE")
if __name__ == "__main__":
main()
| bsd-2-clause | -5,655,556,457,899,408,000 | 21.239437 | 86 | 0.621279 | false |
pyfa-org/eos | eos/item/mixin/effect_stats/remote_repair.py | 1 | 1829 | # ==============================================================================
# Copyright (C) 2011 Diego Duclos
# Copyright (C) 2011-2018 Anton Vorobyov
#
# This file is part of Eos.
#
# Eos is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Eos is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Eos. If not, see <http://www.gnu.org/licenses/>.
# ==============================================================================
from eos.eve_obj.effect.repairs.base import RemoteArmorRepairEffect
from eos.eve_obj.effect.repairs.base import RemoteShieldRepairEffect
from eos.item.mixin.base import BaseItemMixin
class RemoteRepairMixin(BaseItemMixin):
def __repair_effect_iter(self, effect_class):
for effect in self._type_effects.values():
if not isinstance(effect, effect_class):
continue
if effect.id not in self._running_effect_ids:
continue
yield effect
def get_armor_rps(self, reload=False):
rps = 0
for effect in self.__repair_effect_iter(RemoteArmorRepairEffect):
rps += effect.get_rps(self, reload=reload)
return rps
def get_shield_rps(self, reload=False):
rps = 0
for effect in self.__repair_effect_iter(RemoteShieldRepairEffect):
rps += effect.get_rps(self, reload=reload)
return rps
| lgpl-3.0 | -2,539,301,026,785,657,300 | 37.914894 | 80 | 0.636413 | false |
bollu/polymage | sandbox/apps/python/img_proc/harris/init.py | 1 | 1485 | import sys
import os.path
from PIL import Image
import numpy as np
from arg_parser import parse_args
from printer import print_header, print_usage, print_line
def init_images(app_data):
print("[init.py] : initializing images...")
app_args = app_data['app_args']
# input image:
img_path = app_args.img_file
img = np.array(Image.open(img_path).convert('1'))
rows, cols = img.shape
# convert to float image
IN = np.array(img)
IN = IN.astype(np.float32).ravel()
# final output image
OUT = np.zeros((rows, cols), np.float32).ravel()
img_data = {}
img_data['IN'] = IN
img_data['OUT'] = OUT
app_data['img_data'] = img_data
app_data['rows'] = rows
app_data['cols'] = cols
return
def get_input(app_data):
# parse the command-line arguments
app_args = parse_args()
app_data['app_args'] = app_args
app_data['mode'] = app_args.mode
app_data['runs'] = int(app_args.runs)
app_data['graph_gen'] = bool(app_args.graph_gen)
app_data['timer'] = app_args.timer
# storage optimization
app_data['optimize_storage'] = bool(app_args.optimize_storage)
# early freeing of allocated arrays
app_data['early_free'] = bool(app_args.early_free)
# pool allocate option
app_data['pool_alloc'] = bool(app_args.pool_alloc)
return
def init_all(app_data):
pipe_data = {}
app_data['pipe_data'] = pipe_data
get_input(app_data)
init_images(app_data)
return
| apache-2.0 | -1,343,414,416,860,723,500 | 22.203125 | 66 | 0.630976 | false |
zhanrnl/ag | webapp/models/roomassignment.py | 1 | 2751 | from google.appengine.ext import ndb
from models.grading import SITTING_ROOM_TYPES
from models.team import (
Team,
Contestant,
)
from models.sitting import Sitting
import random
SINGLE_TEST_SITTING = {
'alg': 'alg,at',
'at': 'at,calc',
'calc': 'calc,geo',
'geo': 'at,geo',
'team': 'power,team',
'power': 'power,team',
}
class RoomAssignment(ndb.Model):
testing_id = ndb.StringProperty(required=True)
sitting_nid = ndb.IntegerProperty(required=True)
@classmethod
def assign_team(cls, team_id):
team = Team.get_by_team_id(team_id)
contestants = Contestant.fetch_by_team(team.key)
cls.delete_team_assignments(team.key)
def get_sitting_type(tests):
if len(tests) == 1 and tests[0] != 'gen':
return SINGLE_TEST_SITTING[tests[0]]
return ','.join(sorted(list(tests)))
def select_sitting(sitting_type):
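# Weighted random choice: a sitting is picked with probability
# proportional to its capacity (cumulative-sum sampling).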
sittings = Sitting.fetch_by_exam(sitting_type)
weights = [s.capacity for s in sittings]
total = sum(weights)
index = random.randint(1, total)
counter = 0
i = 0
while (counter < index):
counter += weights[i]
i += 1
return sittings[i-1]
def assign_to_sitting(testing_id, tests, size):
sitting_type = get_sitting_type(tests)
sitting = select_sitting(sitting_type)
assignment = RoomAssignment(
testing_id=testing_id,
sitting_nid=sitting.nid,
parent=team.key,
)
assignment.put()
if len(team.team_tests) > 0:
assign_to_sitting(str(team_id), team.team_tests, len(contestants))
for c in contestants:
if len(c.tests) == 0: continue
assign_to_sitting(c.contestant_id, c.tests, 1)
@staticmethod
def get_assigned_team_ids():
team_ids = set()
all_room_assignments = RoomAssignment.query().fetch()
for ra in all_room_assignments:
try:
team_id = int(ra.testing_id)
team_ids.add(team_id)
except ValueError as e:
continue
return list(team_ids)
@staticmethod
def delete_all():
ndb.delete_multi(RoomAssignment.query().iter(keys_only=True))
@staticmethod
def delete_team_assignments(team_key):
for a in RoomAssignment.query(ancestor=team_key).fetch():
a.key.delete()
@staticmethod
def fetch_by_team(team_key):
return RoomAssignment.query(ancestor=team_key).fetch()
@staticmethod
def fetch_all():
return RoomAssignment.query().fetch()
| mit | -7,670,656,687,545,846,000 | 27.957895 | 78 | 0.571429 | false |
google/tf-quant-finance | tf_quant_finance/experimental/pricing_platform/framework/market_data/market_data_test.py | 1 | 5816 | # Lint as: python3
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the market data."""
import tensorflow.compat.v2 as tf
import tf_quant_finance as tff
from tensorflow.python.framework import test_util # pylint: disable=g-direct-tensorflow-import
core = tff.experimental.pricing_platform.framework.core
market_data = tff.experimental.pricing_platform.framework.market_data
interpolation_method = tff.experimental.pricing_platform.framework.core.interpolation_method
@test_util.run_all_in_graph_and_eager_modes
class MarketDataTest(tf.test.TestCase):
def setUp(self):
valuation_date = [(2020, 6, 24)]
fixing_dates = [(2020, 2, 24), (2020, 3, 12), (2020, 4, 14), (2020, 5, 21)]
fixing_rates = [0.01, 0.02, 0.03, 0.025]
dates = [[2021, 2, 8], [2022, 2, 8], [2023, 2, 8], [2025, 2, 8],
[2027, 2, 8], [2030, 2, 8], [2050, 2, 8]]
discounts = [0.97197441, 0.94022746, 0.91074031, 0.85495089, 0.8013675,
0.72494879, 0.37602059]
vol_dates = [
[2021, 2, 8], [2022, 2, 8], [2023, 2, 8], [2025, 2, 8], [2027, 2, 8]]
strikes = [[1500, 1550, 1510],
[1500, 1550, 1510],
[1500, 1550, 1510],
[1500, 1550, 1510],
[1500, 1550, 1510]]
volatilities = [[0.1, 0.12, 0.13],
[0.15, 0.2, 0.15],
[0.1, 0.2, 0.1],
[0.1, 0.2, 0.1],
[0.1, 0.1, 0.3]]
risk_free_dates = [
[2021, 2, 8], [2022, 2, 8], [2023, 2, 8], [2025, 2, 8], [2050, 2, 8]]
risk_free_discounts = [
0.97197441, 0.94022746, 0.91074031, 0.85495089, 0.37602059]
self._market_data_dict = {
"rates": {
"USD": {
"risk_free_curve": {
"dates": risk_free_dates, "discounts": risk_free_discounts
},
"OIS": {
"dates": dates, "discounts": discounts
},
"LIBOR_3M": {
"dates": dates,
"discounts": discounts,
"fixing_dates": fixing_dates,
"fixing_rates": fixing_rates,
"fixing_daycount": "ACTUAL_365",
"config": {
"interpolation_method": interpolation_method.
InterpolationMethod.LINEAR
}
},
},
},
"equities": {
"USD": {
"GOOG": {
"spot": 1500,
"volatility_surface": {
"dates": vol_dates,
"strikes": strikes,
"implied_volatilities": volatilities
}
}
}
},
"reference_date": valuation_date,
}
self._libor_discounts = discounts
self._risk_free_discounts = risk_free_discounts
super(MarketDataTest, self).setUp()
def test_discount_curve(self):
market = market_data.MarketDataDict(
self._market_data_dict)
# Get the risk free discount curve
risk_free_curve_type = core.curve_types.RiskFreeCurve(currency="USD")
risk_free_curve = market.yield_curve(risk_free_curve_type)
# Get LIBOR 3M discount
libor_3m = core.rate_indices.RateIndex(type="LIBOR_3M")
rate_index_curve_type = core.curve_types.RateIndexCurve(
currency="USD", index=libor_3m)
libor_3m_curve = market.yield_curve(rate_index_curve_type)
with self.subTest("RiskFree"):
discount_factor_nodes = risk_free_curve.discount_factor_nodes
self.assertAllClose(discount_factor_nodes, self._risk_free_discounts)
with self.subTest("LIBOR_3M"):
discount_factor_nodes = libor_3m_curve.discount_factor_nodes
self.assertAllClose(discount_factor_nodes, self._libor_discounts)
def test_volatility(self):
market = market_data.MarketDataDict(
self._market_data_dict)
# Get volatility surface
vol_surface = market.volatility_surface(currency=["USD", "USD"],
asset=["GOOG", "GOOG"])
expiry = tff.datetime.dates_from_year_month_day(
year=[[2023], [2030]], month=[[5], [10]], day=[[10], [15]])
vols = vol_surface.volatility(expiry_dates=expiry, strike=[[1510], [1520]])
self.assertAllClose(
self.evaluate(vols), [[0.108], [0.31]], atol=1e-6)
def test_fixings(self):
market = market_data.MarketDataDict(
self._market_data_dict)
index_curve_3m = core.curve_types.RateIndexCurve(
"USD", core.rate_indices.RateIndex(type="LIBOR_3M"))
index_curve_ois = core.curve_types.RateIndexCurve(
"USD", core.rate_indices.RateIndex(type="OIS"))
dates = [(2020, 5, 24), (2020, 3, 24)]
with self.subTest("LIBOR_3M"):
fixings, fixings_daycount = market.fixings(dates, index_curve_3m)
self.assertAllClose(
self.evaluate(fixings), [0.025, 0.03], atol=1e-6)
self.assertEqual(fixings_daycount.value, "ACTUAL_365")
with self.subTest("OIS"):
fixings, _ = market.fixings(dates, index_curve_ois)
self.assertAllClose(
self.evaluate(fixings), [0.0, 0.0], atol=1e-6)
if __name__ == "__main__":
tf.test.main()
| apache-2.0 | 2,786,026,955,294,573,600 | 39.388889 | 95 | 0.569464 | false |
Endika/mitmproxy | libmproxy/contentviews.py | 1 | 16688 | """
Mitmproxy Content Views
=======================
mitmproxy includes a set of content views which can be used to format/decode/highlight data.
While they are currently used for HTTP message bodies only, they may be used in other contexts
in the future, e.g. to decode protobuf messages sent as WebSocket frames.
Thus, the View API is very minimalistic. The only arguments are `data` and `**metadata`,
where `data` is the actual content (as bytes). The contents of `metadata` depend on the protocol in
use. For HTTP, the message headers are passed as the ``headers`` keyword argument.
"""
from __future__ import (absolute_import, print_function, division)
import cStringIO
import json
import logging
import subprocess
import sys
import lxml.html
import lxml.etree
import datetime
from PIL import Image
from PIL.ExifTags import TAGS
import html2text
import six
from netlib.odict import ODict
from netlib import encoding
from netlib.utils import clean_bin, hexdump, urldecode, multipartdecode, parse_content_type
from . import utils
from .exceptions import ContentViewException
from .contrib import jsbeautifier
from .contrib.wbxml.ASCommandResponse import ASCommandResponse
try:
import pyamf
from pyamf import remoting, flex
except ImportError: # pragma nocover
pyamf = None
try:
import cssutils
except ImportError: # pragma nocover
cssutils = None
else:
cssutils.log.setLevel(logging.CRITICAL)
cssutils.ser.prefs.keepComments = True
cssutils.ser.prefs.omitLastSemicolon = False
cssutils.ser.prefs.indentClosingBrace = False
cssutils.ser.prefs.validOnly = False
# Default view cutoff *in lines*
VIEW_CUTOFF = 512
KEY_MAX = 30
def format_dict(d):
"""
Helper function that transforms the given dictionary into a list of
("key", key )
("value", value)
tuples, where key is padded to a uniform width.
"""
max_key_len = max(len(k) for k in d.keys())
max_key_len = min(max_key_len, KEY_MAX)
for key, value in d.items():
key += ":"
key = key.ljust(max_key_len + 2)
yield [
("header", key),
("text", value)
]
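# Example (illustrative): format_dict({"Host": "example.com"}) yields one
# line per key, e.g. [("header", "Host:  "), ("text", "example.com")], with
# every key left-justified to the longest key (capped at KEY_MAX).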
def format_text(text):
"""
Helper function that transforms bytes into the view output format.
"""
for line in text.splitlines():
yield [("text", line)]
class View(object):
name = None
prompt = ()
content_types = []
def __call__(self, data, **metadata):
"""
Transform raw data into human-readable output.
Args:
data: the data to decode/format as bytes.
metadata: optional keyword-only arguments for metadata. Implementations must not
rely on a given argument being present.
Returns:
A (description, content generator) tuple.
The content generator yields lists of (style, text) tuples, where each list represents
a single line. ``text`` is a unfiltered byte string which may need to be escaped,
depending on the used output.
Caveats:
The content generator must not yield tuples of tuples,
because urwid cannot process that. You have to yield a *list* of tuples per line.
"""
raise NotImplementedError()
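# Minimal custom view sketch (illustrative; it follows the contract described
# above but is not registered via add() here):
#
# class ViewUpper(View):
#     name = "Upper"
#     prompt = ("upper", "U")
#     content_types = ["text/plain"]
#
#     def __call__(self, data, **metadata):
#         return "Uppercased", format_text(data.upper())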
class ViewAuto(View):
name = "Auto"
prompt = ("auto", "a")
content_types = []
def __call__(self, data, **metadata):
headers = metadata.get("headers", {})
ctype = headers.get("content-type")
if ctype:
parsed = parse_content_type(ctype)
if parsed:
ct = "%s/%s" % (parsed[0], parsed[1])
if ct in content_types_map:
return content_types_map[ct][0](data, **metadata)
elif utils.isXML(data):
return get("XML")(data, **metadata)
if utils.isMostlyBin(data):
return get("Hex")(data)
return get("Raw")(data)
class ViewRaw(View):
name = "Raw"
prompt = ("raw", "r")
content_types = []
def __call__(self, data, **metadata):
return "Raw", format_text(data)
class ViewHex(View):
name = "Hex"
prompt = ("hex", "e")
content_types = []
@staticmethod
def _format(data):
for offset, hexa, s in hexdump(data):
yield [
("offset", offset + " "),
("text", hexa + " "),
("text", s)
]
def __call__(self, data, **metadata):
return "Hex", self._format(data)
class ViewXML(View):
name = "XML"
prompt = ("xml", "x")
content_types = ["text/xml"]
def __call__(self, data, **metadata):
parser = lxml.etree.XMLParser(
remove_blank_text=True,
resolve_entities=False,
strip_cdata=False,
recover=False
)
try:
document = lxml.etree.fromstring(data, parser)
except lxml.etree.XMLSyntaxError:
return None
docinfo = document.getroottree().docinfo
prev = []
p = document.getroottree().getroot().getprevious()
while p is not None:
prev.insert(
0,
lxml.etree.tostring(p)
)
p = p.getprevious()
doctype = docinfo.doctype
if prev:
doctype += "\n".join(prev).strip()
doctype = doctype.strip()
s = lxml.etree.tostring(
document,
pretty_print=True,
xml_declaration=True,
doctype=doctype or None,
encoding=docinfo.encoding
)
return "XML-like data", format_text(s)
class ViewJSON(View):
name = "JSON"
prompt = ("json", "s")
content_types = ["application/json"]
def __call__(self, data, **metadata):
pretty_json = utils.pretty_json(data)
if pretty_json:
return "JSON", format_text(pretty_json)
class ViewHTML(View):
name = "HTML"
prompt = ("html", "h")
content_types = ["text/html"]
def __call__(self, data, **metadata):
if utils.isXML(data):
parser = lxml.etree.HTMLParser(
strip_cdata=True,
remove_blank_text=True
)
d = lxml.html.fromstring(data, parser=parser)
docinfo = d.getroottree().docinfo
s = lxml.etree.tostring(
d,
pretty_print=True,
doctype=docinfo.doctype,
encoding='utf8'
)
return "HTML", format_text(s)
class ViewHTMLOutline(View):
name = "HTML Outline"
prompt = ("html outline", "o")
content_types = ["text/html"]
def __call__(self, data, **metadata):
data = data.decode("utf-8")
h = html2text.HTML2Text(baseurl="")
h.ignore_images = True
h.body_width = 0
outline = h.handle(data)
return "HTML Outline", format_text(outline)
class ViewURLEncoded(View):
name = "URL-encoded"
prompt = ("urlencoded", "u")
content_types = ["application/x-www-form-urlencoded"]
def __call__(self, data, **metadata):
d = urldecode(data)
return "URLEncoded form", format_dict(ODict(d))
class ViewMultipart(View):
name = "Multipart Form"
prompt = ("multipart", "m")
content_types = ["multipart/form-data"]
@staticmethod
def _format(v):
yield [("highlight", "Form data:\n")]
for message in format_dict(ODict(v)):
yield message
def __call__(self, data, **metadata):
headers = metadata.get("headers", {})
v = multipartdecode(headers, data)
if v:
return "Multipart form", self._format(v)
if pyamf:
class DummyObject(dict):
def __init__(self, alias):
dict.__init__(self)
def __readamf__(self, input):
data = input.readObject()
self["data"] = data
def pyamf_class_loader(s):
for i in pyamf.CLASS_LOADERS:
if i != pyamf_class_loader:
v = i(s)
if v:
return v
return DummyObject
pyamf.register_class_loader(pyamf_class_loader)
class ViewAMF(View):
name = "AMF"
prompt = ("amf", "f")
content_types = ["application/x-amf"]
def unpack(self, b, seen=None):
# Use a fresh ``seen`` set per top-level call; a mutable default
# argument would be shared across invocations and could report
# spurious "<recursion>" results.
if seen is None:
seen = set()
if hasattr(b, "body"):
return self.unpack(b.body, seen)
if isinstance(b, DummyObject):
if id(b) in seen:
return "<recursion>"
else:
seen.add(id(b))
for k, v in b.items():
b[k] = self.unpack(v, seen)
return b
elif isinstance(b, dict):
for k, v in b.items():
b[k] = self.unpack(v, seen)
return b
elif isinstance(b, list):
return [self.unpack(i, seen) for i in b]
elif isinstance(b, datetime.datetime):
return str(b)
elif isinstance(b, flex.ArrayCollection):
return [self.unpack(i, seen) for i in b]
else:
return b
def _format(self, envelope):
for target, message in iter(envelope):
if isinstance(message, pyamf.remoting.Request):
yield [
("header", "Request: "),
("text", str(target)),
]
else:
yield [
("header", "Response: "),
("text", "%s, code %s" % (target, message.status)),
]
s = json.dumps(self.unpack(message), indent=4)
for msg in format_text(s):
yield msg
def __call__(self, data, **metadata):
envelope = remoting.decode(data, strict=False)
if envelope:
return "AMF v%s" % envelope.amfVersion, self._format(envelope)
class ViewJavaScript(View):
name = "JavaScript"
prompt = ("javascript", "j")
content_types = [
"application/x-javascript",
"application/javascript",
"text/javascript"
]
def __call__(self, data, **metadata):
opts = jsbeautifier.default_options()
opts.indent_size = 2
res = jsbeautifier.beautify(data, opts)
return "JavaScript", format_text(res)
class ViewCSS(View):
name = "CSS"
prompt = ("css", "c")
content_types = [
"text/css"
]
def __call__(self, data, **metadata):
if cssutils:
sheet = cssutils.parseString(data)
beautified = sheet.cssText
else:
beautified = data
return "CSS", format_text(beautified)
class ViewImage(View):
name = "Image"
prompt = ("image", "i")
content_types = [
"image/png",
"image/jpeg",
"image/gif",
"image/vnd.microsoft.icon",
"image/x-icon",
]
def __call__(self, data, **metadata):
try:
img = Image.open(cStringIO.StringIO(data))
except IOError:
return None
parts = [
("Format", str(img.format_description)),
("Size", "%s x %s px" % img.size),
("Mode", str(img.mode)),
]
for i in sorted(img.info.keys()):
if i != "exif":
parts.append(
(str(i), str(img.info[i]))
)
if hasattr(img, "_getexif"):
ex = img._getexif()
if ex:
for i in sorted(ex.keys()):
tag = TAGS.get(i, i)
parts.append(
(str(tag), str(ex[i]))
)
fmt = format_dict(ODict(parts))
return "%s image" % img.format, fmt
class ViewProtobuf(View):
"""Human friendly view of protocol buffers
The view uses the protoc compiler to decode the binary
"""
name = "Protocol Buffer"
prompt = ("protobuf", "p")
content_types = [
"application/x-protobuf",
"application/x-protobuffer",
]
@staticmethod
def is_available():
try:
p = subprocess.Popen(
["protoc", "--version"],
stdout=subprocess.PIPE
)
out, _ = p.communicate()
return out.startswith("libprotoc")
except:
return False
def decode_protobuf(self, content):
# if Popen raises OSError, it will be caught in
# get_content_view and fall back to Raw
p = subprocess.Popen(['protoc', '--decode_raw'],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
out, err = p.communicate(input=content)
if out:
return out
else:
return err
def __call__(self, data, **metadata):
decoded = self.decode_protobuf(data)
return "Protobuf", format_text(decoded)
class ViewWBXML(View):
name = "WBXML"
prompt = ("wbxml", "w")
content_types = [
"application/vnd.wap.wbxml",
"application/vnd.ms-sync.wbxml"
]
def __call__(self, data, **metadata):
try:
parser = ASCommandResponse(data)
parsedContent = parser.xmlString
if parsedContent:
return "WBXML", format_text(parsedContent)
except:
return None
views = []
content_types_map = {}
view_prompts = []
def get(name):
for i in views:
if i.name == name:
return i
def get_by_shortcut(c):
for i in views:
if i.prompt[1] == c:
return i
def add(view):
# TODO: auto-select a different name (append an integer?)
for i in views:
if i.name == view.name:
raise ContentViewException("Duplicate view: " + view.name)
# TODO: the UI should auto-prompt for a replacement shortcut
for prompt in view_prompts:
if prompt[1] == view.prompt[1]:
raise ContentViewException("Duplicate view shortcut: " + view.prompt[1])
views.append(view)
for ct in view.content_types:
l = content_types_map.setdefault(ct, [])
l.append(view)
view_prompts.append(view.prompt)
def remove(view):
for ct in view.content_types:
l = content_types_map.setdefault(ct, [])
l.remove(view)
if not len(l):
del content_types_map[ct]
view_prompts.remove(view.prompt)
views.remove(view)
add(ViewAuto())
add(ViewRaw())
add(ViewHex())
add(ViewJSON())
add(ViewXML())
add(ViewWBXML())
add(ViewHTML())
add(ViewHTMLOutline())
add(ViewJavaScript())
add(ViewCSS())
add(ViewURLEncoded())
add(ViewMultipart())
add(ViewImage())
if pyamf:
add(ViewAMF())
if ViewProtobuf.is_available():
add(ViewProtobuf())
def safe_to_print(lines, encoding="utf8"):
"""
Wraps a content generator so that each text portion is a *safe to print* unicode string.
"""
for line in lines:
clean_line = []
for (style, text) in line:
try:
text = clean_bin(text.decode(encoding, "strict"))
except UnicodeDecodeError:
text = clean_bin(text).decode(encoding, "strict")
clean_line.append((style, text))
yield clean_line
def get_content_view(viewmode, data, **metadata):
"""
Args:
viewmode: the view to use.
data, **metadata: arguments passed to View instance.
Returns:
A (description, content generator) tuple.
In contrast to calling the views directly, text is always safe-to-print unicode.
Raises:
ContentViewException, if the content view threw an error.
"""
if not data:
return "No content", []
msg = []
headers = metadata.get("headers", {})
enc = headers.get("content-encoding")
if enc and enc != "identity":
decoded = encoding.decode(enc, data)
if decoded:
data = decoded
msg.append("[decoded %s]" % enc)
try:
ret = viewmode(data, **metadata)
# Third-party viewers can fail in unexpected ways...
except Exception as e:
six.reraise(
ContentViewException,
ContentViewException(str(e)),
sys.exc_info()[2]
)
if not ret:
ret = get("Raw")(data, **metadata)
msg.append("Couldn't parse: falling back to Raw")
else:
msg.append(ret[0])
return " ".join(msg), safe_to_print(ret[1])
| mit | 1,470,349,869,913,732,900 | 26.583471 | 98 | 0.54704 | false |
armando-migliaccio/tempest | tempest/api/compute/images/test_images_oneserver_negative.py | 1 | 6602 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack Foundation
# Copyright 2013 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.compute import base
from tempest import clients
from tempest.common.utils import data_utils
from tempest import exceptions
from tempest.openstack.common import log as logging
from tempest.test import attr
from tempest.test import skip_because
LOG = logging.getLogger(__name__)
class ImagesOneServerNegativeTestJSON(base.BaseV2ComputeTest):
_interface = 'json'
def tearDown(self):
"""Terminate test instances created after a test is executed."""
for image_id in self.image_ids:
self.client.delete_image(image_id)
self.image_ids.remove(image_id)
super(ImagesOneServerNegativeTestJSON, self).tearDown()
def setUp(self):
# NOTE(afazekas): Normally we use the same server with all test cases,
# but if it has an issue, we build a new one
super(ImagesOneServerNegativeTestJSON, self).setUp()
# Check if the server is in a clean state after test
try:
self.servers_client.wait_for_server_status(self.server_id,
'ACTIVE')
except Exception as exc:
LOG.exception(exc)
# Rebuild the server if it cannot reach the ACTIVE state.
# Usually this means the server had a serious accident.
self._reset_server()
def _reset_server(self):
self.__class__.server_id = self.rebuild_server(self.server_id)
@classmethod
def setUpClass(cls):
super(ImagesOneServerNegativeTestJSON, cls).setUpClass()
cls.client = cls.images_client
if not cls.config.service_available.glance:
skip_msg = ("%s skipped as glance is not available" % cls.__name__)
raise cls.skipException(skip_msg)
try:
resp, server = cls.create_test_server(wait_until='ACTIVE')
cls.server_id = server['id']
except Exception:
cls.tearDownClass()
raise
cls.image_ids = []
if cls.multi_user:
if cls.config.compute.allow_tenant_isolation:
creds = cls.isolated_creds.get_alt_creds()
username, tenant_name, password = creds
cls.alt_manager = clients.Manager(username=username,
password=password,
tenant_name=tenant_name)
else:
# Use the alt_XXX credentials in the config file
cls.alt_manager = clients.AltManager()
cls.alt_client = cls.alt_manager.images_client
@skip_because(bug="1006725")
@attr(type=['negative', 'gate'])
def test_create_image_specify_multibyte_character_image_name(self):
# Return an error if the image name has multi-byte characters
snapshot_name = data_utils.rand_name('\xef\xbb\xbf')
self.assertRaises(exceptions.BadRequest,
self.client.create_image, self.server_id,
snapshot_name)
@attr(type=['negative', 'gate'])
def test_create_image_specify_invalid_metadata(self):
# Return an error when creating image with invalid metadata
snapshot_name = data_utils.rand_name('test-snap-')
meta = {'': ''}
self.assertRaises(exceptions.BadRequest, self.client.create_image,
self.server_id, snapshot_name, meta)
@attr(type=['negative', 'gate'])
def test_create_image_specify_metadata_over_limits(self):
# Return an error when creating image with meta data over 256 chars
snapshot_name = data_utils.rand_name('test-snap-')
meta = {'a' * 260: 'b' * 260}
self.assertRaises(exceptions.BadRequest, self.client.create_image,
self.server_id, snapshot_name, meta)
@attr(type=['negative', 'gate'])
def test_create_second_image_when_first_image_is_being_saved(self):
# Disallow creating another image when first image is being saved
# Create first snapshot
snapshot_name = data_utils.rand_name('test-snap-')
resp, body = self.client.create_image(self.server_id,
snapshot_name)
self.assertEqual(202, resp.status)
image_id = data_utils.parse_image_id(resp['location'])
self.image_ids.append(image_id)
self.addCleanup(self._reset_server)
# Create second snapshot
alt_snapshot_name = data_utils.rand_name('test-snap-')
self.assertRaises(exceptions.Conflict, self.client.create_image,
self.server_id, alt_snapshot_name)
@attr(type=['negative', 'gate'])
def test_create_image_specify_name_over_256_chars(self):
# Return an error if snapshot name over 256 characters is passed
snapshot_name = data_utils.rand_name('a' * 260)
self.assertRaises(exceptions.BadRequest, self.client.create_image,
self.server_id, snapshot_name)
@attr(type=['negative', 'gate'])
def test_delete_image_that_is_not_yet_active(self):
# Return an error while trying to delete an image what is creating
snapshot_name = data_utils.rand_name('test-snap-')
resp, body = self.client.create_image(self.server_id, snapshot_name)
self.assertEqual(202, resp.status)
image_id = data_utils.parse_image_id(resp['location'])
self.image_ids.append(image_id)
self.addCleanup(self._reset_server)
# Do not wait, attempt to delete the image, ensure it's successful
resp, body = self.client.delete_image(image_id)
self.assertEqual('204', resp['status'])
self.image_ids.remove(image_id)
self.assertRaises(exceptions.NotFound, self.client.get_image, image_id)
class ImagesOneServerNegativeTestXML(ImagesOneServerNegativeTestJSON):
_interface = 'xml'
| apache-2.0 | 426,124,462,674,475,700 | 41.050955 | 79 | 0.63193 | false |
dropbox/changes | changes/listeners/mail.py | 1 | 8772 | from __future__ import absolute_import, print_function
from itertools import imap
import logging
import toronado
from email.utils import parseaddr
from flask import current_app, render_template
from flask_mail import Message, sanitize_address
from jinja2 import Markup
from typing import List # NOQA
from changes.config import db, mail
from changes.constants import Result, Status
from changes.db.utils import try_create
from changes.lib import build_context_lib, build_type
from changes.lib.build_context_lib import CollectionContext # NOQA
from changes.models.event import Event, EventType
from changes.models.build import Build
from changes.models.job import Job
from changes.models.jobplan import JobPlan
from changes.models.project import ProjectOption
def filter_recipients(email_list, domain_whitelist=None):
"""
Returns emails from email_list that have been white-listed by
domain_whitelist.
"""
if domain_whitelist is None:
domain_whitelist = current_app.config['MAIL_DOMAIN_WHITELIST']
if not domain_whitelist:
return email_list
return [
e for e in email_list
if parseaddr(e)[1].split('@', 1)[-1] in domain_whitelist
]
class MailNotificationHandler(object):
logger = logging.getLogger('mail')
def send(self, msg, build):
msg.recipients = filter_recipients(msg.recipients)
if not msg.recipients:
self.logger.info(
'Exiting for collection_id={} because its message has no '
'recipients.'.format(build.collection_id))
return
event = try_create(Event, where={
'type': EventType.email,
'item_id': build.collection_id,
'data': {
'triggering_build_id': build.id.hex,
'recipients': msg.recipients,
}
})
# If we were unable to create the Event, we must've done so (and thus sent the mail) already.
if not event:
self.logger.warning('An email has already been sent for collection_id=%s, (build_id=%s).',
build.collection_id, build.id.hex)
return
mail.send(msg)
def get_msg(self, builds):
# type: (List[Build]) -> Message
context = build_context_lib.get_collection_context(builds) # type: CollectionContext
if context.result == Result.passed:
return None
max_shown = current_app.config.get('MAX_SHOWN_ITEMS_PER_BUILD_MAIL', 3)
context_dict = context._asdict()
context_dict.update({
'MAX_SHOWN_ITEMS_PER_BUILD': max_shown,
'showing_failing_tests_count':
sum([min(b['failing_tests_count'], max_shown) for b in context.builds])
})
recipients = self.get_collection_recipients(context)
msg = Message(context.title, recipients=recipients, extra_headers={
'Reply-To': ', '.join(sanitize_address(r) for r in recipients),
})
msg.body = render_template('listeners/mail/notification.txt', **context_dict)
msg.html = Markup(toronado.from_string(
render_template('listeners/mail/notification.html', **context_dict)
))
return msg
def get_collection_recipients(self, collection_context):
# type: (CollectionContext) -> List[unicode]
"""
Returns a list of recipients for a collection context created by
get_collection_context. Only recipients for failing builds will be
returned.
"""
recipient_lists = map(
lambda build_context: self.get_build_recipients(build_context['build']),
collection_context.builds)
return list(set([r for rs in recipient_lists for r in rs]))
def get_build_recipients(self, build):
# type: (Build) -> List[unicode]
"""
Returns a list of recipients for a build.
The build author is included unless the build and all failing jobs
have turned off the mail.notify-author option.
Successful builds will return the empty list.
Recipients are also collected from each failing job's
mail.notify-addresses and mail.notify-addresses-revisions options.
Should there be no failing jobs (is that possible?), recipients are
collected from the build's own mail.notify-addresses and
mail.notify-addresses-revisions options.
"""
if build.result == Result.passed:
return []
recipients = []
options = self.get_build_options(build)
if options['mail.notify-author']:
author = build.author
if author:
recipients.append(u'%s <%s>' % (author.name, author.email))
recipients.extend(options['mail.notify-addresses'])
if build_type.is_initial_commit_build(build):
recipients.extend(options['mail.notify-addresses-revisions'])
return recipients
def get_build_options(self, build):
"""
Returns a build's mail options as a
{
'mail.notify-author': bool,
'mail.notify-addresses': set,
'mail.notify-addresses-revisions': set,
} dict.
The 'mail.notify-author' option is True unless the build and all
failing jobs have turned off the mail.notify-author option.
The mail.notify-addresses and mail.notify-addresses-revisions options
respectively are sets of email addresses constructed by merging the
corresponding options of all failing jobs. Note that the build's
options are used as defaults when constructing the options for
each job, so that the job options override the build options.
Finally, the build's own options are used if there are no failing jobs.
"""
default_options = {
'mail.notify-author': '1',
'mail.notify-addresses': '',
'mail.notify-addresses-revisions': '',
}
build_options = dict(
default_options,
**dict(db.session.query(
ProjectOption.name, ProjectOption.value
).filter(
ProjectOption.project_id == build.project_id,
ProjectOption.name.in_(default_options.keys()),
))
)
# Get options for all failing jobs.
jobs_options = []
for job in list(Job.query.filter(Job.build_id == build.id)):
if job.result != Result.passed:
jobs_options.append(dict(
build_options, **self.get_job_options(job)))
# Merge all options.
# Fallback to build options in case there are no failing jobs.
all_options = jobs_options or [build_options]
merged_options = {
# Notify the author unless all jobs and the build have turned the
# notify-author option off.
'mail.notify-author': any(
imap(
lambda options: options.get('mail.notify-author') == '1',
all_options,
),
),
'mail.notify-addresses': set(),
'mail.notify-addresses-revisions': set(),
}
recipient_keys = ['mail.notify-addresses', 'mail.notify-addresses-revisions']
for options in all_options:
for key in recipient_keys:
# XXX(dcramer): we dont have option validators so lets assume
# people enter slightly incorrect values
merged_options[key] |= set(
[x.strip() for x in options[key].split(',') if x.strip()]
)
return merged_options
def get_job_options(self, job):
jobplan = JobPlan.query.filter(
JobPlan.job_id == job.id,
).first()
options = {}
if jobplan and 'snapshot' in jobplan.data:
options = jobplan.data['snapshot']['options']
return options
def build_finished_handler(build_id, *args, **kwargs):
build = Build.query.get(build_id)
if not build:
return
if not build.collection_id:
# If there isn't a collection_id, assume the build stands alone.
# All builds should probably have collection_id set.
builds = [build]
else:
builds = list(
Build.query.filter(Build.collection_id == build.collection_id))
# Exit if there are no builds for the given build_id, or any build hasn't
# finished.
if not builds or any(map(lambda build: build.status != Status.finished, builds)):
return
notification_handler = MailNotificationHandler()
msg = notification_handler.get_msg(builds)
if msg is not None:
notification_handler.send(msg, build)
| apache-2.0 | 7,238,504,638,627,023,000 | 35.39834 | 102 | 0.614683 | false |
noiselabs/box-linux-sync | src/noiselabs/box/pms/apt.py | 1 | 1248 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# This file is part of box-linux-sync.
#
# Copyright (C) 2013 Vítor Brandão <[email protected]>
#
# box-linux-sync is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation; either version 3 of the License, or (at your option)
# any later version.
#
# box-linux-sync is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with box-linux-sync; if not, see
# <http://www.gnu.org/licenses/>.
from noiselabs.box.pms.pms import BasePMS
class APT(BasePMS):
"""The Advanced Packaging Tool used in the Debian family of Linux operating
systems (Ubuntu included)."""
def __str__(self):
return 'APT'
def search(self, pkg):
return "apt-cache search %s" % pkg
def install(self, pkg):
return "apt-get install %s" % pkg
def remove(self, pkg):
return "apt-get remove %s" % pkg
| lgpl-3.0 | 4,470,295,722,904,715,000 | 31.789474 | 80 | 0.701445 | false |
pidydx/grr | grr/lib/flows/general/audit.py | 1 | 2003 | #!/usr/bin/env python
"""This implements the auditing system.
How does it work?
Noteworthy events within the GRR system (such as approval granting or flow
execution) generate events that notify any registered listeners.
The audit system consists of a group of event listeners which receive these
events and act upon them.
"""
from grr.lib import aff4
from grr.lib import events
from grr.lib import flow
from grr.lib import queues
from grr.lib import rdfvalue
from grr.lib import sequential_collection
AUDIT_EVENT = "Audit"
class AuditEventCollection(sequential_collection.IndexedSequentialCollection):
RDF_TYPE = events.AuditEvent
def AllAuditLogs(token=None):
# TODO(user): This is not great, we should store this differently.
for log in aff4.FACTORY.Open("aff4:/audit/logs", token=token).ListChildren():
yield AuditEventCollection(log, token=token)
def AuditLogsForTimespan(start_time, end_time, token=None):
# TODO(user): This is not great, we should store this differently.
for log in aff4.FACTORY.Open(
"aff4:/audit/logs", token=token).ListChildren(age=(start_time, end_time)):
yield AuditEventCollection(log, token=token)
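# A minimal usage sketch (hypothetical, not part of this module): iterate the
# audit events recorded for a timespan. Assumes a valid access `token`; the
# exact iteration interface of AuditEventCollection may differ.
#
#   for collection in AuditLogsForTimespan(start_time, end_time, token=token):
#       for event in collection:
#           print(event.user, event.action)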
class AuditEventListener(flow.EventListener):
"""Receive the audit events."""
well_known_session_id = rdfvalue.SessionID(
base="aff4:/audit", queue=queues.FLOWS, flow_name="listener")
EVENTS = [AUDIT_EVENT]
created_logs = set()
def EnsureLogIsIndexed(self, log_urn):
if log_urn not in self.created_logs:
# Just write any type to the aff4 space so we can determine
# which audit logs exist easily.
aff4.FACTORY.Create(
log_urn, aff4.AFF4Volume, mode="w", token=self.token).Close()
self.created_logs.add(log_urn)
return log_urn
@flow.EventHandler(auth_required=False)
def ProcessMessage(self, message=None, event=None):
_ = message
log_urn = aff4.CurrentAuditLog()
self.EnsureLogIsIndexed(log_urn)
AuditEventCollection.StaticAdd(log_urn, self.token, event)
| apache-2.0 | 4,533,730,079,903,174,000 | 30.793651 | 80 | 0.736895 | false |
gatagat/lapjv | bench/overview_sparse.py | 1 | 2203 | from pytest import mark
from joblib import Memory
import numpy as np
from lap import lapjv, lapmod
from lap.lapmod import get_cost
try:
from lap_old import lapjv as lapjv_old
except ImportError:
print(
'''If you get here, you do not have the old lapjv to compare to.
git clone [email protected]:gatagat/lapjv.git lapjv-old
cd lapjv-old
git checkout old
python setup.py build_ext -i
mv lapjv lapjv_old
And run the benchmark:
LAPJV_OLD=lapjv-old bench.sh
''')
lapjv_old = None
from centrosome.lapjv import lapjv as lapjv_centrosome
from lap.tests.test_utils import (
sparse_from_masked,
sparse_from_masked_CS,
get_sparse_int,
get_platform_maxint
)
cachedir = '/tmp/lapjv-cache'
memory = Memory(cachedir=cachedir, verbose=1)
@memory.cache
def get_data(seed):
cost, mask = get_sparse_int(5000, 1000, 0.01, hard=False, seed=seed)
cost_ = cost.copy()
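    # Masked-out entries are priced at the platform max int so the dense solver
    # never selects them; lapjv on this matrix yields the reference optimum.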
cost_[~mask] = get_platform_maxint()
opt = lapjv(cost_)[0]
return cost, mask, opt
seeds = [1299821, 15485867, 32452867, 49979693]
def _get_cost_CS(cost, x):
return cost[np.arange(cost.shape[0]), x].sum()
@mark.parametrize('seed', seeds)
def test_CSCY(benchmark, seed):
cost, mask, opt = get_data(seed)
i, j, cc = sparse_from_masked_CS(cost, mask)
ret = benchmark(lapjv_centrosome, i, j, cc)
assert _get_cost_CS(cost, ret[0]) == opt
if lapjv_old is not None:
@mark.parametrize('seed', seeds)
def test_JV_old(benchmark, seed):
cost, mask, opt = get_data(seed)
cost[~mask] = get_platform_maxint()
ret = benchmark(lapjv_old, cost)
assert ret[0] == opt
@mark.parametrize('seed', seeds)
def test_JV(benchmark, seed):
cost, mask, opt = get_data(seed)
cost[~mask] = get_platform_maxint()
ret = benchmark(lapjv, cost)
assert ret[0] == opt
@mark.parametrize('seed', seeds)
def test_MOD_c(benchmark, seed):
cost, mask, opt = get_data(seed)
n, cc, ii, kk = sparse_from_masked(cost, mask)
ret = benchmark(lapmod, n, cc, ii, kk, fast=True, return_cost=False)
assert get_cost(n, cc, ii, kk, ret[0]) == opt
| bsd-2-clause | -4,014,113,899,185,928,000 | 26.886076 | 74 | 0.634135 | false |
montyly/manticore | tests/ethereum/EVM/test_EVMEXP.py | 1 | 79538 | import struct
import unittest
import json
from manticore.platforms import evm
from manticore.core import state
from manticore.core.smtlib import Operators, ConstraintSet
import os
class EVMTest_EXP(unittest.TestCase):
_multiprocess_can_split_ = True
maxDiff = None
def _execute(self, new_vm):
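        """Execute the VM and map any raised EVM exception to a short label."""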
last_returned = None
last_exception = None
try:
new_vm.execute()
        except evm.Stop:
last_exception = "STOP"
except evm.NotEnoughGas:
last_exception = "OOG"
except evm.StackUnderflow:
last_exception = "INSUFFICIENT STACK"
except evm.InvalidOpcode:
last_exception = "INVALID"
except evm.SelfDestruct:
last_exception = "SUICIDED"
except evm.Return as e:
last_exception = "RETURN"
last_returned = e.data
except evm.Revert:
last_exception = "REVERT"
return last_exception, last_returned
def test_EXP_1(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
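        # The base (2**256 - 1) is ≡ -1 mod 2**256 and the exponent is odd,
        # so the result is again 2**256 - 1.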
self.assertEqual(
new_vm.stack,
[115792089237316195423570985008687907853269984665640564039457584007913129639935],
)
def test_EXP_2(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935)
new_vm._push(0)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
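        # EXP pops the base first, so this computes 0 ** (2**256 - 1) == 0:
        # a zero base with a nonzero exponent gives zero.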
self.assertEqual(new_vm.stack, [0])
def test_EXP_3(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935)
new_vm._push(1)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [1])
def test_EXP_4(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
def test_EXP_5(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(
new_vm.stack,
[104454113832828984621679659393253883542637298667129925477260695573804969029359],
)
def test_EXP_6(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935)
new_vm._push(16)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
def test_EXP_7(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935)
new_vm._push(32)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
def test_EXP_8(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935)
new_vm._push(48)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
def test_EXP_9(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935)
new_vm._push(6089590155545428825848686802984512581899718912)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
def test_EXP_10(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(0)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
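        # Operands are popped base-first, so this is (2**256 - 1) ** 0 == 1:
        # any base to the power 0 is 1.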
self.assertEqual(new_vm.stack, [1])
def test_EXP_11(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(0)
new_vm._push(0)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [1])
def test_EXP_12(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(0)
new_vm._push(1)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [1])
def test_EXP_13(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(0)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [1])
def test_EXP_14(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(0)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [1])
def test_EXP_15(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(0)
new_vm._push(16)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [1])
def test_EXP_16(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(0)
new_vm._push(32)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [1])
def test_EXP_17(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(0)
new_vm._push(48)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [1])
def test_EXP_18(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(0)
new_vm._push(6089590155545428825848686802984512581899718912)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [1])
def test_EXP_19(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(1)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
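        # An exponent of 1 returns the base unchanged.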
self.assertEqual(
new_vm.stack,
[115792089237316195423570985008687907853269984665640564039457584007913129639935],
)
def test_EXP_20(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(1)
new_vm._push(0)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
def test_EXP_21(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(1)
new_vm._push(1)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [1])
def test_EXP_22(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(1)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(
new_vm.stack,
[57896044618658097711785492504343953926634992332820282019728792003956564819952],
)
def test_EXP_23(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(1)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(
new_vm.stack,
[3618502788666131106986593281521497120414687020801267626233049500247285301263],
)
def test_EXP_24(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(1)
new_vm._push(16)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [16])
def test_EXP_25(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(1)
new_vm._push(32)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [32])
def test_EXP_26(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(1)
new_vm._push(48)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [48])
def test_EXP_27(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(1)
new_vm._push(6089590155545428825848686802984512581899718912)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [6089590155545428825848686802984512581899718912])
def test_EXP_28(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
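        # The base is ≡ -1 mod 2**256 and the exponent (2**255 - 16) is even,
        # so the result is 1.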
self.assertEqual(new_vm.stack, [1])
def test_EXP_29(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952)
new_vm._push(0)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
def test_EXP_30(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952)
new_vm._push(1)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [1])
def test_EXP_31(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
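        # The base 2**255 - 16 carries a factor of 2**4, so this huge power is
        # divisible by 2**256 and reduces to 0.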
self.assertEqual(new_vm.stack, [0])
def test_EXP_32(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(
new_vm.stack,
[97153515582439856940218076430383148080316642374323115531717460774015781538049],
)
def test_EXP_33(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952)
new_vm._push(16)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
def test_EXP_34(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952)
new_vm._push(32)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
def test_EXP_35(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952)
new_vm._push(48)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
def test_EXP_36(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952)
new_vm._push(6089590155545428825848686802984512581899718912)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
def test_EXP_37(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(
new_vm.stack,
[115792089237316195423570985008687907853269984665640564039457584007913129639935],
)
def test_EXP_38(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263)
new_vm._push(0)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
def test_EXP_39(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263)
new_vm._push(1)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [1])
def test_EXP_40(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
def test_EXP_41(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(
new_vm.stack,
[112173586448650064316584391727166410732855297644839296413224972401556225198063],
)
def test_EXP_42(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263)
new_vm._push(16)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
def test_EXP_43(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263)
new_vm._push(32)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
def test_EXP_44(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263)
new_vm._push(48)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
def test_EXP_45(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263)
new_vm._push(6089590155545428825848686802984512581899718912)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
def test_EXP_46(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(16)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [1])
def test_EXP_47(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(16)
new_vm._push(0)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
def test_EXP_48(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(16)
new_vm._push(1)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [1])
def test_EXP_49(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(16)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
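        # (2**255 - 16) ** 16 ≡ (-16)**16 plus cross terms that all carry at
        # least 2**259, so mod 2**256 it equals 16**16 == 2**64.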
self.assertEqual(new_vm.stack, [18446744073709551616])
def test_EXP_50(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(16)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(
new_vm.stack,
[57896044618658097711785492504343953926634992332820282019735360412312277710593],
)
def test_EXP_51(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(16)
new_vm._push(16)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
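        # 16 ** 16 == 2**64 == 18446744073709551616.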
self.assertEqual(new_vm.stack, [18446744073709551616])
def test_EXP_52(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(16)
new_vm._push(32)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [1208925819614629174706176])
def test_EXP_53(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(16)
new_vm._push(48)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
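        # 48 ** 16 == (16 * 3) ** 16 == 2**64 * 3**16 == 794071845499378503449051136.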
self.assertEqual(new_vm.stack, [794071845499378503449051136])
def test_EXP_54(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(16)
new_vm._push(6089590155545428825848686802984512581899718912)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(
new_vm.stack,
[8303694420805298775410959586403913600201715917447438497573206049841934761984],
)
def test_EXP_55(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(32)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [1])
def test_EXP_56(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(32)
new_vm._push(0)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
def test_EXP_57(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(32)
new_vm._push(1)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [1])
def test_EXP_58(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(32)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [340282366920938463463374607431768211456])
def test_EXP_59(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(32)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [43143988327398919500410556793212890625])
def test_EXP_60(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(32)
new_vm._push(16)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [340282366920938463463374607431768211456])
def test_EXP_61(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(32)
new_vm._push(32)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
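        # 32 ** 32 == 2**160.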
self.assertEqual(new_vm.stack, [1461501637330902918203684832716283019655932542976])
def test_EXP_62(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(32)
new_vm._push(48)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [630550095814788844423632687832745817333905738742890496])
def test_EXP_63(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(32)
new_vm._push(6089590155545428825848686802984512581899718912)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
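        # The base (the 0x111...100 address constant) is divisible by 2**8, so
        # its 32nd power is divisible by 2**256 and reduces to 0.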
self.assertEqual(new_vm.stack, [0])
def test_EXP_64(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(48)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [1])
def test_EXP_65(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(48)
new_vm._push(0)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
def test_EXP_66(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(48)
new_vm._push(1)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [1])
def test_EXP_67(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(48)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [6277101735386680763835789423207666416102355444464034512896])
def test_EXP_68(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(48)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(
new_vm.stack,
[57896044618658097712068879837772420409703173580337995947392654709187277710593],
)
def test_EXP_69(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(48)
new_vm._push(16)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [6277101735386680763835789423207666416102355444464034512896])
def test_EXP_70(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(48)
new_vm._push(32)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(
new_vm.stack,
[1766847064778384329583297500742918515827483896875618958121606201292619776],
)
def test_EXP_71(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(48)
new_vm._push(48)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(
new_vm.stack,
[17084401304090163016086072004374689170541683170424114643147834605304589320192],
)
def test_EXP_72(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(48)
new_vm._push(6089590155545428825848686802984512581899718912)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
def test_EXP_73(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(6089590155545428825848686802984512581899718912)
new_vm._push(115792089237316195423570985008687907853269984665640564039457584007913129639935)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [1])
def test_EXP_74(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(6089590155545428825848686802984512581899718912)
new_vm._push(0)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
def test_EXP_75(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(6089590155545428825848686802984512581899718912)
new_vm._push(1)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [1])
def test_EXP_76(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(6089590155545428825848686802984512581899718912)
new_vm._push(57896044618658097711785492504343953926634992332820282019728792003956564819952)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
def test_EXP_77(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(6089590155545428825848686802984512581899718912)
new_vm._push(3618502788666131106986593281521497120414687020801267626233049500247285301263)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(
new_vm.stack,
[42192513242301740010671492996252704544191162524312342410321251717326910681089],
)
def test_EXP_78(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(6089590155545428825848686802984512581899718912)
new_vm._push(16)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
def test_EXP_79(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(6089590155545428825848686802984512581899718912)
new_vm._push(32)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
def test_EXP_80(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(6089590155545428825848686802984512581899718912)
new_vm._push(48)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
def test_EXP_81(self):
# Make the constraint store
constraints = ConstraintSet()
# make the ethereum world state
world = evm.EVMWorld(constraints)
address = 0x222222222222222222222222222222222222200
caller = origin = 0x111111111111111111111111111111111111100
price = 0
value = 10000
bytecode = b"\n"
data = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAA"
header = {"coinbase": 0, "timestamp": 0, "number": 0, "difficulty": 0, "gaslimit": 0}
gas = 1000000
new_vm = evm.EVM(constraints, address, data, caller, value, bytecode, gas=gas, world=world)
new_vm._push(6089590155545428825848686802984512581899718912)
new_vm._push(6089590155545428825848686802984512581899718912)
last_exception, last_returned = self._execute(new_vm)
self.assertEqual(last_exception, None)
self.assertEqual(new_vm.pc, 1)
self.assertEqual(new_vm.stack, [0])
if __name__ == "__main__":
unittest.main()
| apache-2.0 | -3,030,011,212,142,196,700 | 39.914609 | 100 | 0.646584 | false |
MicBrain/Tic_Tac_Toe | Tic_Tac_Toe.py | 1 | 8653 | ###################
### DESCRIPTION ###
###################
"""
Tic-tac-toe (or Noughts and crosses, Xs and Os) is a game for two players, X and O, who take
turns marking the spaces in a 3×3 grid. The player who succeeds in placing three respective marks
in a horizontal, vertical, or diagonal row wins the game.
The simplicity of Tic-tac-toe makes it ideal as a pedagogical tool for teaching the concepts
of good sportsmanship and the branch of artificial intelligence that deals with the searching of
game trees. It is straightforward to write a computer program to play Tic-tac-toe perfectly.
The game can be generalized to an m,n,k-game in which two players alternate placing stones of
their own color on an m×n board, with the goal of getting k of their own color in a row. Tic-tac-toe
is the (3,3,3)-game.
Despite its apparent simplicity, Tic-tac-toe requires detailed analysis to determine even some
elementary combinatory facts, the most interesting of which are the number of possible games and the
number of possible positions. A position is merely a state of the board, while a game usually refers
to the way a terminal position is obtained.
"""
from string import *
from random import *
import itertools
import math
####################
## MAIN VARIABLES ##
####################
Player_1 = 'x' # player 1's mark
Player_2 = 'o' # player 2's mark
A = 'A' # these just make it easier to keep referring to 'A', 'B' and 'C'
B = 'B'
C = 'C'
#####################
## State variables ##
#####################
EMPTY = ' '
Table = [[EMPTY, EMPTY, EMPTY],
[EMPTY, EMPTY, EMPTY],
[EMPTY, EMPTY, EMPTY]]
current = randint(1, 2)
#########################
### Coordinate system ###
#########################
def square(row, col): # squares are represented as tuples of (row, col).
return (row, col) # rows are numbered 1 thru 3, cols 'A' thru 'C'.
def square_row(square): # these two functions save us the hassle of using
return square[0] # index values in our code, e.g. square[0]...
def square_col(square): # from this point on, i should never directly use
return square[1] # tuples when working with squares.
def get_square(square):
row_i = square_row(square) - 1
col_i = ord(square_col(square)) - ord(A)
    return Table[row_i][col_i] # note how this and set_square are the ONLY
                               # functions which directly use Table!
def set_square(square, mark):
row_i = square_row(square) - 1
col_i = ord(square_col(square)) - ord(A)
    Table[row_i][col_i] = mark # note how this and get_square are the ONLY
                               # functions which directly use Table!
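# Illustrative round-trip through the helpers above (values are examples):
#   sq = square(2, B)         # -> (2, 'B')
#   set_square(sq, Player_1)  # writes 'x' into Table[1][1]
#   get_square(sq)            # -> 'x'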
def get_row(row):
return [get_square((row, A)), get_square((row, B)), get_square((row, C))]
def get_column(col):
return [get_square((1, col)), get_square((2, col)), get_square((3, col))]
def get_diagonal(corner_square):
if corner_square == (1, A) or corner_square == (3, C):
return [get_square((1, A)), get_square((2, B)), get_square((3, C))]
else:
return [get_square((1, C)), get_square((2, B)), get_square((3, A))]
def get_mark(player):
if player == 1:
return Player_1
else:
return Player_2
def all_squares_filled():
for row in range(1, 4): # range(1, 4) returns the list [1, 2, 3]
if EMPTY in get_row(row):
return False # this row contains an empty square, we know enough
return True # no empty squares found, all squares are filled
def player_has_won(player):
MARK = get_mark(player)
win = [MARK, MARK, MARK]
if get_row(1) == win or get_row(2) == win or get_row(3) == win:
return True
if get_column(A) == win or get_column(B) == win or get_column(C) == win:
return True
if get_diagonal((1, A)) == win or get_diagonal((1, C)) == win:
return True
return False
def draw_board_straight():
A1, A2, A3 = get_square((1, A)), get_square((2, A)), get_square((3, A))
B1, B2, B3 = get_square((1, B)), get_square((2, B)), get_square((3, B))
C1, C2, C3 = get_square((1, C)), get_square((2, C)), get_square((3, C))
lines = []
lines.append("")
lines.append(" " + A + " " + B + " " + C + " ")
lines.append(" ")
lines.append("1 " + A1 + " | " + B1 + " | " + C1 + " ")
lines.append(" ---+---+---")
lines.append("2 " + A2 + " | " + B2 + " | " + C2 + " ")
lines.append(" ---+---+---")
lines.append("3 " + A3 + " | " + B3 + " | " + C3 + " ")
lines.append("")
    return "\n".join(lines) # the '\n' represents a newline
def draw_board_slanted():
A1, A2, A3 = get_square((1, A)), get_square((2, A)), get_square((3, A))
B1, B2, B3 = get_square((1, B)), get_square((2, B)), get_square((3, B))
C1, C2, C3 = get_square((1, C)), get_square((2, C)), get_square((3, C))
lines = []
lines.append("")
lines.append(" " + A + " " + B + " " + C + " ")
lines.append(" ")
lines.append(" 1 " + A1 + " / " + B1 + " / " + C1 + " ")
lines.append(" ---/---/--- ")
lines.append(" 2 " + A2 + " / " + B2 + " / " + C2 + " ")
lines.append(" ---/---/--- ")
lines.append("3 " + A3 + " / " + B3 + " / " + C3 + " ")
lines.append("")
    return "\n".join(lines)
def draw_board():
return draw_board_slanted()
def reset_main_board():
for row in (1, 2, 3):
for col in (A, B, C):
set_square(square(row, col), EMPTY)
def play():
global current
reset_main_board()
current = randint(1, 2)
print ("Tic-Tac-Toe!")
    print()
player1_name = input("Player 1, what is your name? ")
player2_name = input("Player 2, what is your name? ")
def get_name(player):
if player == 1:
return player1_name
else:
return player2_name
    print()
print ("Welcome,", player1_name, "and", player2_name + "!")
print (player1_name, "will be", Player_1 + ", and", player2_name, "will be", Player_2 + ".")
print ("By random decision,", get_name(current), "will go first.")
    print()
input("[Press enter when ready to play.] ") # just waiting for them to press enter
print (draw_board())
while not all_squares_filled():
choice = input(get_name(current) + ", which square? (e.g. 2B, 2b, B2 or b2) ")
if len(choice) != 2:
print ("That's not a square. You must enter a square like b2, or 3C.")
            print()
continue
if choice[0] not in ["1", "2", "3"] and str.upper(choice[0]) not in [A, B, C]:
print ("The first character must be a row (1, 2 or 3) or column (A, B or C).")
            print()
continue
if choice[1] not in ["1", "2", "3"] and str.upper(choice[1]) not in [A, B, C]:
print ("The second character must be a row (1, 2 or 3) or column (A, B or C).")
            print()
continue
if choice[0] in ["1", "2", "3"] and choice[1] in ["1", "2", "3"]:
print ("You entered two rows! You must enter one row and one column (A, B or C).")
            print()
continue
if str.upper(choice[0]) in [A, B, C] and str.upper(choice[1]) in [A, B, C]:
print ("You entered two columns! You must enter one row (1, 2 or 3) and one column.")
            print()
continue
if choice[0] in ["1", "2", "3"]:
row = int(choice[0])
col = str.upper(choice[1])
else:
row = int(choice[1])
col = str.upper(choice[0])
choice = square(row, col) # make this into a (row, col) tuple
if get_square(choice) != EMPTY:
print ("Sorry, that square is already marked.")
            print()
continue
set_square(choice, get_mark(current))
print (draw_board())
if player_has_won(current):
print ("Congratulations", get_name(current), "-- you win!")
            print()
break
if all_squares_filled():
print ("Cats game!", player1_name, "and", player2_name, "draw.")
            print()
break
current = 3 - current # sets 1 to 2 and 2 to 1
print ("GAME IS OVER")
    print()
if __name__ == "__main__":
continue_playing = True
while continue_playing:
play()
again = str.lower(input("Play again? (y/n) "))
        print()
        print()
        print()
if again != "y":
continue_playing = False
print ("Thanks for playing!")
    print()
| gpl-3.0 | -7,830,777,343,375,921,000 | 37.620536 | 101 | 0.539475 | false |
jpetto/bedrock | bedrock/firefox/helpers.py | 1 | 8778 | from collections import OrderedDict
from django.core.cache import cache
from django.conf import settings
import jingo
import jinja2
from bedrock.firefox.models import FirefoxOSFeedLink
from bedrock.firefox.firefox_details import firefox_desktop, firefox_android, firefox_ios
from bedrock.base.urlresolvers import reverse
from lib.l10n_utils import get_locale
def android_builds(channel, builds=None):
builds = builds or []
variations = OrderedDict([
('api-9', 'Gingerbread'),
('api-15', 'Ice Cream Sandwich+'),
('x86', 'x86'),
])
if channel == 'alpha':
for type, arch_pretty in variations.iteritems():
link = firefox_android.get_download_url('alpha', type)
builds.append({'os': 'android',
'os_pretty': 'Android',
'os_arch_pretty': 'Android %s' % arch_pretty,
'arch': 'x86' if type == 'x86' else 'armv7up %s' % type,
'arch_pretty': arch_pretty,
'download_link': link})
else:
link = firefox_android.get_download_url(channel)
builds.append({'os': 'android',
'os_pretty': 'Android',
'download_link': link})
return builds
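# Illustrative shape of the list built above (the URL is a placeholder, not a
# real bouncer link):
#   android_builds('release')
#   # -> [{'os': 'android', 'os_pretty': 'Android', 'download_link': '...'}]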
def ios_builds(channel, builds=None):
builds = builds or []
link = firefox_ios.get_download_url(channel)
builds.append({'os': 'ios',
'os_pretty': 'iOS',
'download_link': link})
return builds
@jingo.register.function
@jinja2.contextfunction
def download_firefox(ctx, channel='release', small=False, icon=True,
platform='all', dom_id=None, locale=None, simple=False,
force_direct=False, force_full_installer=False,
force_funnelcake=False, check_old_fx=False):
""" Output a "download firefox" button.
:param ctx: context from calling template.
:param channel: name of channel: 'release', 'beta' or 'alpha'.
:param small: Display the small button if True.
:param icon: Display the Fx icon on the button if True.
:param platform: Target platform: 'desktop', 'android', 'ios', or 'all'.
:param dom_id: Use this string as the id attr on the element.
:param locale: The locale of the download. Default to locale of request.
:param simple: Display button with text only if True. Will not display
icon or privacy/what's new/systems & languages links. Can be used
in conjunction with 'small'.
:param force_direct: Force the download URL to be direct.
:param force_full_installer: Force the installer download to not be
the stub installer (for aurora).
:param force_funnelcake: Force the download version for en-US Windows to be
'latest', which bouncer will translate to the funnelcake build.
:param check_old_fx: Checks to see if the user is on an old version of
Firefox and, if true, changes the button text from 'Free Download'
to 'Update your Firefox'. Must be used in conjunction with
'simple' param being true.
:return: The button html.
"""
show_desktop = platform in ['all', 'desktop']
show_android = platform in ['all', 'android']
show_ios = platform in ['all', 'ios']
alt_channel = '' if channel == 'release' else channel
locale = locale or get_locale(ctx['request'])
funnelcake_id = ctx.get('funnelcake_id', False)
dom_id = dom_id or 'download-button-%s-%s' % (
'desktop' if platform == 'all' else platform, channel)
l_version = firefox_desktop.latest_builds(locale, channel)
if l_version:
version, platforms = l_version
else:
locale = 'en-US'
version, platforms = firefox_desktop.latest_builds('en-US', channel)
# Gather data about the build for each platform
builds = []
if show_desktop:
for plat_os, plat_os_pretty in firefox_desktop.platform_labels.iteritems():
# Windows 64-bit builds are not available on the ESR channel yet
if plat_os == 'win64' and channel in ['esr', 'esr_next']:
continue
# Fallback to en-US if this plat_os/version isn't available
# for the current locale
_locale = locale if plat_os_pretty in platforms else 'en-US'
# And generate all the info
download_link = firefox_desktop.get_download_url(
channel, version, plat_os, _locale,
force_direct=force_direct,
force_full_installer=force_full_installer,
force_funnelcake=force_funnelcake,
funnelcake_id=funnelcake_id,
)
# If download_link_direct is False the data-direct-link attr
# will not be output, and the JS won't attempt the IE popup.
if force_direct:
# no need to run get_download_url again with the same args
download_link_direct = False
else:
download_link_direct = firefox_desktop.get_download_url(
channel, version, plat_os, _locale,
force_direct=True,
force_full_installer=force_full_installer,
force_funnelcake=force_funnelcake,
funnelcake_id=funnelcake_id,
)
if download_link_direct == download_link:
download_link_direct = False
builds.append({'os': plat_os,
'os_pretty': plat_os_pretty,
'download_link': download_link,
'download_link_direct': download_link_direct})
if show_android:
builds = android_builds(channel, builds)
if show_ios:
builds.append({'os': 'ios',
'os_pretty': 'iOS',
'download_link': firefox_ios.get_download_url()})
# Get the native name for current locale
langs = firefox_desktop.languages
locale_name = langs[locale]['native'] if locale in langs else locale
data = {
'locale_name': locale_name,
'version': version,
'product': 'firefox-%s' % platform,
'builds': builds,
'id': dom_id,
'small': small,
'simple': simple,
'channel': alt_channel,
'show_desktop': show_desktop,
'show_android': show_android,
'show_ios': show_ios,
'icon': icon,
'check_old_fx': check_old_fx and simple,
}
html = jingo.render_to_string(ctx['request'],
'firefox/includes/download-button.html',
data)
return jinja2.Markup(html)
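# Illustrative template usage (jingo registers the helper for Jinja templates;
# the argument values below are examples only):
#
#   {{ download_firefox('beta', small=True, platform='desktop') }}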
@jingo.register.function
def firefox_url(platform, page, channel=None):
"""
Return a product-related URL like /firefox/all/ or /mobile/beta/notes/.
Examples
========
In Template
-----------
{{ firefox_url('desktop', 'all', 'organizations') }}
{{ firefox_url('desktop', 'sysreq', channel) }}
{{ firefox_url('android', 'notes') }}
"""
kwargs = {}
# Tweak the channel name for the naming URL pattern in urls.py
if channel == 'release':
channel = None
if channel == 'alpha':
if platform == 'desktop':
channel = 'developer'
if platform == 'android':
channel = 'aurora'
if channel == 'esr':
channel = 'organizations'
if channel:
kwargs['channel'] = channel
if platform != 'desktop':
kwargs['platform'] = platform
# Firefox for Android and iOS have the system requirements page on SUMO
if platform in ['android', 'ios'] and page == 'sysreq':
return settings.FIREFOX_MOBILE_SYSREQ_URL
return reverse('firefox.%s' % page, kwargs=kwargs)
@jingo.register.function
def firefox_os_feed_links(locale, force_cache_refresh=False):
if locale in settings.FIREFOX_OS_FEED_LOCALES:
cache_key = 'firefox-os-feed-links-' + locale
if not force_cache_refresh:
links = cache.get(cache_key)
if links:
return links
links = list(
FirefoxOSFeedLink.objects.filter(locale=locale).order_by(
'-id').values_list('link', 'title')[:10])
cache.set(cache_key, links)
return links
elif '-' in locale:
return firefox_os_feed_links(locale.split('-')[0])
@jingo.register.function
def firefox_os_blog_link(locale):
try:
return settings.FXOS_PRESS_BLOG_LINKS[locale]
except KeyError:
if '-' in locale:
return firefox_os_blog_link(locale.split('-')[0])
else:
return None
| mpl-2.0 | 5,004,060,017,684,913,000 | 35.728033 | 89 | 0.583276 | false |
satish-avninetworks/murano | murano/dsl/murano_package.py | 1 | 7758 | # Copyright (c) 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import inspect
import weakref
import semantic_version
import six
from yaql.language import specs
from yaql.language import utils
from murano.dsl import constants
from murano.dsl import dsl_types
from murano.dsl import exceptions
from murano.dsl import helpers
from murano.dsl import meta as dslmeta
from murano.dsl import murano_object
from murano.dsl import murano_type
from murano.dsl import namespace_resolver
from murano.dsl import principal_objects
from murano.dsl import yaql_integration
class MuranoPackage(dsl_types.MuranoPackage, dslmeta.MetaProvider):
def __init__(self, package_loader, name, version=None,
runtime_version=None, requirements=None, meta=None):
super(MuranoPackage, self).__init__()
self._package_loader = weakref.proxy(package_loader)
self._name = name
self._meta = None
self._version = helpers.parse_version(version)
self._runtime_version = helpers.parse_version(runtime_version)
self._requirements = {
name: semantic_version.Spec('==' + str(self._version.major))
}
if name != constants.CORE_LIBRARY:
self._requirements[constants.CORE_LIBRARY] = \
semantic_version.Spec('==0')
self._classes = {}
self._imported_types = {object, murano_object.MuranoObject}
for key, value in six.iteritems(requirements or {}):
self._requirements[key] = helpers.parse_version_spec(value)
self._load_queue = {}
self._native_load_queue = {}
if self.name == constants.CORE_LIBRARY:
principal_objects.register(self)
self._package_class = self._create_package_class()
self._meta = dslmeta.MetaData(
meta, dsl_types.MetaTargets.Package, self._package_class)
@property
def package_loader(self):
return self._package_loader
@property
def name(self):
return self._name
@property
def version(self):
return self._version
@property
def runtime_version(self):
return self._runtime_version
@property
def requirements(self):
return self._requirements
@property
def classes(self):
return set(self._classes.keys()).union(
self._load_queue.keys()).union(self._native_load_queue.keys())
def get_resource(self, name):
raise NotImplementedError('resource API is not implemented')
# noinspection PyMethodMayBeStatic
def get_class_config(self, name):
return {}
def _register_mpl_classes(self, data, name=None):
type_obj = self._classes.get(name)
if type_obj is not None:
return type_obj
if callable(data):
data = data()
data = helpers.list_value(data)
unnamed_class = None
last_ns = {}
for cls_data in data:
last_ns = cls_data.setdefault('Namespaces', last_ns.copy())
if len(cls_data) == 1:
continue
cls_name = cls_data.get('Name')
if not cls_name:
if unnamed_class:
raise exceptions.AmbiguousClassName(name)
unnamed_class = cls_data
else:
ns_resolver = namespace_resolver.NamespaceResolver(last_ns)
cls_name = ns_resolver.resolve_name(cls_name)
if cls_name == name:
type_obj = murano_type.create(
cls_data, self, cls_name, ns_resolver)
self._classes[name] = type_obj
else:
self._load_queue.setdefault(cls_name, cls_data)
if type_obj is None and unnamed_class:
unnamed_class['Name'] = name
return self._register_mpl_classes(unnamed_class, name)
return type_obj
def _register_native_class(self, cls, name):
if cls in self._imported_types:
return self._classes[name]
try:
m_class = self.find_class(name, False)
except exceptions.NoClassFound:
m_class = self._register_mpl_classes({'Name': name}, name)
m_class.extension_class = cls
for method_name in dir(cls):
if method_name.startswith('_'):
continue
method = getattr(cls, method_name)
if not any((
helpers.inspect_is_method(cls, method_name),
helpers.inspect_is_static(cls, method_name),
helpers.inspect_is_classmethod(cls, method_name))):
continue
method_name_alias = (getattr(
method, '__murano_name', None) or
specs.convert_function_name(
method_name, yaql_integration.CONVENTION))
m_class.add_method(method_name_alias, method, method_name)
self._imported_types.add(cls)
return m_class
def register_class(self, cls, name=None):
if inspect.isclass(cls):
name = name or getattr(cls, '__murano_name', None) or cls.__name__
if name in self._classes:
self._register_native_class(cls, name)
else:
self._native_load_queue.setdefault(name, cls)
elif isinstance(cls, dsl_types.MuranoType):
self._classes[cls.name] = cls
elif name not in self._classes:
self._load_queue[name] = cls
def find_class(self, name, search_requirements=True):
payload = self._native_load_queue.pop(name, None)
if payload is not None:
return self._register_native_class(payload, name)
payload = self._load_queue.pop(name, None)
if payload is not None:
result = self._register_mpl_classes(payload, name)
if result:
return result
result = self._classes.get(name)
if result:
return result
if search_requirements:
pkgs_for_search = []
for package_name, version_spec in six.iteritems(
self._requirements):
if package_name == self.name:
continue
referenced_package = self._package_loader.load_package(
package_name, version_spec)
try:
return referenced_package.find_class(name, False)
except exceptions.NoClassFound:
pkgs_for_search.append(referenced_package)
continue
raise exceptions.NoClassFound(
name, packages=pkgs_for_search + [self])
raise exceptions.NoClassFound(name, packages=[self])
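    # Illustrative lookup (the class name is hypothetical):
    #   cls = package.find_class('io.murano.apps.Example')
    # Resolution order: native load queue, MuranoPL load queue, classes
    # already registered on this package, then the packages named in
    # self._requirements.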
@property
def context(self):
return None
def _create_package_class(self):
ns_resolver = namespace_resolver.NamespaceResolver(None)
return murano_type.MuranoClass(
ns_resolver, self.name, self, utils.NO_VALUE)
def get_meta(self, context):
if not self._meta:
return []
return self._meta.get_meta(context)
def __repr__(self):
return 'MuranoPackage({name})'.format(name=self.name)
| apache-2.0 | 3,568,733,459,473,349,000 | 35.252336 | 78 | 0.59603 | false |
DevHugo/zds-site | zds/utils/tutorials.py | 1 | 2669 | # coding: utf-8
import os
# Used for indexing tutorials, we need to parse each manifest to know which content have been published
class GetPublished:
published_part = []
published_chapter = []
published_extract = []
def __init__(self):
pass
@classmethod
def get_published_content(cls):
        # If all arrays are empty, load the published content
if not len(GetPublished.published_part) and \
not len(GetPublished.published_chapter) and \
not len(GetPublished.published_extract):
# Get all published tutorials
from zds.tutorial.models import Tutorial
tutorials_database = Tutorial.objects.filter(sha_public__isnull=False).all()
for tutorial in tutorials_database:
# Load Manifest
json = tutorial.load_json_for_public()
# Parse it
GetPublished.load_tutorial(json)
return {"parts": GetPublished.published_part,
"chapters": GetPublished.published_chapter,
"extracts": GetPublished.published_extract}
@classmethod
def load_tutorial(cls, json):
# Load parts, chapter and extract
if 'parts' in json:
for part_json in json['parts']:
# If inside of parts we have chapters, load it
GetPublished.load_chapters(part_json)
GetPublished.load_extracts(part_json)
GetPublished.published_part.append(part_json['pk'])
GetPublished.load_chapters(json)
GetPublished.load_extracts(json)
@classmethod
def load_chapters(cls, json):
if 'chapters' in json:
for chapters_json in json['chapters']:
GetPublished.published_chapter.append(chapters_json['pk'])
GetPublished.load_extracts(chapters_json)
return GetPublished.published_chapter
@classmethod
def load_extracts(cls, json):
if 'extracts' in json:
for extract_json in json['extracts']:
GetPublished.published_extract.append(extract_json['pk'])
return GetPublished.published_extract
def get_blob(tree, chemin):
for blob in tree.blobs:
try:
if os.path.abspath(blob.path) == os.path.abspath(chemin):
data = blob.data_stream.read()
return data.decode('utf-8')
except (OSError, IOError):
return ""
if len(tree.trees) > 0:
for atree in tree.trees:
result = get_blob(atree, chemin)
if result is not None:
return result
return None
else:
return None
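# Example usage (illustrative; assumes a GitPython repository object):
#   import git
#   repo = git.Repo(repo_path)
#   manifest = get_blob(repo.commit('HEAD').tree, 'manifest.json')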
| gpl-3.0 | -8,591,455,257,756,504,000 | 30.034884 | 103 | 0.59423 | false |
HPPTECH/hpp_IOSTressTest | Refer/IOST_OLD_SRC/IOST_0.18/IOST.py | 1 | 8248 | #!/usr/bin/env python
#======================================================================
#
# Project : hpp_IOStressTest
# File : IOST.py
# Date : Sep 21, 2016
# Author : HuuHoang Nguyen
# Contact : [email protected]
# : [email protected]
# License : MIT License
# Copyright : 2016
# Description: The hpp_IOStressTest is under the MIT License, a copy of which may be found in LICENSE
#
#======================================================================
import io
import os
import operator
import sys
import base64
import time
# from Libs.IOST_Prepare import *
# from Libs.IOST_Config import *
# from Libs.IOST_WMain import *
# from Libs.IOST_Basic import *
sys.path.append("Libs")
sys.path.append("../Libs")
from Libs import IOST_Basic
from Libs import IOST_Config
from Libs import IOST_WMain
from Libs.IOST_WMain import *
from Libs import IOST_Prepare
import gtk
import gtk.glade
import gobject
# from Libs import *
# from Libs import *
# from Libs import *
#======================================================================
IOST_Debug_Enable = 0
#======================================================================
# argv_number = len(sys.argv)
# for i in range(0, argv_number):
# print sys.argv[i]
#======================================================================
IOST_WMAIN = "IOST_WMain"
IOST_CHIP = "Skylark"
IOST_OBJECT = "_Skylark"
IOST_CONFIG_DATA_DEFAULTE_FILE = "IOST_DataDefault.json"
IOST_CONFIG_OBJS_DEFAULTE_FILE = "IOST_ObjsDefault.json"
IOST_PROGRAM_PATH=os.path.dirname(os.path.abspath(sys.argv[0]))
if IOST_Debug_Enable:
print IOST_PROGRAM_PATH
IOST_SSH_BIN = "ssh"
IOST_TELNET_BIN = "telnet"
IOST_SHELL_BIN = os.environ["SHELL"]
IOST_CONGIG_DATA_PATH = IOST_PROGRAM_PATH + "/" + IOST_CHIP + "/" + IOST_CONFIG_DATA_DEFAULTE_FILE
if IOST_Debug_Enable:
print IOST_CONGIG_DATA_PATH
IOST_CONGIG_OBJS_PATH = IOST_PROGRAM_PATH + "/" + IOST_CHIP + "/" + IOST_CONFIG_OBJS_DEFAULTE_FILE
if IOST_Debug_Enable:
print IOST_CONGIG_OBJS_PATH
#======================================================================
class IOST(IOST_WMain):
"""
This is a main class of the program.
"""
#----------------------------------------------------------------------
def __init__(self, glade_filename = "",
window_name = "",
object_name = "",
iost_data = None,
iost_objs = None):
"The function is main function to start IOST program"
IOST_WMain.__init__(self, glade_filename, window_name, object_name, iost_data, iost_objs)
#----------------------------------------------------------------------
def IOST_Main(self):
gtk.main()
#======================================================================
# MAIN FUNCTION
#======================================================================
if __name__ == "__main__":
"The main function of IOST"
IOST_Config=IOST_Config()
#-------------------------------------------------------------------------
IOST_Config.IOST_Data = IOST_Config.ReadFile(file_name=IOST_CONGIG_DATA_PATH)
#-------------------------------------------------------------------------
IOST_Config.IOST_Objs = IOST_Config.ReadFile(file_name=IOST_CONGIG_OBJS_PATH)
IOST_Config.IOST_Data["GladeFileName"] = IOST_PROGRAM_PATH + "/" + IOST_CHIP+ '/'+ IOST_Config.IOST_Data["GladeFileName"] + '_'+ IOST_Config.IOST_Data["ProjectVersion"] + '.glade'
# print IOST_Config.IOST_Data["GladeFileName"]
# print "=================================================================="
# pprint (IOST_Config.IOST_Data.keys())
# print "=================================================================="
# pprint (IOST_Config.IOST_Objs["IOST_WMain"].keys())
# print "=================================================================="
argv_number = len(sys.argv)
if IOST_Debug_Enable:
print "=================================================================="
print "Number of arg have entered is : ", argv_number
for i in range(0, argv_number):
print "========== argv[%s] = : " %(i, sys.argv[i])
#Add config file to a Files list
for i in range(1, argv_number):
# print i
# IOST_Config.IOST_Files.append(sys.argv[1]+'/'+sys.argv[i])
if os.path.isfile(sys.argv[i]):
IOST_Config.AddFileConfig2List(IOST_Config.IOST_Files, sys.argv[i])
else:
IOST_Config.AddFileConfig2List(IOST_Config.IOST_Files, IOST_PROGRAM_PATH +'/'+sys.argv[i])
# Print to debug name of all file config have inputed
if IOST_Debug_Enable:
print "=========================The list config files have entered==========================="
print "Number of config Files is %s" % (len (IOST_Config.IOST_Files))
print "Number of config files is: "
for i in range(0, len (IOST_Config.IOST_Files)):
pprint (IOST_Config.IOST_Files[i])
#Read file and store in Files Dist type at location (2n+1)
if argv_number > 1:
IOST_Config.AddObjConfig2List(IOST_Config.IOST_Files)
if IOST_Debug_Enable:
for i in range(0, len (IOST_Config.IOST_Files)):
print "================================= %s =================================" %i
pprint (IOST_Config.IOST_Files[i])
for i in range(0, len (IOST_Config.IOST_Files), 2):
IOST_Config.ModifyIOST_Objs(IOST_Config.IOST_Data, IOST_Config.IOST_Files[i+1] )
if IOST_Debug_Enable:
print "IOST_Config.IOST_Data is : "
pprint (IOST_Config.IOST_Data)
print "IOST_Config.IOST_Data['I2C0'] is : "
pprint (IOST_Config.IOST_Data["I2C0"])
IOST_Config.IOST_Data["IOST_Path"] = IOST_PROGRAM_PATH
IOST_Config.IOST_Data["IOST_RunPath"] = os.getcwd()
IOST_Config.IOST_Data["ConfigFile"]["CfgDataPath"] = IOST_CONGIG_DATA_PATH
IOST_Config.IOST_Data["ConfigFile"]["CfgObjsPath"] = IOST_CONGIG_OBJS_PATH
#-------------------------------------------------------------------------
IOST_Config.WriteFile(IOST_PROGRAM_PATH+"/Temp_Configs/Config_Data.json", IOST_Config.IOST_Data)
IOST_Config.WriteFile(IOST_PROGRAM_PATH+"/Temp_Configs/Config_Objects.json", IOST_Config.IOST_Objs)
# Some debug code here
#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
if False:
for key, value in IOST_Config.IOST_Objs["IOST_WSetupTestcase"].iteritems():
print key, value
if False:
len__temp = len(IOST_Config.IOST_Objs["IOST_WSetupTestcase"])
print "=============================================="
print "Len of IOST_WSetupTestcase object is", len__temp
print "=============================================="
print IOST_Config.IOST_Objs["IOST_WSetupTestcase"].keys()
for i in range(0, len__temp, 2 ):
print "=============================================="
print i
print "----------------------------------------------"
print IOST_Config.IOST_Objs["IOST_WSetupTestcase"].keys()[i]
print "----------------------------------------------"
print IOST_Config.IOST_Objs["IOST_WSetupTestcase"].keys()[i+1]
print "----------------------------------------------"
print IOST_Config.IOST_Objs["IOST_WSetupTestcase"][IOST_Config.IOST_Objs["IOST_WSetupTestcase"].keys()[(i+1)]]
if False:
exit(1)
#@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@
#-------------------------------------------------------------------------
main = IOST(glade_filename=IOST_Config.IOST_Data["GladeFileName"],
window_name=IOST_WMAIN,
object_name=IOST_OBJECT,
iost_data=IOST_Config.IOST_Data, iost_objs=IOST_Config.IOST_Objs)
main.IOST_Main()
| mit | 3,273,504,206,182,492,700 | 37.90566 | 183 | 0.466537 | false |
Hubert51/AutoGrading | learning/number_recognization/test.py | 1 | 1250 | from pytesseract import image_to_string
from PIL import Image
import cv2
import numpy
import sys
if __name__ == '__main__':
f = open("test1.txt")
f = f.read()
for element in f:
        str1 = element  # ends up holding only the file's last character; unused below
position = ((712, 571), (725, 587))
dh = position[1][1] - position[0][1]
upper = position[0][1] - 2 * dh
lower = position[1][1] + int(3.5 * dh)
left = position[1][0]
print(upper,lower, left)
img = cv2.imread('answerSheet_with_name.png')
#image = Image.open('answerSheet_with_name.png')
    img = img[upper:lower, left:img.shape[1]]  # crop rows/cols; shape[1] is the image width
gray = cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
blur = cv2.GaussianBlur(gray,(5,5),0)
thresh = cv2.adaptiveThreshold(blur,255,cv2.ADAPTIVE_THRESH_GAUSSIAN_C,cv2.THRESH_BINARY,11,6)
cv2.imshow("hello", img)
################# Now finding Contours ###################
img,contours,hierarchy = cv2.findContours(thresh,cv2.RETR_LIST,cv2.CHAIN_APPROX_SIMPLE)
cv2.drawContours(img, contours, -1, (0, 0, 255),1)
    im = Image.fromarray(img)  # img is single-channel after findContours, so let PIL infer the mode
file = open("image_to_string.txt", "w")
# box = image_to_string(image).split('\n')
file.write(image_to_string(im))
#file.write(image_to_string(image))
file.close()
| mit | -715,447,482,893,040,000 | 26.777778 | 98 | 0.6064 | false |
bblais/Tech-SIE | Estimating_Proportion/Estimating_Proportion.py | 1 | 4755 |
# coding: utf-8
# #Statistical Inference for Everyone: Technical Supplement
#
#
#
# This document is the technical supplement, for instructors, for [Statistical Inference for Everyone], the introductory statistical inference textbook from the perspective of "probability theory as logic".
#
# <img src="http://web.bryant.edu/~bblais/images/Saturn_with_Dice.png" align=center width = 250px />
#
# [Statistical Inference for Everyone]: http://web.bryant.edu/~bblais/statistical-inference-for-everyone-sie.html
#
# ## Estimating a Proportion
#
# $$\newcommand{\twocvec}[2]{\left(\begin{array}{c}
# #1 \\\\ #2
# \end{array}\right)}
# \newcommand{\nchoosek}[2]{\twocvec{#1}{#2}}
# $$
#
# If $\theta$ is the model representing the probability, $\theta$, of the coin
# landing on heads (and $1-\theta$ is the probability of landing on tails), we
# need to make an estimate of probability of model $\theta$ being true given the
# data, which will consist of $N$ flips of which $h$ are heads.
#
# Bayes rule is:
# \begin{eqnarray}
# p(\theta|D,I) &=& \frac{p(D|\theta,I)p(\theta|I)}{p(D|I)} =
# \frac{p(D|\theta,I)p(\theta,I)}{\sum_\theta p(D|\theta,I)p(\theta|I)}
# \end{eqnarray}
#
# Thus, the probability of a particular model $\theta$ being true is the product
# of the probability of the observed data ($h$ heads in $N$ flips) given the
# model $\theta$ and the prior probability of the model $\theta$ being true
# before we even look at the data, divided by the probability of the data itself
# over all models.
#
# The prior probability of model $\theta$ will be assumed to be uniform (from
# maximum entropy considerations). The probability, $\theta$, ranges from 0 to
# 1, to the prior is
# \begin{eqnarray}
# p(\theta|I) = 1
# \end{eqnarray}
#
# The probability of the data given the random model, is just the binomial
# distribution:
#
# \begin{eqnarray}
# p(D|\theta)=\nchoosek{N}{h} \theta^h (1-\theta)^{N-h}
# \end{eqnarray}
#
# The probability of the data, $p(D|I)$, is found by summing (or in this case
# integrating) $p(D|\theta,I)p(\theta|I)$ for all $\theta$:
#
# \begin{eqnarray}
# p(D|I) &=& \int_0^1 \nchoosek{N}{h} \theta^h (1-\theta)^{N-h} \cdot 1 d\theta
# \\\\
# &=&\frac{N!}{h!(N-h)!} \frac{h!(N-h)!}{(N+1)!} = \frac{1}{N+1}
# \end{eqnarray}
#
# Now the probability of model $\theta$ being true, given the data, is just
#
# \begin{eqnarray}
# p(\theta|D,I)&=& (N+1) \cdot \nchoosek{N}{h} \theta^h (1-\theta)^{N-h} \\
# &=& \frac{(N+1)!}{h!(N-h)!} \theta^h (1-\theta)^{N-h}
# \end{eqnarray}
#
#
# ### Max, Mean, Variance
#
# The model with the maximum probability is found by maximizing $p(\theta|D,I)$
# w.r.t. $\theta$:
#
# \begin{eqnarray}
# \frac{dP(\theta|D,I)}{d\theta} &=& 0 = \frac{(N+1)!}{h!(N-h)!} \left(
# -(N-h) \theta^h (1-\theta)^{N-h-1} + h \theta^{h-1} (1-\theta)^{N-h} \right) \\\\
# (N-h) \theta^h (1-\theta)^{N-h-1} &=& h \theta^{h-1} (1-\theta)^{N-h} \\\\
# \theta(N-h) &=& (1-\theta) h = h-\theta h = N\theta-\theta h \\\\
# \theta&=&\frac{h}{N} \;\;\;\;\;\surd
# \end{eqnarray}
#
# The average and the standard deviation is also straightforward.
#
#
# \begin{eqnarray}
# \bar{\theta} &=& \int_0^1 \theta \cdot \frac{(N+1)!}{h!(N-h)!} \theta^h (1-\theta)^{N-h} \\\\
# &=& \frac{(N+1)!}{h!(N-h)!} \int_0^1 \theta^{h+1} (1-\theta)^{N-h} \\\\
# &=&\frac{(N+1)!}{h!(N-h)!} \frac{(h+1)!(N-h)!}{(N+2)!} \\\\
# &=&\frac{h+1}{N+2} \\\\
# \bar{\theta^2} &=& \int_0^1 \theta^2 \cdot \frac{(N+1)!}{h!(N-h)!} \theta^h (1-\theta)^{N-h} \\\\
# &=&\frac{(N+1)!}{h!(N-h)!} \frac{(h+2)!(N-h)!}{(N+3)!} \\\\
# &=&\frac{(h+1)(h+2)}{(N+2)(N+3)} \\\\
# \sigma^2 &=& \bar{\theta^2} - \bar{\theta}^2 = \frac{(h+1)(h+2)}{(N+2)(N+3)} -
# \frac{(h+1)(h+1)}{(N+2)(N+2)} \\\\
# &=&\frac{(h+1)(N-h+1)}{(N+2)^2(N+3)} \\\\
# &=& \frac{(h+1)}{(N+2)}\left( \frac{N+2}{N+2} - \frac{h+1}{N+2}\right)
# \frac{1}{N+3} \\\\
# &=& \bar{\theta}(1-\bar{\theta})\frac{1}{N+3}
# \end{eqnarray}
#
# ### An Approximation for the Variance
#
# If $f=h/N$ is the actual fraction of heads observed, then the variance above
# can be written as
# \begin{eqnarray}
# \sigma^2 &=&\frac{(fN+1)(N-fN+1)}{(N+2)^2(N+3)} \\\\
# \mbox{(for large $N$)}&\approx& \frac{(fN+1)(N-fN)}{N^3}
# =\frac{(fN+1)(1-f)}{N^2} \\\\
# \mbox{(for large $fN$)}&\approx& \frac{(fN)(N-fN)}{N^2} = \frac{f(1-f)}{N} \\\\
# \sigma^2&\approx& \frac{f(1-f)}{N}
# \end{eqnarray}
#
# In this limit, the distribution (beta distribution) can be approximated with a
# Gaussian.
#
# In[11]:
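# A minimal numeric sketch of the closed forms derived above (illustrative
# only; ``h`` and ``N`` are arbitrary example values): compare the exact
# posterior mean and variance with the large-$N$ approximation.
h, N = 7, 10
post_mean = (h + 1.0) / (N + 2)                  # (h+1)/(N+2)
post_var = post_mean * (1 - post_mean) / (N + 3)
f = h / float(N)
approx_var = f * (1 - f) / N                     # f(1-f)/N
print("mean=%.4f  exact var=%.5f  approx var=%.5f" % (post_mean, post_var, approx_var))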
# ---------------------
# In[8]:
from IPython.core.display import HTML
def css_styling():
styles = open("../styles/custom.css", "r").read()
return HTML(styles)
css_styling()
| mit | 8,721,158,606,299,497,000 | 33.708029 | 206 | 0.578549 | false |
wylee/django-local-settings | src/local_settings/util.py | 1 | 5070 | import importlib
import io
import os
import dotenv
NO_DEFAULT = type(
"NO_DEFAULT",
(),
{
"__nonzero__": (lambda self: False), # Python 2
"__bool__": (lambda self: False), # Python 3
"__str__": (lambda self: self.__class__.__name__),
"__repr__": (lambda self: str(self)),
"__copy__": (lambda self: self),
},
)()
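# NO_DEFAULT is a falsy sentinel distinct from ``None``, so callers can tell
# "no default was given" apart from "the default is None". Hypothetical use:
#
#     def get_setting(name, default=NO_DEFAULT):
#         if default is NO_DEFAULT:
#             raise KeyError(name)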
def get_file_name():
"""Get local settings file from environ or discover it.
If the ``LOCAL_SETTINGS_FILE`` environment variable is set, its
value is returned directly.
Otherwise, the current working directory is searched for
`local.{ext}` for each file extension handled by each loading
:mod:`strategy`. Note that the search is done in alphabetical order
so that if ``local.cfg`` and ``local.yaml`` both exist, the former
will be returned.
Returns:
str: File name if set via environ or discovered
None: File name isn't set and wasn't discovered
"""
file_name = os.environ.get("LOCAL_SETTINGS_FILE")
if file_name:
return file_name
cwd = os.getcwd()
default_file_names = get_default_file_names()
for file_name in default_file_names:
file_name = os.path.join(cwd, file_name)
if os.path.exists(file_name):
return file_name
def get_default_file_names():
"""Get default file names for all loading strategies, sorted."""
from .strategy import get_file_type_map # noqa: Avoid circular import
return sorted(f"local.{ext}" for ext in get_file_type_map())
def parse_file_name_and_section(
file_name, section=None, extender=None, extender_section=None
):
"""Parse file name and (maybe) section.
File names can be absolute paths, relative paths, or asset
specs::
/home/user/project/local.cfg
local.cfg
some.package:local.cfg
File names can also include a section::
some.package:local.cfg#dev
If a ``section`` is passed, it will take precedence over a
section parsed out of the file name.
"""
if "#" in file_name:
file_name, parsed_section = file_name.rsplit("#", 1)
else:
parsed_section = None
if ":" in file_name:
file_name = asset_path(file_name)
if extender:
if not file_name:
# Extended another section in the same file
file_name = extender
elif not os.path.isabs(file_name):
# Extended by another file in the same directory
file_name = abs_path(file_name, relative_to=os.path.dirname(extender))
if section:
pass
elif parsed_section:
section = parsed_section
elif extender_section:
section = extender_section
else:
section = None
return file_name, section
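# Illustrative results (the absolute path depends on where the package is
# installed):
#
#     parse_file_name_and_section("local.cfg#dev")
#     # -> ("local.cfg", "dev")
#     parse_file_name_and_section("some.package:local.cfg")
#     # -> ("/.../site-packages/some/package/local.cfg", None)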
# Path utilities
def abs_path(path, relative_to=None):
"""Make path absolute and normalize it."""
if os.path.isabs(path):
path = os.path.normpath(path)
elif ":" in path:
path = asset_path(path)
else:
path = os.path.expanduser(path)
if relative_to:
path = os.path.join(relative_to, path)
path = os.path.abspath(path)
path = os.path.normpath(path)
return path
def asset_path(path):
"""Get absolute path from asset spec and normalize it."""
if ":" in path:
package_name, rel_path = path.split(":", 1)
else:
package_name, rel_path = path, ""
try:
package = importlib.import_module(package_name)
except ImportError:
raise ValueError(
f"Could not get asset path for {path}; could not import "
f"package: {package_name}"
)
if not hasattr(package, "__file__"):
raise ValueError("Can't compute path relative to namespace package")
package_path = os.path.dirname(package.__file__)
if rel_path:
path = os.path.join(package_path, rel_path)
path = os.path.normpath(path)
return path
def dotenv_path(path=None, relative_to=None, file_name=".env"):
"""Get .env path.
If a path is specified, convert it to an absolute path. Otherwise,
use the default, "./.env".
.. note:: By default, the dotenv package discovers the default .env
file relative to the call site, so we have to tell it use CWD.
"""
if path:
path = abs_path(path, relative_to)
else:
path = dotenv.find_dotenv(filename=file_name, usecwd=True)
return path
def load_dotenv(path=None, relative_to=None, file_name=".env"):
"""Load vars from dotenv file into environ."""
path = dotenv_path(path, relative_to, file_name)
dotenv.load_dotenv(path)
# These TTY functions were copied from Invoke
def is_a_tty(stream):
if hasattr(stream, "isatty") and callable(stream.isatty):
return stream.isatty()
elif has_fileno(stream):
return os.isatty(stream.fileno())
return False
def has_fileno(stream):
try:
return isinstance(stream.fileno(), int)
except (AttributeError, io.UnsupportedOperation):
return False
| mit | -6,834,773,356,538,003,000 | 26.258065 | 82 | 0.622091 | false |
ngageoint/scale | scale/data/models.py | 1 | 24039 | """Defines the database models for datasets"""
from __future__ import absolute_import, unicode_literals
import copy
import logging
from collections import namedtuple
import django.contrib.postgres.fields
from django.db import models, transaction
from django.db.models import Q, Count
from data.data import data_util
from data.data.json.data_v6 import convert_data_to_v6_json, DataV6
from data.data.exceptions import InvalidData
from data.data.value import FileValue
from data.dataset.dataset import DataSetDefinition
from data.dataset.json.dataset_v6 import convert_definition_to_v6_json, DataSetDefinitionV6
from data.exceptions import InvalidDataSetDefinition, InvalidDataSetMember
from data.serializers import DataSetFileSerializerV6, DataSetMemberSerializerV6
from storage.models import ScaleFile
from util import rest as rest_utils
from util.database import alphabetize
logger = logging.getLogger(__name__)
DataSetValidation = namedtuple('DataSetValidation', ['is_valid', 'errors', 'warnings'])
# DataSetKey = namedtuple('DataSetKey', ['name', 'version'])
class DataSetManager(models.Manager):
"""Provides additional methods for handling datasets"""
def create_dataset_v6(self, definition, title=None, description=None):
"""Creates and returns a new dataset for the given name/title/description/definition/version??
:param definition: Parameter definition of the dataset
:type definition: :class:`data.dataset.dataset.DataSetDefinition`
:param title: Optional title of the dataset
:type title: string
:param description: Optional description of the dataset
:type description: string
:returns: The new dataset
:rtype: :class:`data.models.DataSet`
        :raises :class:`data.exceptions.InvalidDataSet`: If a given dataset has an invalid value
"""
if not definition:
definition = DataSetDefinition(definition={})
dataset = DataSet()
dataset.title = title
dataset.description = description
dataset.definition = definition.get_dict()
dataset.save()
return dataset
def get_details_v6(self, dataset_id):
"""Gets additional details for the given dataset id
:returns: The full dataset for the given id
:rtype: :class:`data.models.DataSet`
"""
ds = DataSet.objects.get(pk=dataset_id)
ds.files = DataSetFile.objects.get_dataset_files(ds.id)
return ds
def get_datasets_v6(self, started=None, ended=None, dataset_ids=None, keywords=None, order=None):
"""Handles retrieving datasets - possibly filtered and ordered
:returns: The list of datasets that match the given filters
:rtype: [:class:`data.models.DataSet`]
"""
return self.filter_datasets(started=started, ended=ended, dataset_ids=dataset_ids, keywords=keywords, order=order)
def filter_datasets(self, started=None, ended=None, dataset_ids=None, keywords=None, order=None):
"""Returns a query for dataset models that filters on the given fields
        :param started: Query datasets created after this time.
        :type started: :class:`datetime.datetime`
        :param ended: Query datasets created before this time.
        :type ended: :class:`datetime.datetime`
        :param dataset_ids: Query datasets associated with the given id(s)
:type dataset_ids: :func:`list`
:param keywords: Query datasets with title or description matching one of the specified keywords
:type keywords: :func:`list`
:param order: A list of fields to control the sort order.
:type order: :func:`list`
:returns: The dataset query
:rtype: :class:`django.db.models.QuerySet`
"""
# Fetch a list of the datasets
datasets = self.all()
# Apply time range filtering
if started:
datasets = datasets.filter(created__gte=started)
if ended:
datasets = datasets.filter(created__lte=ended)
# Apply additional filters
if dataset_ids:
datasets = datasets.filter(id__in=dataset_ids)
        # Apply keyword filtering against dataset title and description
if keywords:
key_query = Q()
for keyword in keywords:
key_query |= Q(title__icontains=keyword)
key_query |= Q(description__icontains=keyword)
datasets = datasets.filter(key_query)
# Apply sorting
if order:
ordering = alphabetize(order, DataSet.ALPHABETIZE_FIELDS)
datasets = datasets.order_by(*ordering)
else:
datasets = datasets.order_by('id')
for ds in datasets:
files = DataSetFile.objects.get_file_ids(dataset_ids=[ds.id])
ds.files = len(files)
return datasets
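    # Example call (added sketch; the keyword value is hypothetical):
    #   DataSet.objects.filter_datasets(keywords=['landsat'], order=['-created'])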
def validate_dataset_v6(self, definition, title=None, description=None):
"""Validates the given dataset definiton
:param definition: The dataset definition
:type definition: dict
:returns: The dataset validation
        :rtype: :class:`data.models.DataSetValidation`
"""
is_valid = True
errors = []
warnings = []
dataset_definition = None
try:
dataset_definition = DataSetDefinitionV6(definition=definition, do_validate=True)
except InvalidDataSetDefinition as ex:
is_valid = False
errors.append(ex.error)
message = 'Dataset definition is invalid: %s' % ex
logger.info(message)
# validate other fields
return DataSetValidation(is_valid, errors, warnings)
def get_dataset_files(self, dataset_id):
"""Returns the files associated with the given dataset
:returns: The list of DataSetFiles matching the file_id
:rtype: [:class:`data.models.DataSetFile`]
"""
files = DataSetFile.objects.get_dataset_files(dataset_id=dataset_id)
return files
def get_dataset_members(self, dataset_id):
"""Returns the members associated with the given dataset_id
:returns: The list of DataSetMembers
:rtype: [:class:`data.models.DataSetMember`]
"""
dataset = self.get(pk=dataset_id)
members = DataSetMember.objects.all().filter(dataset=dataset)
return members
class DataSet(models.Model):
"""
Represents a DataSet object
:keyword title: The human-readable title of this dataset (optional)
:type title: :class:`django.db.models.CharField`
:keyword description: The description of the dataset (optional)
:type description: :class:`django.db.models.CharField`
:keyword created: Defines the created time of the dataset
:type created: :class:`django.db.models.DateTimeField`
:keyword definition: Defines the dataset
:type definition: class:`django.contrib.postgres.fields.JSONField`
"""
ALPHABETIZE_FIELDS = ['title', 'description']
title = models.CharField(blank=True, max_length=50, null=True)
description = models.TextField(blank=True, null=True)
created = models.DateTimeField(auto_now_add=True)
definition = django.contrib.postgres.fields.JSONField(default=dict)
objects = DataSetManager()
def get_definition(self):
"""Returns the dataset definition
:returns: The DataSet definition
:rtype: :class:`data.dataset.dataset.DataSetDefinition`
"""
if isinstance(self.definition, basestring):
self.definition = {}
return DataSetDefinitionV6(definition=self.definition).get_definition()
def get_v6_definition_json(self):
"""Returns the dataset definition in v6 of the JSON schema
:returns: The dataset definition in v6 of the JSON schema
:rtype: dict
"""
return rest_utils.strip_schema_version(convert_definition_to_v6_json(self.get_definition()).get_dict())
def get_dataset_definition(self):
"""Returns the dataset definition
:returns: The dataset definition json
:rtype: dict
"""
return self.definition
def get_dataset_members_json(self):
"""Returns the JSON for the associated dataset members
:returns: Returns the outgoing primitive representation.
        :rtype: dict
"""
members = DataSet.objects.get_dataset_members(dataset_id=self.id)
serializer = DataSetMemberSerializerV6(members, many=True)
return serializer.data
def get_dataset_files_json(self):
"""Returns the JSON for the associated dataset files
:returns: Returns the outgoing primitive representation.
        :rtype: dict
"""
files = DataSet.objects.get_dataset_files(self.id)
serializer = DataSetFileSerializerV6(files, many=True)
return serializer.data
class Meta(object):
"""meta information for the db"""
db_table = 'data_set'
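# Round-trip sketch (added; assumes a migrated Django database and is not part
# of the original module):
def _dataset_roundtrip_example():
    definition = DataSetDefinitionV6(definition={}, do_validate=False).get_definition()
    ds = DataSet.objects.create_dataset_v6(definition, title='demo dataset')
    return DataSet.objects.get_details_v6(ds.id)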
class DataSetMemberManager(models.Manager):
"""Provides additional methods for handling dataset members"""
def build_data_list(self, template, data_started=None, data_ended=None, created_started=None, created_ended=None,
source_started=None, source_ended=None, source_sensor_classes=None, source_sensors=None,
                                source_collections=None, source_tasks=None, mod_started=None, mod_ended=None, job_type_ids=None,
job_type_names=None, job_ids=None, is_published=None, is_superseded=None, file_names=None,
job_outputs=None, recipe_ids=None, recipe_type_ids=None, recipe_nodes=None, batch_ids=None, order=None):
"""Builds a list of data dictionaries from a template and file filters
:param template: The template to fill with files found through filters
:type template: dict
:param data_started: Query files where data started after this time.
:type data_started: :class:`datetime.datetime`
:param data_ended: Query files where data ended before this time.
:type data_ended: :class:`datetime.datetime`
:param created_started: Query files created after this time.
:type created_started: :class:`datetime.datetime`
:param created_ended: Query files created before this time.
:type created_ended: :class:`datetime.datetime`
:param source_started: Query files where source collection started after this time.
:type source_started: :class:`datetime.datetime`
:param source_ended: Query files where source collection ended before this time.
:type source_ended: :class:`datetime.datetime`
:param source_sensor_classes: Query files with the given source sensor class.
:type source_sensor_classes: :func:`list`
        :param source_sensors: Query files with the given source sensor.
        :type source_sensors: :func:`list`
        :param source_collections: Query files with the given source collection.
        :type source_collections: :func:`list`
:param source_tasks: Query files with the given source tasks.
:type source_tasks: :func:`list`
:param mod_started: Query files where the last modified date is after this time.
:type mod_started: :class:`datetime.datetime`
:param mod_ended: Query files where the last modified date is before this time.
:type mod_ended: :class:`datetime.datetime`
:param job_type_ids: Query files with jobs with the given type identifier.
:type job_type_ids: :func:`list`
:param job_type_names: Query files with jobs with the given type name.
:type job_type_names: :func:`list`
:keyword job_ids: Query files with a given job id
:type job_ids: :func:`list`
:param is_published: Query files flagged as currently exposed for publication.
:type is_published: bool
:param is_superseded: Query files that have/have not been superseded.
:type is_superseded: bool
:param file_names: Query files with the given file names.
:type file_names: :func:`list`
:keyword job_outputs: Query files with the given job outputs
:type job_outputs: :func:`list`
:keyword recipe_ids: Query files with a given recipe id
:type recipe_ids: :func:`list`
:keyword recipe_nodes: Query files with a given recipe nodes
:type recipe_nodes: :func:`list`
:keyword recipe_type_ids: Query files with the given recipe types
:type recipe_type_ids: :func:`list`
:keyword batch_ids: Query files with batches with the given identifiers.
:type batch_ids: :func:`list`
:param order: A list of fields to control the sort order.
:type order: :func:`list`
"""
files = ScaleFile.objects.filter_files(
data_started=data_started, data_ended=data_ended,
source_started=source_started, source_ended=source_ended,
source_sensor_classes=source_sensor_classes, source_sensors=source_sensors,
source_collections=source_collections, source_tasks=source_tasks,
mod_started=mod_started, mod_ended=mod_ended, job_type_ids=job_type_ids,
job_type_names=job_type_names, job_ids=job_ids,
file_names=file_names, job_outputs=job_outputs, recipe_ids=recipe_ids,
recipe_type_ids=recipe_type_ids, recipe_nodes=recipe_nodes, batch_ids=batch_ids,
order=order)
data_list = []
try:
for f in files:
entry = copy.deepcopy(template)
file_params = entry['files']
for p in file_params:
if file_params[p] == 'FILE_VALUE':
file_params[p] = [f.id]
data_list.append(DataV6(data=entry, do_validate=True).get_data())
except (KeyError, TypeError) as ex:
raise InvalidData('INVALID_TEMPLATE', "Specified template is invalid: %s" % ex)
return data_list
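    # Template shape (added note, inferred from the loop above; 'input_file' is
    # a hypothetical parameter name): every entry under 'files' whose value is
    # the sentinel 'FILE_VALUE' gets replaced with a one-file id list, e.g.
    #   {'files': {'input_file': 'FILE_VALUE'}}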
def validate_data_list(self, dataset_def, data_list):
"""Validates a list of data objects against a dataset
:param dataset_def: The dataset definition the member is a part of
        :type dataset_def: :class:`data.dataset.dataset.DataSetDefinition`
:param data_list: Data definitions of the dataset members
:type data_list: [:class:`data.data.data.Data`]
"""
is_valid = True
errors = []
warnings = []
for data in data_list:
try:
dataset_def.validate(data)
except (InvalidData, InvalidDataSetMember) as ex:
is_valid = False
errors.append(ex.error)
                message = 'Dataset member data is invalid: %s' % ex
                logger.info(message)
# validate other fields
return DataSetValidation(is_valid, errors, warnings)
def create_dataset_members(self, dataset, data_list):
"""Creates a dataset member
:param dataset: The dataset the member is a part of
:type dataset: :class:`data.models.DataSet`
:param data_list: Data definitions of the dataset members
:type data_list: [:class:`data.data.data.Data`]
"""
with transaction.atomic():
dataset_members = []
datasetfiles = []
existing_scale_ids = DataSetFile.objects.get_file_ids(dataset_ids=[dataset.id])
for d in data_list:
dataset_member = DataSetMember()
dataset_member.dataset = dataset
dataset_member.data = convert_data_to_v6_json(d).get_dict()
dataset_member.file_ids = list(data_util.get_file_ids(d))
dataset_members.append(dataset_member)
datasetfiles.extend(DataSetFile.objects.create_dataset_files(dataset, d, existing_scale_ids))
                existing_scale_ids.extend(dataset_member.file_ids)  # extend (not append): keep this a flat list of file ids
DataSetFile.objects.bulk_create(datasetfiles)
return DataSetMember.objects.bulk_create(dataset_members)
def get_dataset_members(self, dataset):
"""Returns dataset members for the given dataset
:returns: members for a given dataset
:rtype: QuerySet<DataSetMember>
"""
return self.all().filter(dataset=dataset).order_by('id')
def get_details_v6(self, dsm_id):
"""Gets additional details for the given dataset member id
:returns: The full dataset member for the given id
:rtype: :class:`data.models.DataSetMember`
"""
dsm = DataSetMember.objects.get(pk=dsm_id)
dsm.files = DataSetFile.objects.filter(dataset=dsm.dataset, scale_file_id__in=list(dsm.file_ids))
return dsm
class DataSetMember(models.Model):
"""
    Defines one member of a dataset; contains the list/descriptors of the member's data files
:keyword dataset: Refers to dataset member belongs to
:type dataset: :class:`django.db.models.ForeignKey`
:keyword data: JSON description of the data in this DataSetMember.
    :type data: :class:`django.contrib.postgres.fields.JSONField`
:keyword created: Created Time
    :type created: :class:`django.db.models.DateTimeField`
"""
dataset = models.ForeignKey('data.DataSet', on_delete=models.PROTECT)
data = django.contrib.postgres.fields.JSONField(default=dict)
file_ids = django.contrib.postgres.fields.ArrayField(models.IntegerField(null=True))
created = models.DateTimeField(auto_now_add=True)
objects = DataSetMemberManager()
def get_dataset_definition(self):
"""Returns the dataset definition
:returns: The dataset definition
:rtype: :class:`data.dataset.dataset.DataSetDefinition`
"""
return self.dataset.get_definition()
def get_data(self):
"""Returns the data for this datasetmember
:returns: The data for this datasetmember
:rtype: :class:`data.data.data.Data`
"""
return DataV6(data=self.data, do_validate=False).get_data()
def get_v6_data_json(self):
"""Returns the data for this datasetmember as v6 json with the version stripped
:returns: The v6 JSON output data dict for this datasetmember
:rtype: dict
"""
return rest_utils.strip_schema_version(convert_data_to_v6_json(self.get_data()).get_dict())
class Meta(object):
"""meta information for the db"""
db_table = 'data_set_member'
class DataSetFileManager(models.Manager):
"""Manages the datasetfile model"""
def create_dataset_files(self, dataset, data, existing_scale_ids):
"""Creates dataset files for the given dataset and data"""
datasetfiles = []
        for param_name, value in data.values.items():
            if type(value) is FileValue:
                for file_id in value.file_ids:
                    if file_id in existing_scale_ids:
                        continue
                    dataset_file = DataSetFile()
                    dataset_file.dataset = dataset
                    dataset_file.scale_file = ScaleFile.objects.get(pk=file_id)
                    dataset_file.parameter_name = param_name
                    datasetfiles.append(dataset_file)
        return datasetfiles
def get_file_ids(self, dataset_ids, parameter_names=None):
"""Returns a list of the file IDs for the given datasets, optionally filtered by parameter_name.
        :param dataset_ids: The ids of the associated datasets
        :type dataset_ids: :func:`list`
        :param parameter_names: The parameter names to search for in the given datasets
        :type parameter_names: :func:`list`
:returns: The list of scale file IDs
:rtype: :func:`list`
"""
query = self.all().filter(dataset_id__in=list(dataset_ids))
if parameter_names:
query = query.filter(parameter_name__in=list(parameter_names))
return [result.scale_file_id for result in query.only('scale_file_id').distinct()]
def get_dataset_ids(self, file_ids, all_files=False):
"""Returns a list of the dataset IDs that contain the given files
        :param file_ids: The ids of the files to look for
        :type file_ids: :func:`list`
:param all_files: Whether or not a dataset must contain all files or just some of the files in the list
:type all_files: bool
:returns: The list of dataset IDs
:rtype: :func:`list`
"""
results = []
if not all_files:
query = self.all().filter(scale_file_id__in=list(file_ids)).only('dataset_id').distinct()
results = [result.dataset_id for result in query]
else:
query = self.all().filter(scale_file_id__in=list(file_ids)).values('dataset_id').annotate(total=Count('dataset_id')).order_by('total')
for result in query:
if result['total'] == len(file_ids):
results.append(result['dataset_id'])
return results
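    # Semantics sketch (added; ids are hypothetical): with all_files=False the
    # query above returns datasets containing ANY of file_ids; with
    # all_files=True only datasets containing EVERY id in file_ids qualify.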
def get_files(self, dataset_ids, parameter_names=None):
"""Returns the dataset files associated with the given dataset_ids
        :param dataset_ids: The ids of the associated datasets
        :type dataset_ids: :func:`list`
        :param parameter_names: The parameter names to search for in the given datasets
        :type parameter_names: :func:`list`
:returns: The DataSetFiles associated with that dataset_id
:rtype: [:class:`data.models.DataSetFile`]
"""
files = self.all().filter(dataset_id__in=list(dataset_ids))
if parameter_names:
files = files.filter(parameter_name__in=list(parameter_names))
return files
def get_datasets(self, file_ids, all_files=False):
"""Returns the datasets associated with the given file_id
        :param file_ids: The ids of the associated files
        :type file_ids: :func:`list`
:param all_files: Whether or not a dataset must contain all files or just some of the files in the list
:type all_files: bool
:returns: The DataSets associated with that dataset_id
:rtype: [:class:`data.models.DataSet`]
"""
dataset_ids = self.get_dataset_ids(file_ids=file_ids, all_files=all_files)
datasets = DataSet.objects.filter(id__in=dataset_ids)
return datasets
def get_dataset_files(self, dataset_id):
"""Returns the dataset files associated with the given dataset_id
:param dataset_id: The id of the associated dataset
:type dataset_id: integer
:returns: The DataSetFiles associated with that dataset_id
:rtype: [:class:`data.models.DataSetFile`]
"""
files = DataSetFile.objects.filter(dataset_id=dataset_id)
return files
class DataSetFile(models.Model):
"""
The actual file in a dataset member
:keyword dataset: Refers to the dataset the file is a member of
:type dataset: :class:`django.db.models.ForeignKey`
:keyword scale_file: Refers to the ScaleFile
:type scale_file: :class:`django.db.models.ForeignKey`
:keyword parameter_name: Refers to the File parameter name
:type parameter_name: :class:`django.db.models.CharField`
"""
dataset = models.ForeignKey('data.DataSet', on_delete=models.PROTECT)
scale_file = models.ForeignKey('storage.ScaleFile', on_delete=models.PROTECT)
parameter_name = models.CharField(db_index=True, max_length=50)
objects = DataSetFileManager()
class Meta(object):
"""meta information for the db"""
db_table = 'data_set_file'
unique_together = ("dataset", "scale_file") | apache-2.0 | 391,126,229,335,592,260 | 39.745763 | 146 | 0.650193 | false |
redhat-openstack/rdo-infra | ci-scripts/dlrnapi_promoter/test_registries_client_unit.py | 1 | 10398 | import subprocess
import yaml
try:
# Python3 imports
from unittest import mock
from unittest.mock import patch
except ImportError:
# Python2 imports
from mock import patch
import mock
from common import PromotionError
from dlrn_hash import DlrnCommitDistroHash, DlrnHash
from test_unit_fixtures import LegacyConfigSetup, hashes_test_cases
class TestPrepareExtraVars(LegacyConfigSetup):
def setUp(self):
super(TestPrepareExtraVars, self).setUp()
self.client = self.promoter.registries_client
self.dlrn_hash_commitdistro = DlrnCommitDistroHash(commit_hash='abc',
distro_hash='def',
component="comp1",
timestamp=1)
def test_setup(self):
error_msg = "Container push logfile is misplaced"
assert self.client.logfile != "", error_msg
@patch('logging.Logger.error')
@patch('logging.Logger.info')
@patch('logging.Logger.debug')
@patch('repo_client.RepoClient.get_versions_csv')
@patch('repo_client.RepoClient.get_commit_sha')
@patch('repo_client.RepoClient.get_containers_list')
def test_prepare_extra_vars_empty_missing_reader(self,
get_containers_mock,
get_commit_mock,
get_versions_mock,
mock_log_debug,
mock_log_info,
mock_log_error):
get_versions_mock.return_value = None
with self.assertRaises(PromotionError):
self.client.prepare_extra_vars(self.dlrn_hash_commitdistro,
"current-tripleo",
"tripleo-ci-testing")
get_versions_mock.assert_has_calls([
mock.call(self.dlrn_hash_commitdistro, "tripleo-ci-testing")
])
self.assertFalse(get_commit_mock.called)
self.assertFalse(get_containers_mock.called)
self.assertFalse(mock_log_debug.called)
self.assertFalse(mock_log_info.called)
mock_log_error.assert_has_calls([
mock.call("No versions.csv found")
])
@patch('logging.Logger.error')
@patch('logging.Logger.info')
@patch('logging.Logger.debug')
@patch('repo_client.RepoClient.get_versions_csv')
@patch('repo_client.RepoClient.get_commit_sha')
@patch('repo_client.RepoClient.get_containers_list')
def test_prepare_extra_vars_empty_missing_sha(self,
get_containers_mock,
get_commit_mock,
get_versions_mock,
mock_log_debug,
mock_log_info,
mock_log_error):
get_versions_mock.return_value = "reader"
get_commit_mock.return_value = None
with self.assertRaises(PromotionError):
self.client.prepare_extra_vars(self.dlrn_hash_commitdistro,
"current-tripleo",
"tripleo-ci-testing")
get_versions_mock.assert_has_calls([
mock.call(self.dlrn_hash_commitdistro, "tripleo-ci-testing")
])
get_commit_mock.assert_has_calls([
mock.call("reader", "openstack-tripleo-common")
])
self.assertFalse(get_containers_mock.called)
self.assertFalse(mock_log_debug.called)
self.assertFalse(mock_log_info.called)
mock_log_error.assert_has_calls([
mock.call("Versions.csv does not contain tripleo-common commit")
])
@patch('logging.Logger.error')
@patch('logging.Logger.info')
@patch('logging.Logger.debug')
@patch('repo_client.RepoClient.get_versions_csv')
@patch('repo_client.RepoClient.get_commit_sha')
@patch('repo_client.RepoClient.get_containers_list')
def test_prepare_extra_vars_empty_containers_list(self,
get_containers_mock,
get_commit_mock,
get_versions_mock,
mock_log_debug,
mock_log_info,
mock_log_error):
get_versions_mock.return_value = "reader"
get_commit_mock.return_value = "abc"
get_containers_mock.return_value = []
with self.assertRaises(PromotionError):
self.client.prepare_extra_vars(self.dlrn_hash_commitdistro,
"current-tripleo",
"tripleo-ci-testing")
get_versions_mock.assert_has_calls([
mock.call(self.dlrn_hash_commitdistro, "tripleo-ci-testing")
])
get_commit_mock.assert_has_calls([
mock.call("reader", "openstack-tripleo-common")
])
get_containers_mock.assert_has_calls([
mock.call("abc")
])
self.assertFalse(mock_log_debug.called)
self.assertFalse(mock_log_info.called)
mock_log_error.assert_has_calls([
mock.call("Containers list is empty")
])
@patch('logging.Logger.error')
@patch('logging.Logger.info')
@patch('logging.Logger.debug')
@patch('repo_client.RepoClient.get_versions_csv')
@patch('repo_client.RepoClient.get_commit_sha')
@patch('repo_client.RepoClient.get_containers_list')
def test_prepare_extra_vars_success(self,
get_containers_mock,
get_commit_mock,
get_versions_mock,
mock_log_debug,
mock_log_info,
mock_log_error):
get_versions_mock.return_value = "reader"
get_commit_mock.return_value = "abc"
get_containers_mock.return_value = ['a', 'b']
extra_vars_path = \
self.client.prepare_extra_vars(self.dlrn_hash_commitdistro,
"current-tripleo",
"tripleo-ci-testing")
self.assertIsInstance(extra_vars_path, str)
self.assertIn(".yaml", extra_vars_path)
with open(extra_vars_path) as extra_vars_file:
extra_vars = yaml.safe_load(stream=extra_vars_file)
self.assertIsInstance(extra_vars, dict)
self.assertDictEqual(extra_vars, {
'release': "master",
'script_root': mock.ANY,
'distro_name': "centos",
'distro_version': '7',
'manifest_push': True,
'target_registries_push': True,
'candidate_label': "tripleo-ci-testing",
"named_label": "current-tripleo",
"source_namespace": "tripleomaster",
"target_namespace": "tripleomaster",
"commit_hash": self.dlrn_hash_commitdistro.commit_hash,
"distro_hash": self.dlrn_hash_commitdistro.distro_hash,
"full_hash": self.dlrn_hash_commitdistro.full_hash,
"containers_list": ['a', 'b']
})
get_versions_mock.assert_has_calls([
mock.call(self.dlrn_hash_commitdistro, "tripleo-ci-testing")
])
get_commit_mock.assert_has_calls([
mock.call("reader", "openstack-tripleo-common")
])
get_containers_mock.assert_has_calls([
mock.call("abc")
])
mock_log_debug.assert_has_calls([
mock.call("Crated extra vars file at %s", mock.ANY)
])
mock_log_info.assert_has_calls([
mock.call("Passing extra vars to playbook: %s", mock.ANY)
])
self.assertFalse(mock_log_error.called)
class TestPromote(LegacyConfigSetup):
def setUp(self):
super(TestPromote, self).setUp()
self.client = self.promoter.registries_client
self.dlrn_hash_commitdistro = DlrnCommitDistroHash(
commit_hash='abc',
distro_hash='def',
component="comp1",
timestamp=1)
@patch('logging.Logger.error')
@patch('logging.Logger.info')
@patch('os.unlink')
@patch('registries_client.RegistriesClient.prepare_extra_vars')
@mock.patch('subprocess.check_output')
def test_promote_success(self, check_output_mock,
extra_vars_mock,
unlink_mock,
mock_log_info,
mock_log_error
):
candidate_hash =\
DlrnHash(source=hashes_test_cases['aggregate']['dict']['valid'])
target_label = "test"
check_output_mock.return_value = "test log"
self.client.promote(candidate_hash, target_label)
self.assertTrue(check_output_mock.called)
self.assertFalse(mock_log_error.called)
@patch('logging.Logger.error')
@patch('logging.Logger.info')
@patch('os.unlink')
@patch('registries_client.RegistriesClient.prepare_extra_vars')
@mock.patch('subprocess.check_output')
def test_promote_failure(self, check_output_mock,
extra_vars_mock,
unlink_mock,
mock_log_info,
mock_log_error
):
candidate_hash = \
DlrnHash(source=hashes_test_cases['aggregate']['dict']['valid'])
target_label = "test"
exception = subprocess.CalledProcessError(1, 2)
exception.output = b"test"
check_output_mock.side_effect = exception
with self.assertRaises(PromotionError):
self.client.promote(candidate_hash, target_label)
self.assertTrue(mock_log_error.called)
| apache-2.0 | -4,558,805,724,767,408,000 | 41.790123 | 77 | 0.525293 | false |
alphatwirl/alphatwirl | alphatwirl/summary/Scan.py | 1 | 1209 | # Tai Sakuma <[email protected]>
##__________________________________________________________________||
import numpy as np
import copy
##__________________________________________________________________||
class Scan:
def __init__(self, val=None, weight=1, contents=None):
if contents is not None:
self.contents = contents
return
if val is None:
self.contents = [ ]
return
self.contents = [val]
def __add__(self, other):
contents = self.contents + other.contents
return self.__class__(contents=contents)
def __radd__(self, other):
# is called with other = 0 when e.g. sum([obj1, obj2])
if other == 0:
return self.__class__() + self
raise TypeError('unsupported: {!r} + {!r}'.format(other, self))
def __repr__(self):
return '{}(contents={})'.format(self.__class__.__name__, self.contents)
def __eq__(self, other):
return self.contents == other.contents
def __copy__(self):
contents = list(self.contents)
return self.__class__(contents=contents)
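# Demonstration (added sketch; not in the original file): Scan instances
# combine with ``+`` and with the builtin ``sum`` via ``__radd__`` above.
def _scan_example():
    merged = sum([Scan(val=1.0), Scan(val=2.0)])
    return merged == Scan(contents=[1.0, 2.0])  # True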
##__________________________________________________________________||
| bsd-3-clause | -5,268,649,975,022,638,000 | 28.487805 | 79 | 0.456576 | false |
oasis-open/cti-python-stix2 | stix2/test/v20/test_kill_chain_phases.py | 1 | 1652 | """Tests for stix2.v20.KillChainPhase"""
import pytest
import stix2
LMCO_RECON = """{
"kill_chain_name": "lockheed-martin-cyber-kill-chain",
"phase_name": "reconnaissance"
}"""
def test_lockheed_martin_cyber_kill_chain():
recon = stix2.v20.KillChainPhase(
kill_chain_name="lockheed-martin-cyber-kill-chain",
phase_name="reconnaissance",
)
assert recon.serialize(pretty=True) == LMCO_RECON
FOO_PRE_ATTACK = """{
"kill_chain_name": "foo",
"phase_name": "pre-attack"
}"""
def test_kill_chain_example():
preattack = stix2.v20.KillChainPhase(
kill_chain_name="foo",
phase_name="pre-attack",
)
assert preattack.serialize(pretty=True) == FOO_PRE_ATTACK
def test_kill_chain_required_properties():
with pytest.raises(stix2.exceptions.MissingPropertiesError) as excinfo:
stix2.v20.KillChainPhase()
assert excinfo.value.cls == stix2.v20.KillChainPhase
assert excinfo.value.properties == ["kill_chain_name", "phase_name"]
def test_kill_chain_required_property_chain_name():
with pytest.raises(stix2.exceptions.MissingPropertiesError) as excinfo:
stix2.v20.KillChainPhase(phase_name="weaponization")
assert excinfo.value.cls == stix2.v20.KillChainPhase
assert excinfo.value.properties == ["kill_chain_name"]
def test_kill_chain_required_property_phase_name():
with pytest.raises(stix2.exceptions.MissingPropertiesError) as excinfo:
stix2.v20.KillChainPhase(kill_chain_name="lockheed-martin-cyber-kill-chain")
assert excinfo.value.cls == stix2.v20.KillChainPhase
assert excinfo.value.properties == ["phase_name"]
| bsd-3-clause | 84,364,463,680,078,540 | 26.081967 | 84 | 0.700969 | false |
absperf/wagtailapproval | wagtailapproval/menu.py | 1 | 3637 | from __future__ import (absolute_import, division, print_function,
unicode_literals)
import itertools
from django.contrib.auth import get_user
from django.core.urlresolvers import reverse, reverse_lazy
from django.utils.translation import ugettext_lazy as _
from django.utils.translation import ungettext_lazy as _n
from wagtail.wagtailadmin import messages
from wagtail.wagtailadmin.menu import MenuItem
from .models import ApprovalStep
def get_user_approval_items(user):
'''Get an iterable of all items pending for a user's approval.
:param User user: A user object whose groups are to be checked for
appropriate steps
:rtype: Iterable[ApprovalItem]
:returns: All the items that this user can approve or reject.
'''
if user.is_superuser:
steps = ApprovalStep.objects.all()
else:
groups = user.groups.all()
steps = ApprovalStep.objects.filter(group__in=groups)
return itertools.chain.from_iterable(
step.get_items(user) for step in steps)
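# Note (added): superusers iterate pending items from every ApprovalStep, while
# other users only see steps owned by one of their groups, per the branch above.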
class ApprovalMenuItem(MenuItem):
'''The menu item that shows in the wagtail sidebar'''
def __init__(
self, label=_('Approval'), url=reverse_lazy('wagtailapproval:index'),
classnames='icon icon-tick-inverse', order=201, **kwargs):
super(ApprovalMenuItem, self).__init__(
label,
url,
classnames=classnames,
order=order,
**kwargs)
def is_shown(self, request):
'''Only show the menu if the user is in an owned approval group'''
user = get_user(request)
# If the user is superuser, show the menu if any steps exist at all
if user.is_superuser:
return ApprovalStep.objects.exists()
groups = user.groups.all()
if ApprovalStep.objects.filter(group__in=groups).exists():
# Display the approval notification only outside of the approval
# paths
if not request.path.startswith(reverse('wagtailapproval:index')):
# Get the count of waiting approvals
waiting_approvals = sum(
1 for _ in get_user_approval_items(user))
if waiting_approvals > 0:
messages.info(
request,
_n(
'{num:d} item waiting for approval',
'{num:d} items waiting for approval',
waiting_approvals).format(num=waiting_approvals),
buttons=[
messages.button(
reverse('wagtailapproval:index'),
_('Examine Now'))
]
)
return True
return False
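# Sketch (added; not part of this module): a package hooks.py would typically
# expose the menu item like this. The hook name follows the Wagtail 1.x API
# implied by the imports above; treat it as an assumption.
def _register_menu_item_example():
    from wagtail.wagtailcore import hooks

    @hooks.register('register_admin_menu_item')
    def register_approval_menu_item():
        return ApprovalMenuItem()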
class ApprovalAdminMenuItem(MenuItem):
'''The admin menu item that shows in the wagtail sidebar, for
administrating entire pipelines and manually dropping items into steps.'''
def __init__(
self, label=_('Approval Admin'),
url=reverse_lazy('wagtailapproval:admin_index'),
classnames='icon icon-cog', order=200, **kwargs):
super(ApprovalAdminMenuItem, self).__init__(
label,
url,
classnames=classnames,
order=order,
**kwargs)
def is_shown(self, request):
'''Only show the menu if the user is a superuser and any ApprovalStep
objects exist.'''
user = get_user(request)
if user.is_superuser:
return ApprovalStep.objects.exists()
return False
| bsd-2-clause | -1,745,352,902,436,354,300 | 35.009901 | 78 | 0.587022 | false |
lulivi/debate_bot | bot.py | 1 | 5398 | #!/usr/bin/python3 -u
# -*- coding: utf-8 -*-
import sys
import time
import telebot  # Bot API library.
from telebot import types  # Types for the bot API.
from priv.__init__ import token as tk
bot = telebot.TeleBot(tk())  # Create our bot object.
###############################################################################
# commands
###############################################################################
# start: welcome message
@bot.message_handler(commands=['start'])
def command_start(m):
cid = m.chat.id
comando = m.text[7:]
if comando == 'reglas':
command_reglas(m)
else:
bot.send_message(cid,"¡Hola! Soy Debatebot.\nUsa el comando /ayuda para que te muestre mis demás comandos.\n\nEspero ser de utilidad.")
########################################
# show the visible commands
@bot.message_handler(commands=['ayuda'])
def command_ayuda(m):
bot.reply_to(m,"Guardo y doy información acerca de debates.\n/nuevo establezco el nuevo tema de debate.\n/actual muestro el tema actual de debate.\n/fin termino el debate actual.\n/reglas muestro las reglas actuales del grupo.")
########################################
# new debate
@bot.message_handler(commands=['nuevo'])
def command_nuevo(m):
pos = m.text.find(" ")
cid = m.chat.id
if pos == -1:
bot.send_message(cid,m.from_user.first_name+", escribe:\n/nuevo nuevo_tema_de_debate")
else:
if get_matter(cid) == "":
set_matter(cid, m.text[pos:])
fuid = m.from_user.id
set_matter_id(cid, fuid)
bot.send_message(cid,"El tema actual se ha guardado con éxito, "+m.from_user.first_name+".")
else:
bot.send_message(cid,"Ya se está debatifino un tema, "+m.from_user.first_name+".\n/fin para terminarlo.\n/actual para obtenerlo.")
########################################
# current debate
@bot.message_handler(commands=['actual'])
def command_actual(m):
cid = m.chat.id
actual = get_matter(cid)
if actual != "":
bot.send_message(cid,"\"* "+actual+" *\" es el tema actual.\n\n/fin para terminarlo.",parse_mode="Markdown")
else:
bot.send_message(cid,"No hay debate actualmente.\n/nuevo para comenzar uno.")
########################################
# end the debate
@bot.message_handler(commands=['fin'])
def command_fin(m):
cid = m.chat.id
if get_matter(cid) != "":
uid = get_matter_id(cid)
fuid = m.from_user.id
if uid == fuid:
set_matter(cid)
set_matter_id(cid,uid)
bot.send_message(cid,"Tema cerrado, "+m.from_user.first_name+".\n/nuevo para comenzar uno.")
else:
bot.send_message(cid,"No tiene permiso para terminar el debate, "+m.from_user.first_name+".")
else:
bot.send_message(cid, "No hay debate actualmente, "+m.from_user.first_name+".\n/nuevo para comenzar uno.")
########################################
REGLASID = ""
# rules
@bot.message_handler(commands=['reglas'])
def command_to_reglas(m):
    global REGLASID  # required: otherwise the assignment below creates a local and the deep link never sees the group id
    cid = m.chat.id
    if cid < 0:
        REGLASID = str(cid)
bot.send_message(cid,"Pulse [aquí](https://telegram.me/debate_bot?start=reglas)",parse_mode="Markdown")
else:
command_reglas(m)
def command_reglas(m):
if REGLASID != "":
reglas = get_reglas(REGLASID)
else:
cid = m.chat.id
reglas = get_reglas(cid)
if reglas != "":
bot.reply_to(m,"Reglas de participación en este grupo:\n\n"+reglas)
else:
bot.reply_to(m,"No hay relgas definidas para este grupo.")
########################################
# define the rules
@bot.message_handler(commands=['definereglas'])
def command_definereglas(m):
    cid = m.chat.id
    pos = m.text.find(" ")
    txt = m.text[pos+1:] if pos != -1 else ""
    set_reglas(cid, txt)
###############################################################################
# functions
###############################################################################
##### matter #####
def set_matter(chatid,txt=""):
cid = str(chatid)
with open("./matter/"+cid+".mat",'w') as f:
f.write(txt)
def get_matter(chatid):
cid = str(chatid)
with open("./matter/"+cid+".mat",'a') as f:
pass
with open("./matter/"+cid+".mat",'r') as f:
matter = f.read()
return matter
##### rules #####
def set_reglas(chatid, txt):
cid = str(chatid)
with open("./reglas/"+cid+".rul",'w') as f:
f.write(txt)
def get_reglas(chatid):
cid = str(chatid)
with open("./reglas/"+cid+".rul",'a') as f:
pass
with open("./reglas/"+cid+".rul",'r') as f:
reglas = f.read()
return reglas
##### matter id #####
def set_matter_id(chatid,userid):
cid = str(chatid)
uid = str(userid)
with open("./matter/"+cid+".matid",'w') as f:
f.write(uid)
def get_matter_id(chatid):
cid = str(chatid)
with open("./matter/"+cid+".matid",'a') as f:
pass
with open("./matter/"+cid+".matid",'r') as f:
uid = f.read()
if uid == "":
return -1
else:
return int(uid)
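# Storage note (added): each chat's state lives in plain files keyed by chat id,
# e.g. for a hypothetical chat -100123: ./matter/-100123.mat (topic),
# ./matter/-100123.matid (topic owner's user id), ./reglas/-100123.rul (rules).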
###############################################################################
bot.polling()
| gpl-2.0 | 6,137,335,804,472,736,000 | 31.083333 | 232 | 0.520779 | false |
chugunovyar/factoryForBuild | neuron/SaveClosedPossition.py | 1 | 31069 | # -*- coding: utf-8 -*-
import logging
from neuron.models import DataSet
import dateutil.parser as DP
loggermsg = logging.getLogger('django')
def saveClosedPossition(jsondata):
#loggermsg.info(len(jsondata))
    # Check whether this order already exists in the DB
ifExistOrdernum = DataSet.objects.filter(open_magicnum=jsondata['magicnum'])
    # If no such order exists, save it to the DB.
if len(ifExistOrdernum) == 0:
if float(jsondata['result']) > 0:
effectivnes = 1
else:
effectivnes = 0
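        # Added note: 'effectivnes' is the binary effectiveness flag stored with
        # the closed position: 1 for a profitable close, 0 otherwise.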
dataToSave = DataSet(
            open_magicnum = jsondata['magicnum'],
            open_neuron_name = jsondata['neuron_name'],
            open_period = jsondata['period'],
            orderOpenPrice = jsondata['openprice'],
            open_type = jsondata['open_type'],
            open_time = DP.parse(jsondata['orderopentime']),
            open_close_1 = jsondata['open_close_1'],
open_open_1 = jsondata['open_open_1'],\
open_high_1 = jsondata['open_high_1'],\
open_low_1 = jsondata['open_low_1'],
open_upband_1 = jsondata['open_upband_1'],
open_lowband_1 = jsondata['open_lowband_1'],
open_midleband_1 = jsondata['open_midleband_1'],
open_jaw_1 = jsondata['open_jaw_1'],
open_lips_1 = jsondata['open_lips_1'],
open_teeth_1 = jsondata['open_teeth_1'],
open_volume_1 = jsondata['open_volume_1'],
open_close_2 = jsondata['open_close_2'],
open_open_2 = jsondata['open_open_2'],
open_high_2 = jsondata['open_high_2'],
open_low_2 = jsondata['open_low_2'],
open_upband_2 = jsondata['open_upband_2'],
open_lowband_2 = jsondata['open_lowband_2'],
open_midleband_2 = jsondata['open_midleband_2'],
open_jaw_2 = jsondata['open_jaw_2'],
open_lips_2 = jsondata['open_lips_2'],
open_teeth_2 = jsondata['open_teeth_2'],
open_volume_2 = jsondata['open_volume_2'],
open_close_3 = jsondata['open_close_3'],
open_open_3 = jsondata['open_open_3'],
open_high_3 = jsondata['open_high_3'],
open_low_3 = jsondata['open_low_3'],
open_upband_3 = jsondata['open_upband_3'],
open_lowband_3 = jsondata['open_lowband_3'],
open_midleband_3 = jsondata['open_midleband_3'],
open_jaw_3 = jsondata['open_jaw_3'],
open_lips_3 = jsondata['open_lips_3'],
open_teeth_3 = jsondata['open_teeth_3'],
open_volume_3 = jsondata['open_volume_3'],
open_close_4 = jsondata['open_close_4'],
open_open_4 = jsondata['open_open_4'],
open_high_4 = jsondata['open_high_4'],
open_low_4 = jsondata['open_low_4'],
open_upband_4 = jsondata['open_upband_4'],
open_lowband_4 = jsondata['open_lowband_4'],
open_midleband_4 = jsondata['open_midleband_4'],
open_jaw_4 = jsondata['open_jaw_4'],
open_lips_4 = jsondata['open_lips_4'],
open_teeth_4 = jsondata['open_teeth_4'],
open_volume_4 = jsondata['open_volume_4'],
open_close_5 = jsondata['open_close_5'],
open_open_5 = jsondata['open_open_5'],
open_high_5 = jsondata['open_high_5'],
open_low_5 = jsondata['open_low_5'],
open_upband_5 = jsondata['open_upband_5'],
open_lowband_5 = jsondata['open_lowband_5'],
open_midleband_5 = jsondata['open_midleband_5'],
open_jaw_5 = jsondata['open_jaw_5'],
open_lips_5 = jsondata['open_lips_5'],
open_teeth_5 = jsondata['open_teeth_5'],
open_volume_5 = jsondata['open_volume_5'],
open_close_6 = jsondata['open_close_6'],
open_open_6 = jsondata['open_open_6'],
open_high_6 = jsondata['open_high_6'],
open_low_6 = jsondata['open_low_6'],
open_upband_6 = jsondata['open_upband_6'],
open_lowband_6 = jsondata['open_lowband_6'],
open_midleband_6 = jsondata['open_midleband_6'],
open_jaw_6 = jsondata['open_jaw_6'],
open_lips_6 = jsondata['open_lips_6'],
open_teeth_6 = jsondata['open_teeth_6'],
open_volume_6 = jsondata['open_volume_6'],
open_close_7 = jsondata['open_close_7'],
open_open_7 = jsondata['open_open_7'],
open_high_7 = jsondata['open_high_7'],
open_low_7 = jsondata['open_low_7'],
open_upband_7 = jsondata['open_upband_7'],
open_lowband_7 = jsondata['open_lowband_7'],
open_midleband_7 = jsondata['open_midleband_7'],
open_jaw_7 = jsondata['open_jaw_7'],
open_lips_7 = jsondata['open_lips_7'],
open_teeth_7 = jsondata['open_teeth_7'],
open_volume_7 = jsondata['open_volume_7'],
open_close_8 = jsondata['open_close_8'],
open_open_8 = jsondata['open_open_8'],
open_high_8 = jsondata['open_high_8'],
open_low_8 = jsondata['open_low_8'],
open_upband_8 = jsondata['open_upband_8'],
open_lowband_8 = jsondata['open_lowband_8'],
open_midleband_8 = jsondata['open_midleband_8'],
open_jaw_8 = jsondata['open_jaw_8'],
open_lips_8 = jsondata['open_lips_8'],
open_teeth_8 = jsondata['open_teeth_8'],
open_volume_8 = jsondata['open_volume_8'],
open_close_9 = jsondata['open_close_9'],
open_open_9 = jsondata['open_open_9'],
open_high_9 = jsondata['open_high_9'],
open_low_9 = jsondata['open_low_9'],
open_upband_9 = jsondata['open_upband_9'],
open_lowband_9 = jsondata['open_lowband_9'],
open_midleband_9 = jsondata['open_midleband_9'],
open_jaw_9 = jsondata['open_jaw_9'],
open_lips_9 = jsondata['open_lips_9'],
open_teeth_9 = jsondata['open_teeth_9'],
open_volume_9 = jsondata['open_volume_9'],
open_close_10 = jsondata['open_close_10'],
open_open_10 = jsondata['open_open_10'],
open_high_10 = jsondata['open_high_10'],
open_low_10 = jsondata['open_low_10'],
open_upband_10 = jsondata['open_upband_10'],
open_lowband_10 = jsondata['open_lowband_10'],
open_midleband_10 = jsondata['open_midleband_10'],
open_jaw_10 = jsondata['open_jaw_10'],
open_lips_10 = jsondata['open_lips_10'],
open_teeth_10 = jsondata['open_teeth_10'],
open_volume_10 = jsondata['open_volume_10'],
)
dataToSave.save()
DataSet.objects.filter(open_magicnum=jsondata['magicnum']).update(
open_close_11 = jsondata['open_close_11'],
open_open_11 = jsondata['open_open_11'],
open_high_11 = jsondata['open_high_11'],
open_low_11 = jsondata['open_low_11'],
open_upband_11 = jsondata['open_upband_11'],
open_lowband_11 = jsondata['open_lowband_11'],
open_midleband_11 = jsondata['open_midleband_11'],
open_jaw_11 = jsondata['open_jaw_11'],
open_lips_11 = jsondata['open_lips_11'],
open_teeth_11 = jsondata['open_teeth_11'],
open_volume_11 = jsondata['open_volume_11'],
open_close_12 = jsondata['open_close_12'],
open_open_12 = jsondata['open_open_12'],
open_high_12 = jsondata['open_high_12'],
open_low_12 = jsondata['open_low_12'],
open_upband_12 = jsondata['open_upband_12'],
open_lowband_12 = jsondata['open_lowband_12'],
open_midleband_12 = jsondata['open_midleband_12'],
open_jaw_12 = jsondata['open_jaw_12'],
open_lips_12 = jsondata['open_lips_12'],
open_teeth_12 = jsondata['open_teeth_12'],
open_volume_12 = jsondata['open_volume_12'],
open_close_13 = jsondata['open_close_13'],
open_open_13 = jsondata['open_open_13'],
open_high_13 = jsondata['open_high_13'],
open_low_13 = jsondata['open_low_13'],
open_upband_13 = jsondata['open_upband_13'],
open_lowband_13 = jsondata['open_lowband_13'],
open_midleband_13 = jsondata['open_midleband_13'],
open_jaw_13 = jsondata['open_jaw_13'],
open_lips_13 = jsondata['open_lips_13'],
open_teeth_13 = jsondata['open_teeth_13'],
open_volume_13 = jsondata['open_volume_13'],
open_close_14 = jsondata['open_close_14'],
open_open_14 = jsondata['open_open_14'],
open_high_14 = jsondata['open_high_14'],
open_low_14 = jsondata['open_low_14'],
open_upband_14 = jsondata['open_upband_14'],
open_lowband_14 = jsondata['open_lowband_14'],
open_midleband_14 = jsondata['open_midleband_14'],
open_jaw_14 = jsondata['open_jaw_14'],
open_lips_14 = jsondata['open_lips_14'],
open_teeth_14 = jsondata['open_teeth_14'],
open_volume_14 = jsondata['open_volume_14'],
open_close_15 = jsondata['open_close_15'],
open_open_15 = jsondata['open_open_15'],
open_high_15 = jsondata['open_high_15'],
open_low_15 = jsondata['open_low_15'],
open_upband_15 = jsondata['open_upband_15'],
open_lowband_15 = jsondata['open_lowband_15'],
open_midleband_15 = jsondata['open_midleband_15'],
open_jaw_15 = jsondata['open_jaw_15'],
open_lips_15 = jsondata['open_lips_15'],
open_teeth_15 = jsondata['open_teeth_15'],
open_volume_15 = jsondata['open_volume_15'],
open_close_16 = jsondata['open_close_16'],
open_open_16 = jsondata['open_open_16'],
open_high_16 = jsondata['open_high_16'],
open_low_16 = jsondata['open_low_16'],
open_upband_16 = jsondata['open_upband_16'],
open_lowband_16 = jsondata['open_lowband_16'],
open_midleband_16 = jsondata['open_midleband_16'],
open_jaw_16 = jsondata['open_jaw_16'],
open_lips_16 = jsondata['open_lips_16'],
open_teeth_16 = jsondata['open_teeth_16'],
open_volume_16 = jsondata['open_volume_16'],
open_close_17 = jsondata['open_close_17'],
open_open_17 = jsondata['open_open_17'],
open_high_17 = jsondata['open_high_17'],
open_low_17 = jsondata['open_low_17'],
open_upband_17 = jsondata['open_upband_17'],
open_lowband_17 = jsondata['open_lowband_17'],
open_midleband_17 = jsondata['open_midleband_17'],
open_jaw_17 = jsondata['open_jaw_17'],
open_lips_17 = jsondata['open_lips_17'],
open_teeth_17 = jsondata['open_teeth_17'],
open_volume_17 = jsondata['open_volume_17'],
open_close_18 = jsondata['open_close_18'],
open_open_18 = jsondata['open_open_18'],
open_high_18 = jsondata['open_high_18'],
open_low_18 = jsondata['open_low_18'],
open_upband_18 = jsondata['open_upband_18'],
open_lowband_18 = jsondata['open_lowband_18'],
open_midleband_18 = jsondata['open_midleband_18'],
open_jaw_18 = jsondata['open_jaw_18'],
open_lips_18 = jsondata['open_lips_18'],
open_teeth_18 = jsondata['open_teeth_18'],
open_volume_18 = jsondata['open_volume_18'],
open_close_19 = jsondata['open_close_19'],
open_open_19 = jsondata['open_open_19'],
open_high_19 = jsondata['open_high_19'],
open_low_19 = jsondata['open_low_19'],
open_upband_19 = jsondata['open_upband_19'],
open_lowband_19 = jsondata['open_lowband_19'],
open_midleband_19 = jsondata['open_midleband_19'],
open_jaw_19 = jsondata['open_jaw_19'],
open_lips_19 = jsondata['open_lips_19'],
open_teeth_19 = jsondata['open_teeth_19'],
open_volume_19 = jsondata['open_volume_19'],
open_close_20 = jsondata['open_close_20'],
open_open_20 = jsondata['open_open_20'],
open_high_20 = jsondata['open_high_20'],
open_low_20 = jsondata['open_low_20'],
open_upband_20 = jsondata['open_upband_20'],
open_lowband_20 = jsondata['open_lowband_20'],
open_midleband_20 = jsondata['open_midleband_20'],
open_jaw_20 = jsondata['open_jaw_20'],
open_lips_20 = jsondata['open_lips_20'],
open_teeth_20 = jsondata['open_teeth_20'],
open_volume_20 = jsondata['open_volume_20'],
open_close_21 = jsondata['open_close_21'],
open_open_21 = jsondata['open_open_21'],
open_high_21 = jsondata['open_high_21'],
open_low_21 = jsondata['open_low_21'],
open_upband_21 = jsondata['open_upband_21'],
open_lowband_21 = jsondata['open_lowband_21'],
open_midleband_21 = jsondata['open_midleband_21'],
open_jaw_21 = jsondata['open_jaw_21'],
open_lips_21 = jsondata['open_lips_21'],
open_teeth_21 = jsondata['open_teeth_21'],
open_volume_21 = jsondata['open_volume_21'],
open_close_22 = jsondata['open_close_22'],
open_open_22 = jsondata['open_open_22'],
open_high_22 = jsondata['open_high_22'],
open_low_22 = jsondata['open_low_22'],
open_upband_22 = jsondata['open_upband_22'],
open_lowband_22 = jsondata['open_lowband_22'],
open_midleband_22 = jsondata['open_midleband_22'],
open_jaw_22 = jsondata['open_jaw_22'],
open_lips_22 = jsondata['open_lips_22'],
open_teeth_22 = jsondata['open_teeth_22'],
open_volume_22 = jsondata['open_volume_22'],
open_close_23 = jsondata['open_close_23'],
open_open_23 = jsondata['open_open_23'],
open_high_23 = jsondata['open_high_23'],
open_low_23 = jsondata['open_low_23'],
open_upband_23 = jsondata['open_upband_23'],
open_lowband_23 = jsondata['open_lowband_23'],
open_midleband_23 = jsondata['open_midleband_23'],
open_jaw_23 = jsondata['open_jaw_23'],
open_lips_23 = jsondata['open_lips_23'],
open_teeth_23 = jsondata['open_teeth_23'],
open_volume_23 = jsondata['open_volume_23'],
open_close_24 = jsondata['open_close_24'],
open_open_24 = jsondata['open_open_24'],
open_high_24 = jsondata['open_high_24'],
open_low_24 = jsondata['open_low_24'],
open_upband_24 = jsondata['open_upband_24'],
open_lowband_24 = jsondata['open_lowband_24'],
open_midleband_24 = jsondata['open_midleband_24'],
open_jaw_24 = jsondata['open_jaw_24'],
open_lips_24 = jsondata['open_lips_24'],
open_teeth_24 = jsondata['open_teeth_24'],
open_volume_24 = jsondata['open_volume_24']
)
DataSet.objects.filter(open_magicnum=jsondata['magicnum']).update(
close_close_1 = jsondata['close_close_1'],
close_open_1 = jsondata['close_open_1'],
close_high_1 = jsondata['close_high_1'],
close_low_1 = jsondata['close_low_1'],
close_upband_1 = jsondata['close_upband_1'],
close_lowband_1 = jsondata['close_lowband_1'],
close_midleband_1 = jsondata['close_midleband_1'],
close_jaw_1 = jsondata['close_jaw_1'],
close_lips_1 = jsondata['close_lips_1'],
close_teeth_1 = jsondata['close_teeth_1'],
close_volume_1 = jsondata['close_volume_1'],
close_close_2 = jsondata['close_close_2'],
close_open_2 = jsondata['close_open_2'],
close_high_2 = jsondata['close_high_2'],
close_low_2 = jsondata['close_low_2'],
close_upband_2 = jsondata['close_upband_2'],
close_lowband_2 = jsondata['close_lowband_2'],
close_midleband_2 = jsondata['close_midleband_2'],
close_jaw_2 = jsondata['close_jaw_2'],
close_lips_2 = jsondata['close_lips_2'],
close_teeth_2 = jsondata['close_teeth_2'],
close_volume_2 = jsondata['close_volume_2'],
close_close_3 = jsondata['close_close_3'],
close_open_3 = jsondata['close_open_3'],
close_high_3 = jsondata['close_high_3'],
close_low_3 = jsondata['close_low_3'],
close_upband_3 = jsondata['close_upband_3'],
close_lowband_3 = jsondata['close_lowband_3'],
close_midleband_3 = jsondata['close_midleband_3'],
close_jaw_3 = jsondata['close_jaw_3'],
close_lips_3 = jsondata['close_lips_3'],
close_teeth_3 = jsondata['close_teeth_3'],
close_volume_3 = jsondata['close_volume_3'],
close_close_4 = jsondata['close_close_4'],
close_open_4 = jsondata['close_open_4'],
close_high_4 = jsondata['close_high_4'],
close_low_4 = jsondata['close_low_4'],
close_upband_4 = jsondata['close_upband_4'],
close_lowband_4 = jsondata['close_lowband_4'],
close_midleband_4 = jsondata['close_midleband_4'],
close_jaw_4 = jsondata['close_jaw_4'],
close_lips_4 = jsondata['close_lips_4'],
close_teeth_4 = jsondata['close_teeth_4'],
close_volume_4 = jsondata['close_volume_4'],
close_close_5 = jsondata['close_close_5'],
close_open_5 = jsondata['close_open_5'],
close_high_5 = jsondata['close_high_5'],
close_low_5 = jsondata['close_low_5'],
close_upband_5 = jsondata['close_upband_5'],
close_lowband_5 = jsondata['close_lowband_5'],
close_midleband_5 = jsondata['close_midleband_5'],
close_jaw_5 = jsondata['close_jaw_5'],
close_lips_5 = jsondata['close_lips_5'],
close_teeth_5 = jsondata['close_teeth_5'],
close_volume_5 = jsondata['close_volume_5'],
close_close_6 = jsondata['close_close_6'],
close_open_6 = jsondata['close_open_6'],
close_high_6 = jsondata['close_high_6'],
close_low_6 = jsondata['close_low_6'],
close_upband_6 = jsondata['close_upband_6'],
close_lowband_6 = jsondata['close_lowband_6'],
close_midleband_6 = jsondata['close_midleband_6'],
close_jaw_6 = jsondata['close_jaw_6'],
close_lips_6 = jsondata['close_lips_6'],
close_teeth_6 = jsondata['close_teeth_6'],
close_volume_6 = jsondata['close_volume_6'],
close_close_7 = jsondata['close_close_7'],
close_open_7 = jsondata['close_open_7'],
close_high_7 = jsondata['close_high_7'],
close_low_7 = jsondata['close_low_7'],
close_upband_7 = jsondata['close_upband_7'],
close_lowband_7 = jsondata['close_lowband_7'],
close_midleband_7 = jsondata['close_midleband_7'],
close_jaw_7 = jsondata['close_jaw_7'],
close_lips_7 = jsondata['close_lips_7'],
close_teeth_7 = jsondata['close_teeth_7'],
close_volume_7 = jsondata['close_volume_7'],
close_close_8 = jsondata['close_close_8'],
close_open_8 = jsondata['close_open_8'],
close_high_8 = jsondata['close_high_8'],
close_low_8 = jsondata['close_low_8'],
close_upband_8 = jsondata['close_upband_8'],
close_lowband_8 = jsondata['close_lowband_8'],
close_midleband_8 = jsondata['close_midleband_8'],
close_jaw_8 = jsondata['close_jaw_8'],
close_lips_8 = jsondata['close_lips_8'],
close_teeth_8 = jsondata['close_teeth_8'],
close_volume_8 = jsondata['close_volume_8'],
close_close_9 = jsondata['close_close_9'],
close_open_9 = jsondata['close_open_9'],
close_high_9 = jsondata['close_high_9'],
close_low_9 = jsondata['close_low_9'],
close_upband_9 = jsondata['close_upband_9'],
close_lowband_9 = jsondata['close_lowband_9'],
close_midleband_9 = jsondata['close_midleband_9'],
close_jaw_9 = jsondata['close_jaw_9'],
close_lips_9 = jsondata['close_lips_9'],
close_teeth_9 = jsondata['close_teeth_9'],
close_volume_9 = jsondata['close_volume_9'],
close_close_10 = jsondata['close_close_10'],
close_open_10 = jsondata['close_open_10'],
close_high_10 = jsondata['close_high_10'],
close_low_10 = jsondata['close_low_10'],
close_upband_10 = jsondata['close_upband_10'],
close_lowband_10 = jsondata['close_lowband_10'],
close_midleband_10 = jsondata['close_midleband_10'],
close_jaw_10 = jsondata['close_jaw_10'],
close_lips_10 = jsondata['close_lips_10'],
close_teeth_10 = jsondata['close_teeth_10'],
close_volume_10 = jsondata['close_volume_10'],
close_close_11 = jsondata['close_close_11'],
close_open_11 = jsondata['close_open_11'],
close_high_11 = jsondata['close_high_11'],
close_low_11 = jsondata['close_low_11'],
close_upband_11 = jsondata['close_upband_11'],
close_lowband_11 = jsondata['close_lowband_11'],
close_midleband_11 = jsondata['close_midleband_11'],
close_jaw_11 = jsondata['close_jaw_11'],
close_lips_11 = jsondata['close_lips_11'],
close_teeth_11 = jsondata['close_teeth_11'],
close_volume_11 = jsondata['close_volume_11'],
close_close_12 = jsondata['close_close_12'],
close_open_12 = jsondata['close_open_12'],
close_high_12 = jsondata['close_high_12'],
close_low_12 = jsondata['close_low_12'],
close_upband_12 = jsondata['close_upband_12'],
close_lowband_12 = jsondata['close_lowband_12'],
close_midleband_12 = jsondata['close_midleband_12'],
close_jaw_12 = jsondata['close_jaw_12'],
close_lips_12 = jsondata['close_lips_12'],
close_teeth_12 = jsondata['close_teeth_12'],
close_volume_12 = jsondata['close_volume_12'],
)
DataSet.objects.filter(open_magicnum=jsondata['magicnum']).update(
close_close_13 = jsondata['close_close_13'],
close_open_13 = jsondata['close_open_13'],
close_high_13 = jsondata['close_high_13'],
close_low_13 = jsondata['close_low_13'],
close_upband_13 = jsondata['close_upband_13'],
close_lowband_13 = jsondata['close_lowband_13'],
close_midleband_13 = jsondata['close_midleband_13'],
close_jaw_13 = jsondata['close_jaw_13'],
close_lips_13 = jsondata['close_lips_13'],
close_teeth_13 = jsondata['close_teeth_13'],
close_volume_13 = jsondata['close_volume_13'],
close_close_14 = jsondata['close_close_14'],
close_open_14 = jsondata['close_open_14'],
close_high_14 = jsondata['close_high_14'],
close_low_14 = jsondata['close_low_14'],
close_upband_14 = jsondata['close_upband_14'],
close_lowband_14 = jsondata['close_lowband_14'],
close_midleband_14 = jsondata['close_midleband_14'],
close_jaw_14 = jsondata['close_jaw_14'],
close_lips_14 = jsondata['close_lips_14'],
close_teeth_14 = jsondata['close_teeth_14'],
close_volume_14 = jsondata['close_volume_14'],
close_close_15 = jsondata['close_close_15'],
close_open_15 = jsondata['close_open_15'],
close_high_15 = jsondata['close_high_15'],
close_low_15 = jsondata['close_low_15'],
close_upband_15 = jsondata['close_upband_15'],
close_lowband_15 = jsondata['close_lowband_15'],
close_midleband_15 = jsondata['close_midleband_15'],
close_jaw_15 = jsondata['close_jaw_15'],
close_lips_15 = jsondata['close_lips_15'],
close_teeth_15 = jsondata['close_teeth_15'],
close_volume_15 = jsondata['close_volume_15'],
close_close_16 = jsondata['close_close_16'],
close_open_16 = jsondata['close_open_16'],
close_high_16 = jsondata['close_high_16'],
close_low_16 = jsondata['close_low_16'],
close_upband_16 = jsondata['close_upband_16'],
close_lowband_16 = jsondata['close_lowband_16'],
close_midleband_16 = jsondata['close_midleband_16'],
close_jaw_16 = jsondata['close_jaw_16'],
close_lips_16 = jsondata['close_lips_16'],
close_teeth_16 = jsondata['close_teeth_16'],
close_volume_16 = jsondata['close_volume_16'],
close_close_17 = jsondata['close_close_17'],
close_open_17 = jsondata['close_open_17'],
close_high_17 = jsondata['close_high_17'],
close_low_17 = jsondata['close_low_17'],
close_upband_17 = jsondata['close_upband_17'],
close_lowband_17 = jsondata['close_lowband_17'],
close_midleband_17 = jsondata['close_midleband_17'],
close_jaw_17 = jsondata['close_jaw_17'],
close_lips_17 = jsondata['close_lips_17'],
close_teeth_17 = jsondata['close_teeth_17'],
close_volume_17 = jsondata['close_volume_17'],
close_close_18 = jsondata['close_close_18'],
close_open_18 = jsondata['close_open_18'],
close_high_18 = jsondata['close_high_18'],
close_low_18 = jsondata['close_low_18'],
close_upband_18 = jsondata['close_upband_18'],
close_lowband_18 = jsondata['close_lowband_18'],
close_midleband_18 = jsondata['close_midleband_18'],
close_jaw_18 = jsondata['close_jaw_18'],
close_lips_18 = jsondata['close_lips_18'],
close_teeth_18 = jsondata['close_teeth_18'],
close_volume_18 = jsondata['close_volume_18'],
close_close_19 = jsondata['close_close_19'],
close_open_19 = jsondata['close_open_19'],
close_high_19 = jsondata['close_high_19'],
close_low_19 = jsondata['close_low_19'],
close_upband_19 = jsondata['close_upband_19'],
close_lowband_19 = jsondata['close_lowband_19'],
close_midleband_19 = jsondata['close_midleband_19'],
close_jaw_19 = jsondata['close_jaw_19'],
close_lips_19 = jsondata['close_lips_19'],
close_teeth_19 = jsondata['close_teeth_19'],
close_volume_19 = jsondata['close_volume_19'],
close_close_20 = jsondata['close_close_20'],
close_open_20 = jsondata['close_open_20'],
close_high_20 = jsondata['close_high_20'],
close_low_20 = jsondata['close_low_20'],
close_upband_20 = jsondata['close_upband_20'],
close_lowband_20 = jsondata['close_lowband_20'],
close_midleband_20 = jsondata['close_midleband_20'],
close_jaw_20 = jsondata['close_jaw_20'],
close_lips_20 = jsondata['close_lips_20'],
close_teeth_20 = jsondata['close_teeth_20'],
close_volume_20 = jsondata['close_volume_20'],
close_close_21 = jsondata['close_close_21'],
close_open_21 = jsondata['close_open_21'],
close_high_21 = jsondata['close_high_21'],
close_low_21 = jsondata['close_low_21'],
close_upband_21 = jsondata['close_upband_21'],
close_lowband_21 = jsondata['close_lowband_21'],
close_midleband_21 = jsondata['close_midleband_21'],
close_jaw_21 = jsondata['close_jaw_21'],
close_lips_21 = jsondata['close_lips_21'],
close_teeth_21 = jsondata['close_teeth_21'],
close_volume_21 = jsondata['close_volume_21'],
close_close_22 = jsondata['close_close_22'],
close_open_22 = jsondata['close_open_22'],
close_high_22 = jsondata['close_high_22'],
close_low_22 = jsondata['close_low_22'],
close_upband_22 = jsondata['close_upband_22'],
close_lowband_22 = jsondata['close_lowband_22'],
close_midleband_22 = jsondata['close_midleband_22'],
close_jaw_22 = jsondata['close_jaw_22'],
close_lips_22 = jsondata['close_lips_22'],
close_teeth_22 = jsondata['close_teeth_22'],
close_volume_22 = jsondata['close_volume_22'],
close_close_23 = jsondata['close_close_23'],
close_open_23 = jsondata['close_open_23'],
close_high_23 = jsondata['close_high_23'],
close_low_23 = jsondata['close_low_23'],
close_upband_23 = jsondata['close_upband_23'],
close_lowband_23 = jsondata['close_lowband_23'],
close_midleband_23 = jsondata['close_midleband_23'],
close_jaw_23 = jsondata['close_jaw_23'],
close_lips_23 = jsondata['close_lips_23'],
close_teeth_23 = jsondata['close_teeth_23'],
close_volume_23 = jsondata['close_volume_23'],
close_close_24 = jsondata['close_close_24'],
close_open_24 = jsondata['close_open_24'],
close_high_24 = jsondata['close_high_24'],
close_low_24 = jsondata['close_low_24'],
close_upband_24 = jsondata['close_upband_24'],
close_lowband_24 = jsondata['close_lowband_24'],
close_midleband_24 = jsondata['close_midleband_24'],
close_jaw_24 = jsondata['close_jaw_24'],
close_lips_24 = jsondata['close_lips_24'],
close_teeth_24 = jsondata['close_teeth_24'],
close_volume_24 = jsondata['close_volume_24'],
close_result = jsondata['result'],
close_effectivnes = effectivnes,
close_neuron_name = jsondata['neuron_name'],
close_closeprice = jsondata['closeprice'],
close_time = DP.parse(jsondata['orderclosetime'])
)
| gpl-3.0 | -1,601,113,315,527,009,800 | 49.413008 | 135 | 0.546768 | false |
diego-d5000/MisValesMd | env/lib/python2.7/site-packages/django/core/checks/model_checks.py | 1 | 2454 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import inspect
import types
from django.apps import apps
from django.core.checks import Error, Tags, register
@register(Tags.models)
def check_all_models(app_configs=None, **kwargs):
errors = []
for model in apps.get_models():
if app_configs is None or model._meta.app_config in app_configs:
if not inspect.ismethod(model.check):
errors.append(
Error(
"The '%s.check()' class method is "
"currently overridden by %r." % (
model.__name__, model.check),
hint=None,
obj=model,
id='models.E020'
)
)
else:
errors.extend(model.check(**kwargs))
return errors
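# Illustrative sketch (not part of Django): the kind of model that
# check_all_models() above flags as models.E020 -- a class attribute that
# shadows the check() classmethod, so inspect.ismethod(model.check) is False.
# The model name is hypothetical.
#
#   class Article(models.Model):
#       check = True  # shadows Model.check()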
@register(Tags.models, Tags.signals)
def check_model_signals(app_configs=None, **kwargs):
"""
Ensure lazily referenced model signals senders are installed.
"""
# Avoid circular import
from django.db import models
errors = []
for name in dir(models.signals):
obj = getattr(models.signals, name)
if isinstance(obj, models.signals.ModelSignal):
for reference, receivers in obj.unresolved_references.items():
for receiver, _, _ in receivers:
                # The receiver is either a function or an instance of a class
                # defining a `__call__` method.
if isinstance(receiver, types.FunctionType):
description = "The '%s' function" % receiver.__name__
else:
description = "An instance of the '%s' class" % receiver.__class__.__name__
errors.append(
Error(
"%s was connected to the '%s' signal "
"with a lazy reference to the '%s' sender, "
"which has not been installed." % (
description, name, '.'.join(reference)
),
obj=receiver.__module__,
hint=None,
id='signals.E001'
)
)
return errors
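def _example_unresolved_signal_reference():
    # Illustrative sketch (not part of Django): connecting a receiver with a
    # lazy string sender whose app is never installed is exactly what
    # check_model_signals() above reports as signals.E001. The app, model and
    # receiver names here are hypothetical.
    from django.db.models.signals import post_save

    def on_saved(sender, instance, **kwargs):
        pass

    post_save.connect(on_saved, sender='blog.Post')  # 'blog' is not installed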
| mit | 6,105,422,011,354,093,000 | 36.34375 | 99 | 0.467808 | false |
napalm-automation/napalm-yang | napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes_/__init__.py | 1 | 42200 | # -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improvement)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
from . import state
from . import default_metric
from . import delay_metric
from . import expense_metric
from . import error_metric
class prefixes(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/ipv4-external-reachability/prefixes/prefixes. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: IPv4 external prefixes and reachability attributes.
"""
__slots__ = (
"_path_helper",
"_extmethods",
"__state",
"__default_metric",
"__delay_metric",
"__expense_metric",
"__error_metric",
)
_yang_name = "prefixes"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
self.__default_metric = YANGDynClass(
base=default_metric.default_metric,
is_container="container",
yang_name="default-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
self.__delay_metric = YANGDynClass(
base=delay_metric.delay_metric,
is_container="container",
yang_name="delay-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
self.__expense_metric = YANGDynClass(
base=expense_metric.expense_metric,
is_container="container",
yang_name="expense-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
self.__error_metric = YANGDynClass(
base=error_metric.error_metric,
is_container="container",
yang_name="error-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"isis",
"levels",
"level",
"link-state-database",
"lsp",
"tlvs",
"tlv",
"ipv4-external-reachability",
"prefixes",
"prefixes",
]
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/state (container)
YANG Description: State parameters of IPv4 standard prefix.
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: State parameters of IPv4 standard prefix.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """state must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
}
)
self.__state = t
if hasattr(self, "_set"):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
def _get_default_metric(self):
"""
Getter method for default_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/default_metric (container)
YANG Description: This container defines ISIS Default Metric.
"""
return self.__default_metric
def _set_default_metric(self, v, load=False):
"""
Setter method for default_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/default_metric (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_default_metric is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_default_metric() directly.
YANG Description: This container defines ISIS Default Metric.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=default_metric.default_metric,
is_container="container",
yang_name="default-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """default_metric must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=default_metric.default_metric, is_container='container', yang_name="default-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
}
)
self.__default_metric = t
if hasattr(self, "_set"):
self._set()
def _unset_default_metric(self):
self.__default_metric = YANGDynClass(
base=default_metric.default_metric,
is_container="container",
yang_name="default-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
def _get_delay_metric(self):
"""
Getter method for delay_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/delay_metric (container)
YANG Description: This container defines the ISIS delay metric.
"""
return self.__delay_metric
def _set_delay_metric(self, v, load=False):
"""
Setter method for delay_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/delay_metric (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_delay_metric is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_delay_metric() directly.
YANG Description: This container defines the ISIS delay metric.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=delay_metric.delay_metric,
is_container="container",
yang_name="delay-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """delay_metric must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=delay_metric.delay_metric, is_container='container', yang_name="delay-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
}
)
self.__delay_metric = t
if hasattr(self, "_set"):
self._set()
def _unset_delay_metric(self):
self.__delay_metric = YANGDynClass(
base=delay_metric.delay_metric,
is_container="container",
yang_name="delay-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
def _get_expense_metric(self):
"""
Getter method for expense_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/expense_metric (container)
YANG Description: This container defines the ISIS expense metric.
"""
return self.__expense_metric
def _set_expense_metric(self, v, load=False):
"""
Setter method for expense_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/expense_metric (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_expense_metric is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_expense_metric() directly.
YANG Description: This container defines the ISIS expense metric.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=expense_metric.expense_metric,
is_container="container",
yang_name="expense-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """expense_metric must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=expense_metric.expense_metric, is_container='container', yang_name="expense-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
}
)
self.__expense_metric = t
if hasattr(self, "_set"):
self._set()
def _unset_expense_metric(self):
self.__expense_metric = YANGDynClass(
base=expense_metric.expense_metric,
is_container="container",
yang_name="expense-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
def _get_error_metric(self):
"""
Getter method for error_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/error_metric (container)
YANG Description: This container defines the ISIS error metric.
"""
return self.__error_metric
def _set_error_metric(self, v, load=False):
"""
Setter method for error_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/error_metric (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_error_metric is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_error_metric() directly.
YANG Description: This container defines the ISIS error metric.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=error_metric.error_metric,
is_container="container",
yang_name="error-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """error_metric must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=error_metric.error_metric, is_container='container', yang_name="error-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
}
)
self.__error_metric = t
if hasattr(self, "_set"):
self._set()
def _unset_error_metric(self):
self.__error_metric = YANGDynClass(
base=error_metric.error_metric,
is_container="container",
yang_name="error-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
state = __builtin__.property(_get_state)
default_metric = __builtin__.property(_get_default_metric)
delay_metric = __builtin__.property(_get_delay_metric)
expense_metric = __builtin__.property(_get_expense_metric)
error_metric = __builtin__.property(_get_error_metric)
_pyangbind_elements = OrderedDict(
[
("state", state),
("default_metric", default_metric),
("delay_metric", delay_metric),
("expense_metric", expense_metric),
("error_metric", error_metric),
]
)
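def _example_prefixes_usage():
    # Minimal usage sketch for the generated class above (an assumed call
    # pattern; in practice these objects are built by the parent container
    # rather than instantiated directly):
    p = prefixes()
    assert p._path()[-2:] == ["prefixes", "prefixes"]
    return p.state, p.default_metric  # read-only properties (config false)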
from . import state
from . import default_metric
from . import delay_metric
from . import expense_metric
from . import error_metric
class prefixes(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/ipv4-external-reachability/prefixes/prefixes. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: IPv4 external prefixes and reachability attributes.
"""
__slots__ = (
"_path_helper",
"_extmethods",
"__state",
"__default_metric",
"__delay_metric",
"__expense_metric",
"__error_metric",
)
_yang_name = "prefixes"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
self.__default_metric = YANGDynClass(
base=default_metric.default_metric,
is_container="container",
yang_name="default-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
self.__delay_metric = YANGDynClass(
base=delay_metric.delay_metric,
is_container="container",
yang_name="delay-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
self.__expense_metric = YANGDynClass(
base=expense_metric.expense_metric,
is_container="container",
yang_name="expense-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
self.__error_metric = YANGDynClass(
base=error_metric.error_metric,
is_container="container",
yang_name="error-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"protocols",
"protocol",
"isis",
"levels",
"level",
"link-state-database",
"lsp",
"tlvs",
"tlv",
"ipv4-external-reachability",
"prefixes",
"prefixes",
]
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/state (container)
YANG Description: State parameters of IPv4 standard prefix.
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: State parameters of IPv4 standard prefix.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """state must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
}
)
self.__state = t
if hasattr(self, "_set"):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
def _get_default_metric(self):
"""
Getter method for default_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/default_metric (container)
YANG Description: This container defines ISIS Default Metric.
"""
return self.__default_metric
def _set_default_metric(self, v, load=False):
"""
Setter method for default_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/default_metric (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_default_metric is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_default_metric() directly.
YANG Description: This container defines ISIS Default Metric.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=default_metric.default_metric,
is_container="container",
yang_name="default-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """default_metric must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=default_metric.default_metric, is_container='container', yang_name="default-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
}
)
self.__default_metric = t
if hasattr(self, "_set"):
self._set()
def _unset_default_metric(self):
self.__default_metric = YANGDynClass(
base=default_metric.default_metric,
is_container="container",
yang_name="default-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
def _get_delay_metric(self):
"""
Getter method for delay_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/delay_metric (container)
YANG Description: This container defines the ISIS delay metric.
"""
return self.__delay_metric
def _set_delay_metric(self, v, load=False):
"""
Setter method for delay_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/delay_metric (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_delay_metric is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_delay_metric() directly.
YANG Description: This container defines the ISIS delay metric.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=delay_metric.delay_metric,
is_container="container",
yang_name="delay-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """delay_metric must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=delay_metric.delay_metric, is_container='container', yang_name="delay-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
}
)
self.__delay_metric = t
if hasattr(self, "_set"):
self._set()
def _unset_delay_metric(self):
self.__delay_metric = YANGDynClass(
base=delay_metric.delay_metric,
is_container="container",
yang_name="delay-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
def _get_expense_metric(self):
"""
Getter method for expense_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/expense_metric (container)
YANG Description: This container defines the ISIS expense metric.
"""
return self.__expense_metric
def _set_expense_metric(self, v, load=False):
"""
Setter method for expense_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/expense_metric (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_expense_metric is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_expense_metric() directly.
YANG Description: This container defines the ISIS expense metric.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=expense_metric.expense_metric,
is_container="container",
yang_name="expense-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """expense_metric must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=expense_metric.expense_metric, is_container='container', yang_name="expense-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
}
)
self.__expense_metric = t
if hasattr(self, "_set"):
self._set()
def _unset_expense_metric(self):
self.__expense_metric = YANGDynClass(
base=expense_metric.expense_metric,
is_container="container",
yang_name="expense-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
def _get_error_metric(self):
"""
Getter method for error_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/error_metric (container)
YANG Description: This container defines the ISIS error metric.
"""
return self.__error_metric
def _set_error_metric(self, v, load=False):
"""
Setter method for error_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/error_metric (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_error_metric is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_error_metric() directly.
YANG Description: This container defines the ISIS error metric.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=error_metric.error_metric,
is_container="container",
yang_name="error-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """error_metric must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=error_metric.error_metric, is_container='container', yang_name="error-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
}
)
self.__error_metric = t
if hasattr(self, "_set"):
self._set()
def _unset_error_metric(self):
self.__error_metric = YANGDynClass(
base=error_metric.error_metric,
is_container="container",
yang_name="error-metric",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="container",
is_config=False,
)
state = __builtin__.property(_get_state)
default_metric = __builtin__.property(_get_default_metric)
delay_metric = __builtin__.property(_get_delay_metric)
expense_metric = __builtin__.property(_get_expense_metric)
error_metric = __builtin__.property(_get_error_metric)
_pyangbind_elements = OrderedDict(
[
("state", state),
("default_metric", default_metric),
("delay_metric", delay_metric),
("expense_metric", expense_metric),
("error_metric", error_metric),
]
)
| apache-2.0 | -214,556,804,422,065,730 | 41.157842 | 402 | 0.593128 | false |
Thraxis/pymedusa | sickbeard/server/web/config/notifications.py | 1 | 17493 | # coding=utf-8
"""
Configure notifications
"""
from __future__ import unicode_literals
import os
from tornado.routes import route
import sickbeard
from sickbeard import (
config, logger, ui,
)
from sickrage.helper.common import try_int
from sickrage.helper.encoding import ek
from sickbeard.server.web.core import PageTemplate
from sickbeard.server.web.config.handler import Config
@route('/config/notifications(/?.*)')
class ConfigNotifications(Config):
"""
Handler for notification configuration
"""
def __init__(self, *args, **kwargs):
super(ConfigNotifications, self).__init__(*args, **kwargs)
def index(self):
"""
Render the notification configuration page
"""
t = PageTemplate(rh=self, filename='config_notifications.mako')
return t.render(submenu=self.ConfigMenu(), title='Config - Notifications',
header='Notifications', topmenu='config',
controller='config', action='notifications')
def saveNotifications(self, use_kodi=None, kodi_always_on=None, kodi_notify_onsnatch=None,
kodi_notify_ondownload=None,
kodi_notify_onsubtitledownload=None, kodi_update_onlyfirst=None,
kodi_update_library=None, kodi_update_full=None, kodi_host=None, kodi_username=None,
kodi_password=None,
use_plex_server=None, plex_notify_onsnatch=None, plex_notify_ondownload=None,
plex_notify_onsubtitledownload=None, plex_update_library=None,
plex_server_host=None, plex_server_token=None, plex_client_host=None, plex_server_username=None, plex_server_password=None,
use_plex_client=None, plex_client_username=None, plex_client_password=None,
plex_server_https=None, use_emby=None, emby_host=None, emby_apikey=None,
use_growl=None, growl_notify_onsnatch=None, growl_notify_ondownload=None,
growl_notify_onsubtitledownload=None, growl_host=None, growl_password=None,
use_freemobile=None, freemobile_notify_onsnatch=None, freemobile_notify_ondownload=None,
freemobile_notify_onsubtitledownload=None, freemobile_id=None, freemobile_apikey=None,
use_telegram=None, telegram_notify_onsnatch=None, telegram_notify_ondownload=None,
telegram_notify_onsubtitledownload=None, telegram_id=None, telegram_apikey=None,
use_prowl=None, prowl_notify_onsnatch=None, prowl_notify_ondownload=None,
prowl_notify_onsubtitledownload=None, prowl_api=None, prowl_priority=0,
prowl_show_list=None, prowl_show=None, prowl_message_title=None,
use_twitter=None, twitter_notify_onsnatch=None, twitter_notify_ondownload=None,
twitter_notify_onsubtitledownload=None, twitter_usedm=None, twitter_dmto=None,
use_boxcar2=None, boxcar2_notify_onsnatch=None, boxcar2_notify_ondownload=None,
boxcar2_notify_onsubtitledownload=None, boxcar2_accesstoken=None,
use_pushover=None, pushover_notify_onsnatch=None, pushover_notify_ondownload=None,
pushover_notify_onsubtitledownload=None, pushover_userkey=None, pushover_apikey=None, pushover_device=None, pushover_sound=None,
use_libnotify=None, libnotify_notify_onsnatch=None, libnotify_notify_ondownload=None,
libnotify_notify_onsubtitledownload=None,
use_nmj=None, nmj_host=None, nmj_database=None, nmj_mount=None, use_synoindex=None,
use_nmjv2=None, nmjv2_host=None, nmjv2_dbloc=None, nmjv2_database=None,
use_trakt=None, trakt_username=None, trakt_pin=None,
trakt_remove_watchlist=None, trakt_sync_watchlist=None, trakt_remove_show_from_sickrage=None, trakt_method_add=None,
trakt_start_paused=None, trakt_use_recommended=None, trakt_sync=None, trakt_sync_remove=None,
trakt_default_indexer=None, trakt_remove_serieslist=None, trakt_timeout=None, trakt_blacklist_name=None,
use_synologynotifier=None, synologynotifier_notify_onsnatch=None,
synologynotifier_notify_ondownload=None, synologynotifier_notify_onsubtitledownload=None,
use_pytivo=None, pytivo_notify_onsnatch=None, pytivo_notify_ondownload=None,
pytivo_notify_onsubtitledownload=None, pytivo_update_library=None,
pytivo_host=None, pytivo_share_name=None, pytivo_tivo_name=None,
use_nma=None, nma_notify_onsnatch=None, nma_notify_ondownload=None,
nma_notify_onsubtitledownload=None, nma_api=None, nma_priority=0,
use_pushalot=None, pushalot_notify_onsnatch=None, pushalot_notify_ondownload=None,
pushalot_notify_onsubtitledownload=None, pushalot_authorizationtoken=None,
use_pushbullet=None, pushbullet_notify_onsnatch=None, pushbullet_notify_ondownload=None,
pushbullet_notify_onsubtitledownload=None, pushbullet_api=None, pushbullet_device=None,
pushbullet_device_list=None,
use_email=None, email_notify_onsnatch=None, email_notify_ondownload=None,
email_notify_onsubtitledownload=None, email_host=None, email_port=25, email_from=None,
email_tls=None, email_user=None, email_password=None, email_list=None, email_subject=None, email_show_list=None,
email_show=None):
"""
Save notification related settings
"""
results = []
sickbeard.USE_KODI = config.checkbox_to_value(use_kodi)
sickbeard.KODI_ALWAYS_ON = config.checkbox_to_value(kodi_always_on)
sickbeard.KODI_NOTIFY_ONSNATCH = config.checkbox_to_value(kodi_notify_onsnatch)
sickbeard.KODI_NOTIFY_ONDOWNLOAD = config.checkbox_to_value(kodi_notify_ondownload)
sickbeard.KODI_NOTIFY_ONSUBTITLEDOWNLOAD = config.checkbox_to_value(kodi_notify_onsubtitledownload)
sickbeard.KODI_UPDATE_LIBRARY = config.checkbox_to_value(kodi_update_library)
sickbeard.KODI_UPDATE_FULL = config.checkbox_to_value(kodi_update_full)
sickbeard.KODI_UPDATE_ONLYFIRST = config.checkbox_to_value(kodi_update_onlyfirst)
sickbeard.KODI_HOST = config.clean_hosts(kodi_host)
sickbeard.KODI_USERNAME = kodi_username
sickbeard.KODI_PASSWORD = kodi_password
sickbeard.USE_PLEX_SERVER = config.checkbox_to_value(use_plex_server)
sickbeard.PLEX_NOTIFY_ONSNATCH = config.checkbox_to_value(plex_notify_onsnatch)
sickbeard.PLEX_NOTIFY_ONDOWNLOAD = config.checkbox_to_value(plex_notify_ondownload)
sickbeard.PLEX_NOTIFY_ONSUBTITLEDOWNLOAD = config.checkbox_to_value(plex_notify_onsubtitledownload)
sickbeard.PLEX_UPDATE_LIBRARY = config.checkbox_to_value(plex_update_library)
sickbeard.PLEX_CLIENT_HOST = config.clean_hosts(plex_client_host)
sickbeard.PLEX_SERVER_HOST = config.clean_hosts(plex_server_host)
sickbeard.PLEX_SERVER_TOKEN = config.clean_host(plex_server_token)
sickbeard.PLEX_SERVER_USERNAME = plex_server_username
if plex_server_password != '*' * len(sickbeard.PLEX_SERVER_PASSWORD):
sickbeard.PLEX_SERVER_PASSWORD = plex_server_password
sickbeard.USE_PLEX_CLIENT = config.checkbox_to_value(use_plex_client)
sickbeard.PLEX_CLIENT_USERNAME = plex_client_username
if plex_client_password != '*' * len(sickbeard.PLEX_CLIENT_PASSWORD):
sickbeard.PLEX_CLIENT_PASSWORD = plex_client_password
sickbeard.PLEX_SERVER_HTTPS = config.checkbox_to_value(plex_server_https)
sickbeard.USE_EMBY = config.checkbox_to_value(use_emby)
sickbeard.EMBY_HOST = config.clean_host(emby_host)
sickbeard.EMBY_APIKEY = emby_apikey
sickbeard.USE_GROWL = config.checkbox_to_value(use_growl)
sickbeard.GROWL_NOTIFY_ONSNATCH = config.checkbox_to_value(growl_notify_onsnatch)
sickbeard.GROWL_NOTIFY_ONDOWNLOAD = config.checkbox_to_value(growl_notify_ondownload)
sickbeard.GROWL_NOTIFY_ONSUBTITLEDOWNLOAD = config.checkbox_to_value(growl_notify_onsubtitledownload)
sickbeard.GROWL_HOST = config.clean_host(growl_host, default_port=23053)
sickbeard.GROWL_PASSWORD = growl_password
sickbeard.USE_FREEMOBILE = config.checkbox_to_value(use_freemobile)
sickbeard.FREEMOBILE_NOTIFY_ONSNATCH = config.checkbox_to_value(freemobile_notify_onsnatch)
sickbeard.FREEMOBILE_NOTIFY_ONDOWNLOAD = config.checkbox_to_value(freemobile_notify_ondownload)
sickbeard.FREEMOBILE_NOTIFY_ONSUBTITLEDOWNLOAD = config.checkbox_to_value(freemobile_notify_onsubtitledownload)
sickbeard.FREEMOBILE_ID = freemobile_id
sickbeard.FREEMOBILE_APIKEY = freemobile_apikey
sickbeard.USE_TELEGRAM = config.checkbox_to_value(use_telegram)
sickbeard.TELEGRAM_NOTIFY_ONSNATCH = config.checkbox_to_value(telegram_notify_onsnatch)
sickbeard.TELEGRAM_NOTIFY_ONDOWNLOAD = config.checkbox_to_value(telegram_notify_ondownload)
sickbeard.TELEGRAM_NOTIFY_ONSUBTITLEDOWNLOAD = config.checkbox_to_value(telegram_notify_onsubtitledownload)
sickbeard.TELEGRAM_ID = telegram_id
sickbeard.TELEGRAM_APIKEY = telegram_apikey
sickbeard.USE_PROWL = config.checkbox_to_value(use_prowl)
sickbeard.PROWL_NOTIFY_ONSNATCH = config.checkbox_to_value(prowl_notify_onsnatch)
sickbeard.PROWL_NOTIFY_ONDOWNLOAD = config.checkbox_to_value(prowl_notify_ondownload)
sickbeard.PROWL_NOTIFY_ONSUBTITLEDOWNLOAD = config.checkbox_to_value(prowl_notify_onsubtitledownload)
sickbeard.PROWL_API = prowl_api
sickbeard.PROWL_PRIORITY = prowl_priority
sickbeard.PROWL_MESSAGE_TITLE = prowl_message_title
sickbeard.USE_TWITTER = config.checkbox_to_value(use_twitter)
sickbeard.TWITTER_NOTIFY_ONSNATCH = config.checkbox_to_value(twitter_notify_onsnatch)
sickbeard.TWITTER_NOTIFY_ONDOWNLOAD = config.checkbox_to_value(twitter_notify_ondownload)
sickbeard.TWITTER_NOTIFY_ONSUBTITLEDOWNLOAD = config.checkbox_to_value(twitter_notify_onsubtitledownload)
sickbeard.TWITTER_USEDM = config.checkbox_to_value(twitter_usedm)
sickbeard.TWITTER_DMTO = twitter_dmto
sickbeard.USE_BOXCAR2 = config.checkbox_to_value(use_boxcar2)
sickbeard.BOXCAR2_NOTIFY_ONSNATCH = config.checkbox_to_value(boxcar2_notify_onsnatch)
sickbeard.BOXCAR2_NOTIFY_ONDOWNLOAD = config.checkbox_to_value(boxcar2_notify_ondownload)
sickbeard.BOXCAR2_NOTIFY_ONSUBTITLEDOWNLOAD = config.checkbox_to_value(boxcar2_notify_onsubtitledownload)
sickbeard.BOXCAR2_ACCESSTOKEN = boxcar2_accesstoken
sickbeard.USE_PUSHOVER = config.checkbox_to_value(use_pushover)
sickbeard.PUSHOVER_NOTIFY_ONSNATCH = config.checkbox_to_value(pushover_notify_onsnatch)
sickbeard.PUSHOVER_NOTIFY_ONDOWNLOAD = config.checkbox_to_value(pushover_notify_ondownload)
sickbeard.PUSHOVER_NOTIFY_ONSUBTITLEDOWNLOAD = config.checkbox_to_value(pushover_notify_onsubtitledownload)
sickbeard.PUSHOVER_USERKEY = pushover_userkey
sickbeard.PUSHOVER_APIKEY = pushover_apikey
sickbeard.PUSHOVER_DEVICE = pushover_device
sickbeard.PUSHOVER_SOUND = pushover_sound
sickbeard.USE_LIBNOTIFY = config.checkbox_to_value(use_libnotify)
sickbeard.LIBNOTIFY_NOTIFY_ONSNATCH = config.checkbox_to_value(libnotify_notify_onsnatch)
sickbeard.LIBNOTIFY_NOTIFY_ONDOWNLOAD = config.checkbox_to_value(libnotify_notify_ondownload)
sickbeard.LIBNOTIFY_NOTIFY_ONSUBTITLEDOWNLOAD = config.checkbox_to_value(libnotify_notify_onsubtitledownload)
sickbeard.USE_NMJ = config.checkbox_to_value(use_nmj)
sickbeard.NMJ_HOST = config.clean_host(nmj_host)
sickbeard.NMJ_DATABASE = nmj_database
sickbeard.NMJ_MOUNT = nmj_mount
sickbeard.USE_NMJv2 = config.checkbox_to_value(use_nmjv2)
sickbeard.NMJv2_HOST = config.clean_host(nmjv2_host)
sickbeard.NMJv2_DATABASE = nmjv2_database
sickbeard.NMJv2_DBLOC = nmjv2_dbloc
sickbeard.USE_SYNOINDEX = config.checkbox_to_value(use_synoindex)
sickbeard.USE_SYNOLOGYNOTIFIER = config.checkbox_to_value(use_synologynotifier)
sickbeard.SYNOLOGYNOTIFIER_NOTIFY_ONSNATCH = config.checkbox_to_value(synologynotifier_notify_onsnatch)
sickbeard.SYNOLOGYNOTIFIER_NOTIFY_ONDOWNLOAD = config.checkbox_to_value(synologynotifier_notify_ondownload)
sickbeard.SYNOLOGYNOTIFIER_NOTIFY_ONSUBTITLEDOWNLOAD = config.checkbox_to_value(
synologynotifier_notify_onsubtitledownload)
config.change_USE_TRAKT(use_trakt)
sickbeard.TRAKT_USERNAME = trakt_username
sickbeard.TRAKT_REMOVE_WATCHLIST = config.checkbox_to_value(trakt_remove_watchlist)
sickbeard.TRAKT_REMOVE_SERIESLIST = config.checkbox_to_value(trakt_remove_serieslist)
sickbeard.TRAKT_REMOVE_SHOW_FROM_SICKRAGE = config.checkbox_to_value(trakt_remove_show_from_sickrage)
sickbeard.TRAKT_SYNC_WATCHLIST = config.checkbox_to_value(trakt_sync_watchlist)
sickbeard.TRAKT_METHOD_ADD = int(trakt_method_add)
sickbeard.TRAKT_START_PAUSED = config.checkbox_to_value(trakt_start_paused)
sickbeard.TRAKT_USE_RECOMMENDED = config.checkbox_to_value(trakt_use_recommended)
sickbeard.TRAKT_SYNC = config.checkbox_to_value(trakt_sync)
sickbeard.TRAKT_SYNC_REMOVE = config.checkbox_to_value(trakt_sync_remove)
sickbeard.TRAKT_DEFAULT_INDEXER = int(trakt_default_indexer)
sickbeard.TRAKT_TIMEOUT = int(trakt_timeout)
sickbeard.TRAKT_BLACKLIST_NAME = trakt_blacklist_name
sickbeard.USE_EMAIL = config.checkbox_to_value(use_email)
sickbeard.EMAIL_NOTIFY_ONSNATCH = config.checkbox_to_value(email_notify_onsnatch)
sickbeard.EMAIL_NOTIFY_ONDOWNLOAD = config.checkbox_to_value(email_notify_ondownload)
sickbeard.EMAIL_NOTIFY_ONSUBTITLEDOWNLOAD = config.checkbox_to_value(email_notify_onsubtitledownload)
sickbeard.EMAIL_HOST = config.clean_host(email_host)
sickbeard.EMAIL_PORT = try_int(email_port, 25)
sickbeard.EMAIL_FROM = email_from
sickbeard.EMAIL_TLS = config.checkbox_to_value(email_tls)
sickbeard.EMAIL_USER = email_user
sickbeard.EMAIL_PASSWORD = email_password
sickbeard.EMAIL_LIST = email_list
sickbeard.EMAIL_SUBJECT = email_subject
sickbeard.USE_PYTIVO = config.checkbox_to_value(use_pytivo)
sickbeard.PYTIVO_NOTIFY_ONSNATCH = config.checkbox_to_value(pytivo_notify_onsnatch)
sickbeard.PYTIVO_NOTIFY_ONDOWNLOAD = config.checkbox_to_value(pytivo_notify_ondownload)
sickbeard.PYTIVO_NOTIFY_ONSUBTITLEDOWNLOAD = config.checkbox_to_value(pytivo_notify_onsubtitledownload)
sickbeard.PYTIVO_UPDATE_LIBRARY = config.checkbox_to_value(pytivo_update_library)
sickbeard.PYTIVO_HOST = config.clean_host(pytivo_host)
sickbeard.PYTIVO_SHARE_NAME = pytivo_share_name
sickbeard.PYTIVO_TIVO_NAME = pytivo_tivo_name
sickbeard.USE_NMA = config.checkbox_to_value(use_nma)
sickbeard.NMA_NOTIFY_ONSNATCH = config.checkbox_to_value(nma_notify_onsnatch)
sickbeard.NMA_NOTIFY_ONDOWNLOAD = config.checkbox_to_value(nma_notify_ondownload)
sickbeard.NMA_NOTIFY_ONSUBTITLEDOWNLOAD = config.checkbox_to_value(nma_notify_onsubtitledownload)
sickbeard.NMA_API = nma_api
sickbeard.NMA_PRIORITY = nma_priority
sickbeard.USE_PUSHALOT = config.checkbox_to_value(use_pushalot)
sickbeard.PUSHALOT_NOTIFY_ONSNATCH = config.checkbox_to_value(pushalot_notify_onsnatch)
sickbeard.PUSHALOT_NOTIFY_ONDOWNLOAD = config.checkbox_to_value(pushalot_notify_ondownload)
sickbeard.PUSHALOT_NOTIFY_ONSUBTITLEDOWNLOAD = config.checkbox_to_value(pushalot_notify_onsubtitledownload)
sickbeard.PUSHALOT_AUTHORIZATIONTOKEN = pushalot_authorizationtoken
sickbeard.USE_PUSHBULLET = config.checkbox_to_value(use_pushbullet)
sickbeard.PUSHBULLET_NOTIFY_ONSNATCH = config.checkbox_to_value(pushbullet_notify_onsnatch)
sickbeard.PUSHBULLET_NOTIFY_ONDOWNLOAD = config.checkbox_to_value(pushbullet_notify_ondownload)
sickbeard.PUSHBULLET_NOTIFY_ONSUBTITLEDOWNLOAD = config.checkbox_to_value(pushbullet_notify_onsubtitledownload)
sickbeard.PUSHBULLET_API = pushbullet_api
sickbeard.PUSHBULLET_DEVICE = pushbullet_device_list
sickbeard.save_config()
if results:
for x in results:
logger.log(x, logger.ERROR)
ui.notifications.error('Error(s) Saving Configuration',
'<br>\n'.join(results))
else:
ui.notifications.message('Configuration Saved', ek(os.path.join, sickbeard.CONFIG_FILE))
return self.redirect('/config/notifications/')
| gpl-3.0 | -4,339,851,400,314,656,300 | 64.02974 | 154 | 0.694449 | false |
poliastro/poliastro | tests/test_bodies.py | 1 | 2126 | import pytest
from astropy import units as u
from astropy.tests.helper import assert_quantity_allclose
from poliastro.bodies import Body, Earth, Jupiter, Sun
def test_body_has_k_given_in_constructor():
k = 3.98e5 * u.km ** 3 / u.s ** 2
earth = Body(None, k, "")
assert earth.k == k
def test_body_from_parameters_raises_valueerror_if_k_units_not_correct():
wrong_k = 4902.8 * u.kg
_name = _symbol = ""
_R = 0
with pytest.raises(u.UnitsError) as excinfo:
Body.from_parameters(None, wrong_k, _name, _symbol, _R)
assert (
"UnitsError: Argument 'k' to function 'from_parameters' must be in units convertible to 'km3 / s2'."
in excinfo.exconly()
)
def test_body_from_parameters_returns_body_object():
k = 1.26712763e17 * u.m ** 3 / u.s ** 2
R = 71492000 * u.m
_name = _symbol = "jupiter"
jupiter = Body.from_parameters(Sun, k, _name, _symbol, Jupiter.R)
assert jupiter.k == k
assert jupiter.R == R
def test_body_printing_has_name_and_symbol():
name = "2 Pallas"
symbol = u"\u26b4"
k = 1.41e10 * u.m ** 3 / u.s ** 2
pallas2 = Body(None, k, name, symbol)
assert name in str(pallas2)
assert symbol in str(pallas2)
def test_earth_has_k_given_in_literature():
expected_k = 3.986004418e14 * u.m ** 3 / u.s ** 2
k = Earth.k
assert_quantity_allclose(k.decompose([u.km, u.s]), expected_k)
def test_earth_has_angular_velocity_given_in_literature():
expected_k = 7.292114e-5 * u.rad / u.s
k = Earth.angular_velocity
assert_quantity_allclose(k.decompose([u.rad, u.s]), expected_k)
def test_from_relative():
    TRAPPIST1 = Body.from_relative(
        reference=Sun,
        parent=None,
        k=0.08,  # Relative to the Sun
        name="TRAPPIST",
        symbol=None,
        R=0.114,  # Relative to the Sun
    )
# Check values properly calculated
VALUECHECK = Body.from_relative(
reference=Earth,
parent=TRAPPIST1,
k=1,
name="VALUECHECK",
symbol=None,
R=1,
)
assert Earth.k == VALUECHECK.k
assert Earth.R == VALUECHECK.R
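    # Scaling sketch implied by the construction above (an assumption based on
    # the 'Relative to the Sun' comments): TRAPPIST1.k ~ 0.08 * Sun.k and
    # TRAPPIST1.R ~ 0.114 * Sun.R, so k=1, R=1 relative to Earth gives Earth's.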
| mit | 3,506,300,801,196,985,300 | 26.61039 | 108 | 0.61571 | false |
lferr/charm | charm/test/schemes/dabenc_test.py | 1 | 3191 | from charm.schemes.dabe_aw11 import Dabe
from charm.adapters.dabenc_adapt_hybrid import HybridABEncMA
from charm.toolbox.pairinggroup import PairingGroup, GT
import unittest
debug = False
class DabeTest(unittest.TestCase):
def testDabe(self):
groupObj = PairingGroup('SS512')
dabe = Dabe(groupObj)
GP = dabe.setup()
#Setup an authority
auth_attrs= ['ONE', 'TWO', 'THREE', 'FOUR']
(SK, PK) = dabe.authsetup(GP, auth_attrs)
if debug: print("Authority SK")
if debug: print(SK)
#Setup a user and give him some keys
gid, K = "bob", {}
usr_attrs = ['THREE', 'ONE', 'TWO']
for i in usr_attrs: dabe.keygen(GP, SK, i, gid, K)
if debug: print('User credential list: %s' % usr_attrs)
if debug: print("\nSecret key:")
if debug: groupObj.debug(K)
#Encrypt a random element in GT
m = groupObj.random(GT)
policy = '((one or three) and (TWO or FOUR))'
if debug: print('Acces Policy: %s' % policy)
CT = dabe.encrypt(PK, GP, m, policy)
if debug: print("\nCiphertext...")
if debug: groupObj.debug(CT)
orig_m = dabe.decrypt(GP, K, CT)
assert m == orig_m, 'FAILED Decryption!!!'
if debug: print('Successful Decryption!')
class HybridABEncMATest(unittest.TestCase):
def testHybridABEncMA(self):
groupObj = PairingGroup('SS512')
dabe = Dabe(groupObj)
hyb_abema = HybridABEncMA(dabe, groupObj)
#Setup global parameters for all new authorities
gp = hyb_abema.setup()
#Instantiate a few authorities
#Attribute names must be globally unique. HybridABEncMA
#Two authorities may not issue keys for the same attribute.
#Otherwise, the decryption algorithm will not know which private key to use
jhu_attributes = ['jhu.professor', 'jhu.staff', 'jhu.student']
jhmi_attributes = ['jhmi.doctor', 'jhmi.nurse', 'jhmi.staff', 'jhmi.researcher']
(jhuSK, jhuPK) = hyb_abema.authsetup(gp, jhu_attributes)
(jhmiSK, jhmiPK) = hyb_abema.authsetup(gp, jhmi_attributes)
allAuthPK = {}; allAuthPK.update(jhuPK); allAuthPK.update(jhmiPK)
#Setup a user with a few keys
bobs_gid = "20110615 [email protected] cryptokey"
K = {}
hyb_abema.keygen(gp, jhuSK,'jhu.professor', bobs_gid, K)
hyb_abema.keygen(gp, jhmiSK,'jhmi.researcher', bobs_gid, K)
msg = b'Hello World, I am a sensitive record!'
size = len(msg)
policy_str = "(jhmi.doctor or (jhmi.researcher and jhu.professor))"
ct = hyb_abema.encrypt(allAuthPK, gp, msg, policy_str)
if debug:
print("Ciphertext")
print("c1 =>", ct['c1'])
print("c2 =>", ct['c2'])
decrypted_msg = hyb_abema.decrypt(gp, K, ct)
if debug: print("Result =>", decrypted_msg)
assert decrypted_msg == msg, "Failed Decryption!!!"
if debug: print("Successful Decryption!!!")
del groupObj
if __name__ == "__main__":
unittest.main()
| lgpl-3.0 | 3,195,663,149,788,621,300 | 36.104651 | 88 | 0.587277 | false |
ahmetcemturan/SFACT | skeinforge_application/skeinforge_plugins/craft_plugins/limit.py | 1 | 8282 | #! /usr/bin/env python
"""
This page is in the table of contents.
This plugin limits the feed rate of the tool head, so that the stepper motors are not driven too fast and skip steps.
The limit manual page is at:
http://fabmetheus.crsndoo.com/wiki/index.php/Skeinforge_Limit
The maximum z feed rate is defined in speed.
==Operation==
The default 'Activate Limit' checkbox is on. When it is on, the functions described below will work; when it is off, nothing will be done.
==Settings==
===Maximum Initial Feed Rate===
Default is one millimeter per second.
Defines the maximum speed of the initial tool head move.
==Examples==
The following examples limit the file Screw Holder Bottom.stl. The examples are run in a terminal in the folder which contains Screw Holder Bottom.stl and limit.py.
> python limit.py
This brings up the limit dialog.
> python limit.py Screw Holder Bottom.stl
The limit tool is parsing the file:
Screw Holder Bottom.stl
..
The limit tool has created the file:
.. Screw Holder Bottom_limit.gcode
"""
#Init has to be imported first because it has code to work around the python bug where relative imports don't work if the module is imported as a main module.
import __init__
from datetime import date
from fabmetheus_utilities.fabmetheus_tools import fabmetheus_interpret
from fabmetheus_utilities.vector3 import Vector3
from fabmetheus_utilities import archive
from fabmetheus_utilities import euclidean
from fabmetheus_utilities import gcodec
from fabmetheus_utilities import intercircle
from fabmetheus_utilities import settings
from skeinforge_application.skeinforge_utilities import skeinforge_craft
from skeinforge_application.skeinforge_utilities import skeinforge_polyfile
from skeinforge_application.skeinforge_utilities import skeinforge_profile
import math
import os
import sys
__author__ = 'Enrique Perez ([email protected])'
__date__ = '$Date: 2008/28/04 $'
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
def getCraftedText(fileName, gcodeText='', repository=None):
'Limit a gcode file or text.'
return getCraftedTextFromText( archive.getTextIfEmpty(fileName, gcodeText), repository )
def getCraftedTextFromText(gcodeText, repository=None):
'Limit a gcode text.'
if gcodec.isProcedureDoneOrFileIsEmpty(gcodeText, 'limit'):
return gcodeText
if repository == None:
repository = settings.getReadRepository(LimitRepository())
if not repository.activateLimit.value:
return gcodeText
return LimitSkein().getCraftedGcode(gcodeText, repository)
def getNewRepository():
'Get new repository.'
return LimitRepository()
def writeOutput(fileName, shouldAnalyze=True):
'Limit a gcode file.'
skeinforge_craft.writeChainTextWithNounMessage(fileName, 'limit', shouldAnalyze)
class LimitRepository:
'A class to handle the limit settings.'
def __init__(self):
'Set the default settings, execute title & settings fileName.'
skeinforge_profile.addListsToCraftTypeRepository('skeinforge_application.skeinforge_plugins.craft_plugins.limit.html', self )
self.fileNameInput = settings.FileNameInput().getFromFileName( fabmetheus_interpret.getGNUTranslatorGcodeFileTypeTuples(), 'Open File for Limit', self, '')
self.openWikiManualHelpPage = settings.HelpPage().getOpenFromAbsolute('http://fabmetheus.crsndoo.com/wiki/index.php/Skeinforge_Limit')
self.activateLimit = settings.BooleanSetting().getFromValue('Activate Limit', self, False)
self.maximumInitialFeedRate = settings.FloatSpin().getFromValue(0.5, 'Maximum Initial Feed Rate (mm/s):', self, 10.0, 1.0)
self.executeTitle = 'Limit'
def execute(self):
'Limit button has been clicked.'
fileNames = skeinforge_polyfile.getFileOrDirectoryTypesUnmodifiedGcode(self.fileNameInput.value, fabmetheus_interpret.getImportPluginFileNames(), self.fileNameInput.wasCancelled)
for fileName in fileNames:
writeOutput(fileName)
class LimitSkein:
'A class to limit a skein of extrusions.'
def __init__(self):
self.distanceFeedRate = gcodec.DistanceFeedRate()
self.feedRateMinute = None
self.lineIndex = 0
self.maximumZDrillFeedRatePerSecond = 987654321.0
self.maximumZFeedRatePerSecond = 2.0
self.oldLocation = None
def getCraftedGcode(self, gcodeText, repository):
'Parse gcode text and store the limit gcode.'
self.repository = repository
self.lines = archive.getTextLines(gcodeText)
self.parseInitialization()
self.maximumZDrillFeedRatePerSecond = min(self.maximumZDrillFeedRatePerSecond, self.maximumZFeedRatePerSecond)
self.maximumZCurrentFeedRatePerSecond = self.maximumZFeedRatePerSecond
for lineIndex in xrange(self.lineIndex, len(self.lines)):
self.parseLine( lineIndex )
return self.distanceFeedRate.output.getvalue()
def getLimitedInitialMovement(self, line, splitLine):
'Get a limited linear movement.'
if self.oldLocation == None:
line = self.distanceFeedRate.getLineWithFeedRate(60.0 * self.repository.maximumInitialFeedRate.value, line, splitLine)
return line
def getZLimitedLine(self, deltaZ, distance, line, splitLine):
'Get a replaced z limited gcode movement line.'
zFeedRateSecond = self.feedRateMinute * deltaZ / distance / 60.0
if zFeedRateSecond <= self.maximumZCurrentFeedRatePerSecond:
return line
limitedFeedRateMinute = self.feedRateMinute * self.maximumZCurrentFeedRatePerSecond / zFeedRateSecond
return self.distanceFeedRate.getLineWithFeedRate(limitedFeedRateMinute, line, splitLine)
def getZLimitedLineArc(self, line, splitLine):
'Get a replaced z limited gcode arc movement line.'
self.feedRateMinute = gcodec.getFeedRateMinute(self.feedRateMinute, splitLine)
if self.feedRateMinute == None or self.oldLocation == None:
return line
relativeLocation = gcodec.getLocationFromSplitLine(self.oldLocation, splitLine)
self.oldLocation += relativeLocation
deltaZ = abs(relativeLocation.z)
distance = gcodec.getArcDistance(relativeLocation, splitLine)
return self.getZLimitedLine(deltaZ, distance, line, splitLine)
def getZLimitedLineLinear(self, line, location, splitLine):
'Get a replaced z limited gcode linear movement line.'
self.feedRateMinute = gcodec.getFeedRateMinute(self.feedRateMinute, splitLine)
if location == self.oldLocation:
return ''
if self.feedRateMinute == None or self.oldLocation == None:
return line
deltaZ = abs(location.z - self.oldLocation.z)
distance = abs(location - self.oldLocation)
return self.getZLimitedLine(deltaZ, distance, line, splitLine)
def parseInitialization(self):
'Parse gcode initialization and store the parameters.'
for self.lineIndex in xrange(len(self.lines)):
line = self.lines[self.lineIndex]
splitLine = gcodec.getSplitLineBeforeBracketSemicolon(line)
firstWord = gcodec.getFirstWord(splitLine)
self.distanceFeedRate.parseSplitLine(firstWord, splitLine)
if firstWord == '(</extruderInitialization>)':
self.distanceFeedRate.addTagBracketedProcedure('limit')
return
elif firstWord == '(<maximumZDrillFeedRatePerSecond>':
self.maximumZDrillFeedRatePerSecond = float(splitLine[1])
elif firstWord == '(<maximumZFeedRatePerSecond>':
self.maximumZFeedRatePerSecond = float(splitLine[1])
self.distanceFeedRate.addLine(line)
def parseLine( self, lineIndex ):
'Parse a gcode line and add it to the limit skein.'
line = self.lines[lineIndex].lstrip()
splitLine = gcodec.getSplitLineBeforeBracketSemicolon(line)
if len(splitLine) < 1:
return
firstWord = gcodec.getFirstWord(splitLine)
if firstWord == 'G1':
location = gcodec.getLocationFromSplitLine(self.oldLocation, splitLine)
line = self.getLimitedInitialMovement(line, splitLine)
line = self.getZLimitedLineLinear(line, location, splitLine)
self.oldLocation = location
elif firstWord == 'G2' or firstWord == 'G3':
line = self.getZLimitedLineArc(line, splitLine)
elif firstWord == 'M101':
self.maximumZCurrentFeedRatePerSecond = self.maximumZDrillFeedRatePerSecond
elif firstWord == 'M103':
self.maximumZCurrentFeedRatePerSecond = self.maximumZFeedRatePerSecond
self.distanceFeedRate.addLine(line)
def main():
'Display the limit dialog.'
if len(sys.argv) > 1:
writeOutput(' '.join(sys.argv[1 :]))
else:
settings.startMainLoopFromConstructor(getNewRepository())
if __name__ == '__main__':
main()
| agpl-3.0 | 246,727,834,341,910,940 | 40 | 180 | 0.781574 | false |
mfnch/pyrtist | old/web/in/examples/create_example.py | 1 | 2754 | import sys, os, os.path, commands, re
usage = "USAGE: python create_example.py box.example"
if len(sys.argv) != 2:
raise "Expected one argument.\n" + usage
example_file = sys.argv[1]
print "Working on '%s'..." % example_file
# Default values for variables which may be changed inside example_file
in_directory = ".."
box = "box -l g"
convert = "convert"
convert_opts = ""
highlight = "%s/../katehighlight/bin/highlight" % in_directory
rst_skeleton = "skeleton"
rst_out = None
title = None
description = None
figure_caption = None
box_source = None
out_eps = None
out_png = None
_f = open(example_file)
exec(_f)
_f.close()
if title == None:
title = "Box example: %s" % crumb
print "Removing old figure if present..."
if out_eps and os.access(out_eps, os.W_OK):
try:
os.remove(out_eps)
except:
print "Failed to remove the figure: continuing anyway..."
print "Executing the Box program..."
print commands.getoutput("%s %s" % (box, box_source))
have_figure = False
if out_eps and os.access(out_eps, os.R_OK):
print "Adjusting eps figure..."
out_png = os.path.splitext(out_eps)[0] + ".png"
print commands.getoutput("%s %s %s %s" %
(convert, convert_opts, out_eps, out_png))
print out_png
have_figure = os.access(out_png, os.R_OK)
if not have_figure:
raise "The figure '%s' has not been produced: stopping here!" % out_png
print "Highlighting the Box source..."
highlighted_source = "/tmp/h.html"
print commands.getoutput("%s Box %s %s" % (highlight, box_source, highlighted_source))
f = open(highlighted_source, "r")
htmlized_box_program = f.read()
f.close()
print "Opening the skeleton..."
f = open(rst_skeleton, "r")
data_skeleton = f.read()
f.close()
vars_dict = {
'title': title,
'description': description,
'crumb': crumb,
'box_file':box_source,
'figure_caption':figure_caption,
'image': out_png,
'htmlized_box_program': htmlized_box_program
}
r = re.compile("[$][^$]*[$]")
def substitutor(var):
try:
var_name = var.group(0)[1:-1]
except:
raise "Error when substituting variable."
if vars_dict.has_key(var_name):
return str(vars_dict[var_name])
print "WARNING: Variable '%s' not found!" % var_name
return var.group(0)
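# For example (hypothetical skeleton text, not from this repo): a line such as
# "<h1>$title$</h1>" in the skeleton becomes "<h1>Box example: ...</h1>" after
# the substitution below, since every $name$ token is looked up in vars_dict.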
print "Filling the skeleton..."
out = re.sub(r, substitutor, data_skeleton)
f = open(rst_out, "w")
f.write(out)
f.close()
print "Output produced (%s)" % rst_out
print "Generating thumbnail..."
html_out = os.path.splitext(out_png)[0] + ".html"
out_thumb_png = "small_" + out_png
scale_opts = "-scale 100"
print commands.getoutput("%s %s %s %s"
% (convert, scale_opts, out_png, out_thumb_png))
f = open("thumbnails.dat", "a")
f.write("%s, %s\n" % (html_out, out_thumb_png))
f.close()
| lgpl-2.1 | 1,265,988,056,238,007,300 | 24.738318 | 86 | 0.649601 | false |
pelodelfuego/word2vec-toolbox | toolbox/cpLib/test/testConcept.py | 1 | 2049 | #!/usr/bin/env python
# encoding: utf-8
import unittest
import cpLib.concept as cp
import cpLib.conceptDB as db
import numpy as np
class ConceptTest(unittest.TestCase):
def setUp(self):
self.d = db.DB('../data/voc/npy/googleNews_mini.npy')
def test_transform(self):
k = self.d.get('king')
norm = np.linalg.norm(k.vect)
k_p = k.polarVect()
k_a = k.angularVect()
for a, b in zip(np.concatenate(([norm], k_a)), k_p):
self.assertAlmostEquals(a, b, places=5)
# DISTANCE
def test_cosSim(self):
k = self.d.get('king')
q = self.d.get('queen')
self.assertAlmostEquals(cp.cosSim(k, q), cp.cosSim(q, k), places=5)
self.assertAlmostEquals(cp.cosSim(k, k), 1.0, places=5)
def test_euclDist(self):
k = self.d.get('king')
q = self.d.get('queen')
self.assertEqual(cp.euclDist(k, q), cp.euclDist(q, k))
self.assertAlmostEquals(cp.euclDist(k, k), 0.0, places=5)
def test_manaDist(self):
k = self.d.get('king')
q = self.d.get('queen')
self.assertEqual(cp.manaDist(k, q), cp.manaDist(q, k))
self.assertAlmostEquals(cp.manaDist(k, k), 0.0, places=5)
# OPERATION
def test_arith(self):
# k - m = q - w
k = self.d.get('king')
q = self.d.get('queen')
m = self.d.get('man')
w = self.d.get('woman')
v1 = cp.add(k, w)
v1 = cp.sub(v1, m)
v2 = cp.sub(k, m)
v2 = cp.add(v2, w)
v3 = cp.addSub([k, w], [m])
v4 = cp.sub(k.normalized(), m.normalized())
v4 = cp.add(v4, w.normalized())
self.assertAlmostEquals(cp.cosSim(v1, v2), 1.0, places=5)
self.assertAlmostEquals(cp.cosSim(v3, v4), 1.0, places=5)
self.assertEquals(self.d.find_cosSim(v1)[0][1], 'queen')
self.assertEquals(self.d.find_cosSim(v2)[0][1], 'queen')
self.assertEquals(self.d.find_cosSim(v3)[0][1], 'queen')
self.assertEquals(self.d.find_cosSim(v4)[0][1], 'queen')
| gpl-3.0 | 1,787,303,736,457,299,700 | 26.32 | 75 | 0.561249 | false |
tkwon/dj-stripe | djstripe/migrations/0025_auto_20170322_0428.py | 1 | 3906 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-03-22 04:28
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('djstripe', '0024_auto_20170308_0757'),
]
operations = [
migrations.AlterField(
model_name='account',
name='created',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='account',
name='modified',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='charge',
name='created',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='charge',
name='modified',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='customer',
name='created',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='customer',
name='modified',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='event',
name='created',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='event',
name='modified',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='eventprocessingexception',
name='created',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='eventprocessingexception',
name='modified',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='invoice',
name='created',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='invoice',
name='modified',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='invoiceitem',
name='created',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='invoiceitem',
name='modified',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='plan',
name='created',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='plan',
name='modified',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='stripesource',
name='created',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='stripesource',
name='modified',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='subscription',
name='created',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='subscription',
name='modified',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='transfer',
name='created',
field=models.DateTimeField(auto_now_add=True),
),
migrations.AlterField(
model_name='transfer',
name='modified',
field=models.DateTimeField(auto_now=True),
),
]
| mit | 8,493,379,797,407,598,000 | 30.248 | 58 | 0.536354 | false |
eqcorrscan/ci.testing | eqcorrscan/utils/stacking.py | 1 | 6254 | """
Utility module of the EQcorrscan package to allow for different methods of \
stacking of seismic signal in one place.
:copyright:
EQcorrscan developers.
:license:
GNU Lesser General Public License, Version 3
(https://www.gnu.org/copyleft/lesser.html)
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
from scipy.signal import hilbert
from copy import deepcopy
from eqcorrscan.core.match_filter import normxcorr2
def linstack(streams, normalize=True):
"""
Compute the linear stack of a series of seismic streams of \
multiplexed data.
:type streams: list
:param streams: List of streams to stack
:type normalize: bool
    :param normalize: Normalize traces before stacking; normalization is by \
        the RMS amplitude.
:returns: stacked data
:rtype: :class:`obspy.core.stream.Stream`
"""
stack = streams[np.argmax([len(stream) for stream in streams])].copy()
if normalize:
for tr in stack:
tr.data = tr.data / np.sqrt(np.mean(np.square(tr.data)))
tr.data = np.nan_to_num(tr.data)
for i in range(1, len(streams)):
for tr in stack:
matchtr = streams[i].select(station=tr.stats.station,
channel=tr.stats.channel)
if matchtr:
# Normalize the data before stacking
if normalize:
norm = matchtr[0].data /\
np.sqrt(np.mean(np.square(matchtr[0].data)))
norm = np.nan_to_num(norm)
else:
norm = matchtr[0].data
tr.data = np.sum((norm, tr.data), axis=0)
return stack
def PWS_stack(streams, weight=2, normalize=True):
"""
Compute the phase weighted stack of a series of streams.
.. note:: It is recommended to align the traces before stacking.
:type streams: list
:param streams: List of :class:`obspy.core.stream.Stream` to stack.
:type weight: float
:param weight: Exponent to the phase stack used for weighting.
:type normalize: bool
:param normalize: Normalize traces before stacking.
:return: Stacked stream.
:rtype: :class:`obspy.core.stream.Stream`
"""
# First get the linear stack which we will weight by the phase stack
Linstack = linstack(streams)
# Compute the instantaneous phase
instaphases = []
print("Computing instantaneous phase")
for stream in streams:
instaphase = stream.copy()
for tr in instaphase:
analytic = hilbert(tr.data)
envelope = np.sqrt(np.sum((np.square(analytic),
np.square(tr.data)), axis=0))
tr.data = analytic / envelope
instaphases.append(instaphase)
# Compute the phase stack
print("Computing the phase stack")
Phasestack = linstack(instaphases, normalize=normalize)
# Compute the phase-weighted stack
for tr in Phasestack:
tr.data = Linstack.select(station=tr.stats.station)[0].data *\
np.abs(tr.data ** weight)
return Phasestack
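# A minimal usage sketch for the two stacking functions above (the stream list
# and file names are hypothetical, and the traces should already be aligned):
#
# from obspy import read
# streams = [read('event_%02d.ms' % i) for i in range(5)]
# linear = linstack(streams)
# phase_weighted = PWS_stack(streams, weight=2)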
def align_traces(trace_list, shift_len, master=False, positive=False,
plot=False):
"""
Align traces relative to each other based on their cross-correlation value.
Uses the :func:`obspy.signal.cross_correlation.xcorr` function to find the
optimum shift to align traces relative to a master event. Either uses a
given master to align traces, or uses the first trace in the list.
.. Note::
The cross-correlation function may yield an error/warning
about shift_len being too large: this is raised by the
        :func:`obspy.signal.cross_correlation.xcorr` routine when the shift_len
        is greater than half the length of either the master or a trace, in
        which case the correlation will not be robust. We may switch to a
        different correlation routine later.
:type trace_list: list
:param trace_list: List of traces to align
:type shift_len: int
:param shift_len: Length to allow shifting within in samples
:type master: obspy.core.trace.Trace
:param master: Master trace to align to, if set to False will align to \
the largest amplitude trace (default)
:type positive: bool
:param positive: Return the maximum positive cross-correlation, or the \
absolute maximum, defaults to False (absolute maximum).
:type plot: bool
:param plot: If true, will plot each trace aligned with the master.
:returns: list of shifts and correlations for best alignment in seconds.
:rtype: list
"""
from eqcorrscan.utils.plotting import xcorr_plot
traces = deepcopy(trace_list)
if not master:
# Use trace with largest MAD amplitude as master
master = traces[0]
MAD_master = np.median(np.abs(master.data))
for i in range(1, len(traces)):
if np.median(np.abs(traces[i])) > MAD_master:
master = traces[i]
MAD_master = np.median(np.abs(master.data))
else:
print('Using master given by user')
shifts = []
ccs = []
for i in range(len(traces)):
if not master.stats.sampling_rate == traces[i].stats.sampling_rate:
raise ValueError('Sampling rates not the same')
cc_vec = normxcorr2(template=traces[i].data.
astype(np.float32)[shift_len:-shift_len],
image=master.data.astype(np.float32))
cc_vec = cc_vec[0]
shift = np.abs(cc_vec).argmax()
cc = cc_vec[shift]
if plot:
xcorr_plot(template=traces[i].data.
astype(np.float32)[shift_len:-shift_len],
image=master.data.astype(np.float32), shift=shift,
cc=cc)
shift -= shift_len
if cc < 0 and positive:
cc = cc_vec.max()
shift = cc_vec.argmax() - shift_len
shifts.append(shift / master.stats.sampling_rate)
ccs.append(cc)
return shifts, ccs
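# Usage sketch for align_traces (hypothetical trace list; check the sign
# convention against your own data before applying the shifts):
#
# shifts, ccs = align_traces(trace_list, shift_len=50)
# for tr, shift in zip(trace_list, shifts):
#     tr.stats.starttime += shift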
if __name__ == "__main__":
import doctest
doctest.testmod()
| lgpl-3.0 | 6,233,989,075,923,252,000 | 35.573099 | 79 | 0.624081 | false |
gspilio/nova | nova/network/quantumv2/api.py | 1 | 41934 | # Copyright 2012 OpenStack Foundation
# All Rights Reserved
# Copyright (c) 2012 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# vim: tabstop=4 shiftwidth=4 softtabstop=4
import time
from oslo.config import cfg
from nova.compute import instance_types
from nova import conductor
from nova import context
from nova.db import base
from nova import exception
from nova.network import api as network_api
from nova.network import model as network_model
from nova.network import quantumv2
from nova.network.security_group import openstack_driver
from nova.openstack.common import excutils
from nova.openstack.common import log as logging
from nova.openstack.common import uuidutils
quantum_opts = [
cfg.StrOpt('quantum_url',
default='http://127.0.0.1:9696',
help='URL for connecting to quantum'),
cfg.IntOpt('quantum_url_timeout',
default=30,
help='timeout value for connecting to quantum in seconds'),
cfg.StrOpt('quantum_admin_username',
help='username for connecting to quantum in admin context'),
cfg.StrOpt('quantum_admin_password',
help='password for connecting to quantum in admin context',
secret=True),
cfg.StrOpt('quantum_admin_tenant_name',
help='tenant name for connecting to quantum in admin context'),
cfg.StrOpt('quantum_region_name',
help='region name for connecting to quantum in admin context'),
cfg.StrOpt('quantum_admin_auth_url',
default='http://localhost:5000/v2.0',
help='auth url for connecting to quantum in admin context'),
cfg.BoolOpt('quantum_api_insecure',
default=False,
help='if set, ignore any SSL validation issues'),
cfg.StrOpt('quantum_auth_strategy',
default='keystone',
help='auth strategy for connecting to '
'quantum in admin context'),
# TODO(berrange) temporary hack until Quantum can pass over the
# name of the OVS bridge it is configured with
cfg.StrOpt('quantum_ovs_bridge',
default='br-int',
help='Name of Integration Bridge used by Open vSwitch'),
cfg.IntOpt('quantum_extension_sync_interval',
default=600,
help='Number of seconds before querying quantum for'
' extensions'),
]
CONF = cfg.CONF
CONF.register_opts(quantum_opts)
CONF.import_opt('default_floating_pool', 'nova.network.floating_ips')
CONF.import_opt('flat_injected', 'nova.network.manager')
LOG = logging.getLogger(__name__)
NET_EXTERNAL = 'router:external'
refresh_cache = network_api.refresh_cache
update_instance_info_cache = network_api.update_instance_cache_with_nw_info
class API(base.Base):
"""API for interacting with the quantum 2.x API."""
conductor_api = conductor.API()
security_group_api = openstack_driver.get_openstack_security_group_driver()
def __init__(self):
super(API, self).__init__()
self.last_quantum_extension_sync = None
self.extensions = {}
def setup_networks_on_host(self, context, instance, host=None,
teardown=False):
"""Setup or teardown the network structures."""
def _get_available_networks(self, context, project_id,
net_ids=None):
"""Return a network list available for the tenant.
The list contains networks owned by the tenant and public networks.
If net_ids specified, it searches networks with requested IDs only.
"""
quantum = quantumv2.get_client(context)
        # If the user has specified that the instance should attach only to
        # specific networks, add them to **search_opts.
# (1) Retrieve non-public network list owned by the tenant.
search_opts = {"tenant_id": project_id, 'shared': False}
if net_ids:
search_opts['id'] = net_ids
nets = quantum.list_networks(**search_opts).get('networks', [])
# (2) Retrieve public network list.
search_opts = {'shared': True}
if net_ids:
search_opts['id'] = net_ids
nets += quantum.list_networks(**search_opts).get('networks', [])
_ensure_requested_network_ordering(
lambda x: x['id'],
nets,
net_ids)
return nets
@refresh_cache
def allocate_for_instance(self, context, instance, **kwargs):
"""Allocate network resources for the instance.
TODO(someone): document the rest of these parameters.
:param macs: None or a set of MAC addresses that the instance
should use. macs is supplied by the hypervisor driver (contrast
with requested_networks which is user supplied).
NB: QuantumV2 currently assigns hypervisor supplied MAC addresses
to arbitrary networks, which requires openflow switches to
function correctly if more than one network is being used with
the bare metal hypervisor (which is the only one known to limit
MAC addresses).
"""
hypervisor_macs = kwargs.get('macs', None)
available_macs = None
if hypervisor_macs is not None:
# Make a copy we can mutate: records macs that have not been used
# to create a port on a network. If we find a mac with a
# pre-allocated port we also remove it from this set.
available_macs = set(hypervisor_macs)
quantum = quantumv2.get_client(context)
LOG.debug(_('allocate_for_instance() for %s'),
instance['display_name'])
if not instance['project_id']:
msg = _('empty project id for instance %s')
raise exception.InvalidInput(
reason=msg % instance['display_name'])
requested_networks = kwargs.get('requested_networks')
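        # requested_networks, when provided, is a list of
        # (network_id, fixed_ip, port_id) tuples; any element may be None,
        # e.g. [('net-uuid', '10.0.0.5', None)] (illustrative values).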
ports = {}
fixed_ips = {}
net_ids = []
if requested_networks:
for network_id, fixed_ip, port_id in requested_networks:
if port_id:
port = quantum.show_port(port_id)['port']
if hypervisor_macs is not None:
if port['mac_address'] not in hypervisor_macs:
raise exception.PortNotUsable(port_id=port_id,
instance=instance['display_name'])
else:
# Don't try to use this MAC if we need to create a
# port on the fly later. Identical MACs may be
# configured by users into multiple ports so we
# discard rather than popping.
available_macs.discard(port['mac_address'])
network_id = port['network_id']
ports[network_id] = port
elif fixed_ip and network_id:
fixed_ips[network_id] = fixed_ip
if network_id:
net_ids.append(network_id)
nets = self._get_available_networks(context, instance['project_id'],
net_ids)
security_groups = kwargs.get('security_groups', [])
security_group_ids = []
# TODO(arosen) Should optimize more to do direct query for security
# group if len(security_groups) == 1
if len(security_groups):
search_opts = {'tenant_id': instance['project_id']}
user_security_groups = quantum.list_security_groups(
**search_opts).get('security_groups')
for security_group in security_groups:
name_match = None
uuid_match = None
for user_security_group in user_security_groups:
if user_security_group['name'] == security_group:
if name_match:
msg = (_("Multiple security groups found matching"
" '%s'. Use an ID to be more specific."),
security_group)
raise exception.NoUniqueMatch(msg)
name_match = user_security_group['id']
if user_security_group['id'] == security_group:
uuid_match = user_security_group['id']
# If a user names the security group the same as
# another's security groups uuid, the name takes priority.
if not name_match and not uuid_match:
raise exception.SecurityGroupNotFound(
security_group_id=security_group)
security_group_ids.append(name_match)
elif name_match:
security_group_ids.append(name_match)
elif uuid_match:
security_group_ids.append(uuid_match)
touched_port_ids = []
created_port_ids = []
for network in nets:
# If security groups are requested on an instance then the
            # network must have a subnet associated with it. Some plugins
# implement the port-security extension which requires
# 'port_security_enabled' to be True for security groups.
# That is why True is returned if 'port_security_enabled'
# is not found.
if (security_groups and not (
network['subnets']
and network.get('port_security_enabled', True))):
raise exception.SecurityGroupCannotBeApplied()
network_id = network['id']
zone = 'compute:%s' % instance['availability_zone']
port_req_body = {'port': {'device_id': instance['uuid'],
'device_owner': zone}}
try:
port = ports.get(network_id)
if port:
quantum.update_port(port['id'], port_req_body)
touched_port_ids.append(port['id'])
else:
fixed_ip = fixed_ips.get(network_id)
if fixed_ip:
port_req_body['port']['fixed_ips'] = [{'ip_address':
fixed_ip}]
port_req_body['port']['network_id'] = network_id
port_req_body['port']['admin_state_up'] = True
port_req_body['port']['tenant_id'] = instance['project_id']
if security_group_ids:
port_req_body['port']['security_groups'] = (
security_group_ids)
if available_macs is not None:
if not available_macs:
raise exception.PortNotFree(
instance=instance['display_name'])
mac_address = available_macs.pop()
port_req_body['port']['mac_address'] = mac_address
self._populate_quantum_extension_values(instance,
port_req_body)
created_port_ids.append(
quantum.create_port(port_req_body)['port']['id'])
except Exception:
with excutils.save_and_reraise_exception():
for port_id in touched_port_ids:
port_in_server = quantum.show_port(port_id).get('port')
if not port_in_server:
raise Exception(_('Port not found'))
port_req_body = {'port': {'device_id': None}}
quantum.update_port(port_id, port_req_body)
for port_id in created_port_ids:
try:
quantum.delete_port(port_id)
except Exception as ex:
msg = _("Fail to delete port %(portid)s with"
" failure: %(exception)s")
LOG.debug(msg, {'portid': port_id,
'exception': ex})
self.trigger_security_group_members_refresh(context, instance)
self.trigger_instance_add_security_group_refresh(context, instance)
nw_info = self._get_instance_nw_info(context, instance, networks=nets)
# NOTE(danms): Only return info about ports we created in this run.
# In the initial allocation case, this will be everything we created,
# and in later runs will only be what was created that time. Thus,
# this only affects the attach case, not the original use for this
# method.
return network_model.NetworkInfo([port for port in nw_info
if port['id'] in created_port_ids +
touched_port_ids])
def _refresh_quantum_extensions_cache(self):
if (not self.last_quantum_extension_sync or
((time.time() - self.last_quantum_extension_sync)
>= CONF.quantum_extension_sync_interval)):
quantum = quantumv2.get_client(context.get_admin_context())
extensions_list = quantum.list_extensions()['extensions']
self.last_quantum_extension_sync = time.time()
self.extensions.clear()
self.extensions = dict((ext['name'], ext)
for ext in extensions_list)
def _populate_quantum_extension_values(self, instance, port_req_body):
self._refresh_quantum_extensions_cache()
if 'nvp-qos' in self.extensions:
instance_type = instance_types.extract_instance_type(instance)
rxtx_factor = instance_type.get('rxtx_factor')
port_req_body['port']['rxtx_factor'] = rxtx_factor
def deallocate_for_instance(self, context, instance, **kwargs):
"""Deallocate all network resources related to the instance."""
LOG.debug(_('deallocate_for_instance() for %s'),
instance['display_name'])
search_opts = {'device_id': instance['uuid']}
data = quantumv2.get_client(context).list_ports(**search_opts)
ports = data.get('ports', [])
for port in ports:
try:
quantumv2.get_client(context).delete_port(port['id'])
except Exception as ex:
LOG.exception(_("Failed to delete quantum port %(portid)s ")
% {'portid': port['id']})
self.trigger_security_group_members_refresh(context, instance)
self.trigger_instance_remove_security_group_refresh(context, instance)
@refresh_cache
def allocate_port_for_instance(self, context, instance, port_id,
network_id=None, requested_ip=None,
conductor_api=None):
return self.allocate_for_instance(context, instance,
requested_networks=[(network_id, requested_ip, port_id)],
conductor_api=conductor_api)
@refresh_cache
def deallocate_port_for_instance(self, context, instance, port_id,
conductor_api=None):
try:
quantumv2.get_client(context).delete_port(port_id)
except Exception as ex:
LOG.exception(_("Failed to delete quantum port %(port_id)s ") %
locals())
self.trigger_security_group_members_refresh(context, instance)
self.trigger_instance_remove_security_group_refresh(context, instance)
return self._get_instance_nw_info(context, instance)
def list_ports(self, context, **search_opts):
return quantumv2.get_client(context).list_ports(**search_opts)
def show_port(self, context, port_id):
return quantumv2.get_client(context).show_port(port_id)
def get_instance_nw_info(self, context, instance, conductor_api=None,
networks=None):
result = self._get_instance_nw_info(context, instance, networks)
update_instance_info_cache(self, context, instance, result,
conductor_api)
return result
def _get_instance_nw_info(self, context, instance, networks=None):
LOG.debug(_('get_instance_nw_info() for %s'),
instance['display_name'])
nw_info = self._build_network_info_model(context, instance, networks)
return network_model.NetworkInfo.hydrate(nw_info)
@refresh_cache
def add_fixed_ip_to_instance(self, context, instance, network_id,
conductor_api=None):
"""Add a fixed ip to the instance from specified network."""
search_opts = {'network_id': network_id}
data = quantumv2.get_client(context).list_subnets(**search_opts)
ipam_subnets = data.get('subnets', [])
if not ipam_subnets:
raise exception.NetworkNotFoundForInstance(
instance_id=instance['uuid'])
zone = 'compute:%s' % instance['availability_zone']
search_opts = {'device_id': instance['uuid'],
'device_owner': zone,
'network_id': network_id}
data = quantumv2.get_client(context).list_ports(**search_opts)
ports = data['ports']
for p in ports:
for subnet in ipam_subnets:
fixed_ips = p['fixed_ips']
fixed_ips.append({'subnet_id': subnet['id']})
port_req_body = {'port': {'fixed_ips': fixed_ips}}
try:
quantumv2.get_client(context).update_port(p['id'],
port_req_body)
return
except Exception as ex:
msg = _("Unable to update port %(portid)s on subnet "
"%(subnet_id)s with failure: %(exception)s")
LOG.debug(msg, {'portid': p['id'],
'subnet_id': subnet['id'],
'exception': ex})
raise exception.NetworkNotFoundForInstance(
instance_id=instance['uuid'])
@refresh_cache
def remove_fixed_ip_from_instance(self, context, instance, address,
conductor_api=None):
"""Remove a fixed ip from the instance."""
zone = 'compute:%s' % instance['availability_zone']
search_opts = {'device_id': instance['uuid'],
'device_owner': zone,
'fixed_ips': 'ip_address=%s' % address}
data = quantumv2.get_client(context).list_ports(**search_opts)
ports = data['ports']
for p in ports:
fixed_ips = p['fixed_ips']
new_fixed_ips = []
for fixed_ip in fixed_ips:
if fixed_ip['ip_address'] != address:
new_fixed_ips.append(fixed_ip)
port_req_body = {'port': {'fixed_ips': new_fixed_ips}}
try:
quantumv2.get_client(context).update_port(p['id'],
port_req_body)
except Exception as ex:
msg = _("Unable to update port %(portid)s with"
" failure: %(exception)s")
LOG.debug(msg, {'portid': p['id'], 'exception': ex})
return
raise exception.FixedIpNotFoundForSpecificInstance(
instance_uuid=instance['uuid'], ip=address)
def validate_networks(self, context, requested_networks):
"""Validate that the tenant can use the requested networks."""
LOG.debug(_('validate_networks() for %s'),
requested_networks)
if not requested_networks:
return
net_ids = []
for (net_id, _i, port_id) in requested_networks:
if not port_id:
net_ids.append(net_id)
continue
port = quantumv2.get_client(context).show_port(port_id).get('port')
if not port:
raise exception.PortNotFound(port_id=port_id)
if port.get('device_id', None):
raise exception.PortInUse(port_id=port_id)
net_id = port['network_id']
if net_id in net_ids:
raise exception.NetworkDuplicated(network_id=net_id)
net_ids.append(net_id)
nets = self._get_available_networks(context, context.project_id,
net_ids)
if len(nets) != len(net_ids):
            requested_netid_set = set(net_ids)
            returned_netid_set = set([net['id'] for net in nets])
            lostid_set = requested_netid_set - returned_netid_set
id_str = ''
for _id in lostid_set:
id_str = id_str and id_str + ', ' + _id or _id
raise exception.NetworkNotFound(network_id=id_str)
def _get_instance_uuids_by_ip(self, context, address):
"""Retrieve instance uuids associated with the given ip address.
:returns: A list of dicts containing the uuids keyed by 'instance_uuid'
e.g. [{'instance_uuid': uuid}, ...]
"""
search_opts = {"fixed_ips": 'ip_address=%s' % address}
data = quantumv2.get_client(context).list_ports(**search_opts)
ports = data.get('ports', [])
return [{'instance_uuid': port['device_id']} for port in ports
if port['device_id']]
def get_instance_uuids_by_ip_filter(self, context, filters):
"""Return a list of dicts in the form of
[{'instance_uuid': uuid}] that matched the ip filter.
"""
# filters['ip'] is composed as '^%s$' % fixed_ip.replace('.', '\\.')
ip = filters.get('ip')
        # we remove ^, $ and \ from the ip filter
if ip[0] == '^':
ip = ip[1:]
if ip[-1] == '$':
ip = ip[:-1]
ip = ip.replace('\\.', '.')
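        # e.g. filters = {'ip': '^10\\.0\\.0\\.5$'} is reduced to '10.0.0.5'
        # (illustrative value) before the port lookup below.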
return self._get_instance_uuids_by_ip(context, ip)
def trigger_instance_add_security_group_refresh(self, context,
instance_ref):
admin_context = context.elevated()
for group in instance_ref['security_groups']:
self.conductor_api.security_groups_trigger_handler(context,
'instance_add_security_group', instance_ref, group['name'])
def trigger_instance_remove_security_group_refresh(self, context,
instance_ref):
admin_context = context.elevated()
for group in instance_ref['security_groups']:
self.conductor_api.security_groups_trigger_handler(context,
'instance_remove_security_group', instance_ref, group['name'])
def trigger_security_group_members_refresh(self, context, instance_ref):
admin_context = context.elevated()
group_ids = [group['id'] for group in instance_ref['security_groups']]
self.conductor_api.security_groups_trigger_members_refresh(
admin_context, group_ids)
self.conductor_api.security_groups_trigger_handler(admin_context,
'security_group_members', group_ids)
def _get_port_id_by_fixed_address(self, client,
instance, address):
zone = 'compute:%s' % instance['availability_zone']
search_opts = {'device_id': instance['uuid'],
'device_owner': zone}
data = client.list_ports(**search_opts)
ports = data['ports']
port_id = None
for p in ports:
for ip in p['fixed_ips']:
if ip['ip_address'] == address:
port_id = p['id']
break
if not port_id:
raise exception.FixedIpNotFoundForAddress(address=address)
return port_id
@refresh_cache
def associate_floating_ip(self, context, instance,
floating_address, fixed_address,
affect_auto_assigned=False):
"""Associate a floating ip with a fixed ip."""
# Note(amotoki): 'affect_auto_assigned' is not respected
        # since it is not used anywhere in nova code and I could not
        # find why this parameter exists.
client = quantumv2.get_client(context)
port_id = self._get_port_id_by_fixed_address(client, instance,
fixed_address)
fip = self._get_floating_ip_by_address(client, floating_address)
param = {'port_id': port_id,
'fixed_ip_address': fixed_address}
client.update_floatingip(fip['id'], {'floatingip': param})
def get_all(self, context):
client = quantumv2.get_client(context)
networks = client.list_networks().get('networks') or {}
for network in networks:
network['label'] = network['name']
return networks
def get(self, context, network_uuid):
client = quantumv2.get_client(context)
network = client.show_network(network_uuid).get('network') or {}
network['label'] = network['name']
return network
def delete(self, context, network_uuid):
raise NotImplementedError()
def disassociate(self, context, network_uuid):
raise NotImplementedError()
def get_fixed_ip(self, context, id):
raise NotImplementedError()
def get_fixed_ip_by_address(self, context, address):
uuid_maps = self._get_instance_uuids_by_ip(context, address)
if len(uuid_maps) == 1:
return uuid_maps[0]
elif not uuid_maps:
raise exception.FixedIpNotFoundForAddress(address=address)
else:
raise exception.FixedIpAssociatedWithMultipleInstances(
address=address)
def _setup_net_dict(self, client, network_id):
if not network_id:
return {}
pool = client.show_network(network_id)['network']
return {pool['id']: pool}
def _setup_port_dict(self, client, port_id):
if not port_id:
return {}
port = client.show_port(port_id)['port']
return {port['id']: port}
def _setup_pools_dict(self, client):
pools = self._get_floating_ip_pools(client)
return dict([(i['id'], i) for i in pools])
def _setup_ports_dict(self, client, project_id=None):
search_opts = {'tenant_id': project_id} if project_id else {}
ports = client.list_ports(**search_opts)['ports']
return dict([(p['id'], p) for p in ports])
def get_floating_ip(self, context, id):
client = quantumv2.get_client(context)
fip = client.show_floatingip(id)['floatingip']
pool_dict = self._setup_net_dict(client,
fip['floating_network_id'])
port_dict = self._setup_port_dict(client, fip['port_id'])
return self._format_floating_ip_model(fip, pool_dict, port_dict)
def _get_floating_ip_pools(self, client, project_id=None):
search_opts = {NET_EXTERNAL: True}
if project_id:
search_opts.update({'tenant_id': project_id})
data = client.list_networks(**search_opts)
return data['networks']
def get_floating_ip_pools(self, context):
client = quantumv2.get_client(context)
pools = self._get_floating_ip_pools(client)
return [{'name': n['name'] or n['id']} for n in pools]
def _format_floating_ip_model(self, fip, pool_dict, port_dict):
pool = pool_dict[fip['floating_network_id']]
result = {'id': fip['id'],
'address': fip['floating_ip_address'],
'pool': pool['name'] or pool['id'],
'project_id': fip['tenant_id'],
# In Quantum v2, an exact fixed_ip_id does not exist.
'fixed_ip_id': fip['port_id'],
}
# In Quantum v2 API fixed_ip_address and instance uuid
# (= device_id) are known here, so pass it as a result.
result['fixed_ip'] = {'address': fip['fixed_ip_address']}
if fip['port_id']:
instance_uuid = port_dict[fip['port_id']]['device_id']
result['instance'] = {'uuid': instance_uuid}
else:
result['instance'] = None
return result
def get_floating_ip_by_address(self, context, address):
client = quantumv2.get_client(context)
fip = self._get_floating_ip_by_address(client, address)
pool_dict = self._setup_net_dict(client,
fip['floating_network_id'])
port_dict = self._setup_port_dict(client, fip['port_id'])
return self._format_floating_ip_model(fip, pool_dict, port_dict)
def get_floating_ips_by_project(self, context):
client = quantumv2.get_client(context)
project_id = context.project_id
fips = client.list_floatingips(tenant_id=project_id)['floatingips']
pool_dict = self._setup_pools_dict(client)
port_dict = self._setup_ports_dict(client, project_id)
return [self._format_floating_ip_model(fip, pool_dict, port_dict)
for fip in fips]
def get_floating_ips_by_fixed_address(self, context, fixed_address):
return []
def get_instance_id_by_floating_address(self, context, address):
"""Returns the instance id a floating ip's fixed ip is allocated to."""
client = quantumv2.get_client(context)
fip = self._get_floating_ip_by_address(client, address)
if not fip['port_id']:
return None
port = client.show_port(fip['port_id'])['port']
return port['device_id']
def get_vifs_by_instance(self, context, instance):
raise NotImplementedError()
def get_vif_by_mac_address(self, context, mac_address):
raise NotImplementedError()
def _get_floating_ip_pool_id_by_name_or_id(self, client, name_or_id):
search_opts = {NET_EXTERNAL: True, 'fields': 'id'}
if uuidutils.is_uuid_like(name_or_id):
search_opts.update({'id': name_or_id})
else:
search_opts.update({'name': name_or_id})
data = client.list_networks(**search_opts)
nets = data['networks']
if len(nets) == 1:
return nets[0]['id']
elif len(nets) == 0:
raise exception.FloatingIpPoolNotFound()
else:
msg = (_("Multiple floating IP pools matches found for name '%s'")
% name_or_id)
raise exception.NovaException(message=msg)
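    # The helper above accepts either a pool name or a UUID, e.g. 'public'
    # or 'a1b2c3...' (illustrative values), and resolves it to the id of the
    # matching external network.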
def allocate_floating_ip(self, context, pool=None):
"""Add a floating ip to a project from a pool."""
client = quantumv2.get_client(context)
pool = pool or CONF.default_floating_pool
pool_id = self._get_floating_ip_pool_id_by_name_or_id(client, pool)
# TODO(amotoki): handle exception during create_floatingip()
# At this timing it is ensured that a network for pool exists.
# quota error may be returned.
param = {'floatingip': {'floating_network_id': pool_id}}
fip = client.create_floatingip(param)
return fip['floatingip']['floating_ip_address']
def _get_floating_ip_by_address(self, client, address):
"""Get floatingip from floating ip address."""
data = client.list_floatingips(floating_ip_address=address)
fips = data['floatingips']
if len(fips) == 0:
raise exception.FloatingIpNotFoundForAddress(address=address)
elif len(fips) > 1:
raise exception.FloatingIpMultipleFoundForAddress(address=address)
return fips[0]
def _get_floating_ips_by_fixed_and_port(self, client, fixed_ip, port):
"""Get floatingips from fixed ip and port."""
data = client.list_floatingips(fixed_ip_address=fixed_ip, port_id=port)
return data['floatingips']
def release_floating_ip(self, context, address,
affect_auto_assigned=False):
"""Remove a floating ip with the given address from a project."""
# Note(amotoki): We cannot handle a case where multiple pools
# have overlapping IP address range. In this case we cannot use
# 'address' as a unique key.
# This is a limitation of the current nova.
# Note(amotoki): 'affect_auto_assigned' is not respected
        # since it is not used anywhere in nova code and I could not
        # find why this parameter exists.
client = quantumv2.get_client(context)
fip = self._get_floating_ip_by_address(client, address)
if fip['port_id']:
raise exception.FloatingIpAssociated(address=address)
client.delete_floatingip(fip['id'])
@refresh_cache
def disassociate_floating_ip(self, context, instance, address,
affect_auto_assigned=False):
"""Disassociate a floating ip from the instance."""
# Note(amotoki): 'affect_auto_assigned' is not respected
        # since it is not used anywhere in nova code and I could not
        # find why this parameter exists.
client = quantumv2.get_client(context)
fip = self._get_floating_ip_by_address(client, address)
client.update_floatingip(fip['id'], {'floatingip': {'port_id': None}})
def migrate_instance_start(self, context, instance, migration):
"""Start to migrate the network of an instance."""
        # NOTE(wenjianhn): just pass so that migrating an instance
        # doesn't raise, for now.
pass
def migrate_instance_finish(self, context, instance, migration):
"""Finish migrating the network of an instance."""
        # NOTE(wenjianhn): just pass so that migrating an instance
        # doesn't raise, for now.
pass
def add_network_to_project(self, context, project_id, network_uuid=None):
"""Force add a network to the project."""
raise NotImplementedError()
def _build_network_info_model(self, context, instance, networks=None):
search_opts = {'tenant_id': instance['project_id'],
'device_id': instance['uuid'], }
client = quantumv2.get_client(context, admin=True)
data = client.list_ports(**search_opts)
ports = data.get('ports', [])
if networks is None:
networks = self._get_available_networks(context,
instance['project_id'])
else:
# ensure ports are in preferred network order
_ensure_requested_network_ordering(
lambda x: x['network_id'],
ports,
[n['id'] for n in networks])
nw_info = network_model.NetworkInfo()
for port in ports:
network_name = None
for net in networks:
if port['network_id'] == net['id']:
network_name = net['name']
break
if network_name is None:
raise exception.NotFound(_('Network %(net)s for '
'port %(port_id)s not found!') %
{'net': port['network_id'],
'port': port['id']})
network_IPs = []
for fixed_ip in port['fixed_ips']:
fixed = network_model.FixedIP(address=fixed_ip['ip_address'])
floats = self._get_floating_ips_by_fixed_and_port(
client, fixed_ip['ip_address'], port['id'])
for ip in floats:
fip = network_model.IP(address=ip['floating_ip_address'],
type='floating')
fixed.add_floating_ip(fip)
network_IPs.append(fixed)
subnets = self._get_subnets_from_port(context, port)
for subnet in subnets:
subnet['ips'] = [fixed_ip for fixed_ip in network_IPs
if fixed_ip.is_in_subnet(subnet)]
bridge = None
ovs_interfaceid = None
# Network model metadata
should_create_bridge = None
vif_type = port.get('binding:vif_type')
# TODO(berrange) Quantum should pass the bridge name
# in another binding metadata field
if vif_type == network_model.VIF_TYPE_OVS:
bridge = CONF.quantum_ovs_bridge
ovs_interfaceid = port['id']
elif vif_type == network_model.VIF_TYPE_BRIDGE:
bridge = "brq" + port['network_id']
should_create_bridge = True
if bridge is not None:
bridge = bridge[:network_model.NIC_NAME_LEN]
devname = "tap" + port['id']
devname = devname[:network_model.NIC_NAME_LEN]
network = network_model.Network(
id=port['network_id'],
bridge=bridge,
injected=CONF.flat_injected,
label=network_name,
tenant_id=net['tenant_id']
)
network['subnets'] = subnets
if should_create_bridge is not None:
network['should_create_bridge'] = should_create_bridge
nw_info.append(network_model.VIF(
id=port['id'],
address=port['mac_address'],
network=network,
type=port.get('binding:vif_type'),
ovs_interfaceid=ovs_interfaceid,
devname=devname))
return nw_info
def _get_subnets_from_port(self, context, port):
"""Return the subnets for a given port."""
fixed_ips = port['fixed_ips']
# No fixed_ips for the port means there is no subnet associated
# with the network the port is created on.
# Since list_subnets(id=[]) returns all subnets visible for the
# current tenant, returned subnets may contain subnets which is not
# related to the port. To avoid this, the method returns here.
if not fixed_ips:
return []
search_opts = {'id': [ip['subnet_id'] for ip in fixed_ips]}
data = quantumv2.get_client(context).list_subnets(**search_opts)
ipam_subnets = data.get('subnets', [])
subnets = []
for subnet in ipam_subnets:
subnet_dict = {'cidr': subnet['cidr'],
'gateway': network_model.IP(
address=subnet['gateway_ip'],
type='gateway'),
}
# attempt to populate DHCP server field
search_opts = {'network_id': subnet['network_id'],
'device_owner': 'network:dhcp'}
data = quantumv2.get_client(context).list_ports(**search_opts)
dhcp_ports = data.get('ports', [])
for p in dhcp_ports:
for ip_pair in p['fixed_ips']:
if ip_pair['subnet_id'] == subnet['id']:
subnet_dict['dhcp_server'] = ip_pair['ip_address']
break
subnet_object = network_model.Subnet(**subnet_dict)
for dns in subnet.get('dns_nameservers', []):
subnet_object.add_dns(
network_model.IP(address=dns, type='dns'))
# TODO(gongysh) get the routes for this subnet
subnets.append(subnet_object)
return subnets
def get_dns_domains(self, context):
"""Return a list of available dns domains.
These can be used to create DNS entries for floating ips.
"""
raise NotImplementedError()
def add_dns_entry(self, context, address, name, dns_type, domain):
"""Create specified DNS entry for address."""
raise NotImplementedError()
def modify_dns_entry(self, context, name, address, domain):
"""Create specified DNS entry for address."""
raise NotImplementedError()
def delete_dns_entry(self, context, name, domain):
"""Delete the specified dns entry."""
raise NotImplementedError()
def delete_dns_domain(self, context, domain):
"""Delete the specified dns domain."""
raise NotImplementedError()
def get_dns_entries_by_address(self, context, address, domain):
"""Get entries for address and domain."""
raise NotImplementedError()
def get_dns_entries_by_name(self, context, name, domain):
"""Get entries for name and domain."""
raise NotImplementedError()
def create_private_dns_domain(self, context, domain, availability_zone):
"""Create a private DNS domain with nova availability zone."""
raise NotImplementedError()
def create_public_dns_domain(self, context, domain, project=None):
"""Create a private DNS domain with optional nova project."""
raise NotImplementedError()
def _ensure_requested_network_ordering(accessor, unordered, preferred):
"""Sort a list with respect to the preferred network ordering."""
if preferred:
unordered.sort(key=lambda i: preferred.index(accessor(i)))
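# Illustration of the helper above (hypothetical ids): given
# unordered = [{'id': 'b'}, {'id': 'a'}] and preferred = ['a', 'b'],
# _ensure_requested_network_ordering(lambda x: x['id'], unordered, preferred)
# sorts unordered in place to [{'id': 'a'}, {'id': 'b'}].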
| apache-2.0 | 8,649,179,692,638,891,000 | 43.374603 | 79 | 0.563147 | false |
Chealion/yycbike | archive/weatherLoad.py | 1 | 6271 | #! /usr/bin/python
# :set tabstop=4 shiftwidth=4 expandtab
# Downoads Environment Canada data and sends the data to Graphite. Additionally logs the data to a file we can use to import later
import csv
import time
import graphitesend
import urllib2
from datetime import date, timedelta
import datetime
graphitesend.init(graphite_server='localhost',prefix='yycbike',system_name='')
metriclog = open('/home/ubuntu/devmetriclog.log', 'a')
# Watch out for timezones - this script fails to function past 5 PM MST.
yesterday = date.today() - timedelta(1)
year = yesterday.strftime('%Y')
month = yesterday.strftime('%m')
day = yesterday.strftime('%d')
#Installations
# URLs per ftp://ftp.tor.ec.gc.ca/Pub/Get_More_Data_Plus_de_donnees/Readme.txt
HOURLY_URL='http://climate.weather.gc.ca/climate_data/bulk_data_e.html?format=csv&stationID=50430&Year=' + year + '&Month=' + month + '&Day=' + day + '&submit=Download+Data&timeframe=1'
DAILY_URL= 'http://climate.weather.gc.ca/climate_data/bulk_data_e.html?format=csv&stationID=50430&Year=' + year + '&Month=' + month + '&Day=' + day + '&submit=Download+Data&timeframe=2'
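# For example, when run on 2017-06-02 (illustrative date), HOURLY_URL expands to:
# http://climate.weather.gc.ca/climate_data/bulk_data_e.html?format=csv&stationID=50430&Year=2017&Month=06&Day=01&submit=Download+Data&timeframe=1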
## HOURLY
url = HOURLY_URL
print 'Loading Hourly Weather Data...'
response = urllib2.urlopen(url)
csv_data = response.read()
# Delete first 17 lines - up to and inlcuding header line
cleaned_data = '\n'.join(csv_data.split('\n')[17:])
# split into list, and use non unicode field names
csv_reader = csv.DictReader(cleaned_data.split('\n'), fieldnames=['Date', 'Year', 'Month', 'Day', 'Time', 'Quality', 'Temp', 'TempFlag', 'DewPoint', 'DewPointFlag', 'Humidity', 'HumFlag', 'WindDir', 'WindFlag', 'WindSpd', 'WindFlg', 'Visbility', 'VisFlag', 'Pressure', 'PressFlag', 'Humidex', 'HmdxFlag', 'WindChill', 'WindChillFlag', 'Weather'])
for row in csv_reader:
#Create timestamp
timestamp = time.mktime(datetime.datetime.strptime(row['Date'], "%Y-%m-%d %H:%M").timetuple())
yesterday_timestamp = float(yesterday.strftime('%s'))
#Ignore any data "newer" than yesterday. Data that doesn't exist yet.
if timestamp > yesterday_timestamp:
break
else:
timestamp = str(int(timestamp))
#print row
# Data Cleaning - Wind Chill or Humidex - merge
if row['Temp'] is None or row['Temp'] == '':
continue
if row['Humidex'] == '' and row['WindChill'] == '':
feelslike = row['Temp']
elif row['Humidex'] == '':
feelslike = row['WindChill']
else:
feelslike = row['Humidex']
if row['WindSpd'] == '':
row['WindSpd'] = 0
if row['WindDir'] == '':
row['WindDir'] = 0
metric_string = 'weather.hourly.temp ' + str(row['Temp']) + ' ' + timestamp
metriclog.write(metric_string + "\n")
graphitesend.send('weather.hourly.temp', str(row['Temp']), timestamp)
metric_string = 'weather.hourly.windspeed ' + str(row['WindSpd']) + ' ' + timestamp
metriclog.write(metric_string + "\n")
graphitesend.send('weather.hourly.windspeed', str(row['WindSpd']), timestamp)
metric_string = 'weather.hourly.winddir ' + str(row['WindDir']) + ' ' + timestamp
metriclog.write(metric_string + "\n")
graphitesend.send('weather.hourly.winddir', str(row['WindDir']), timestamp)
metric_string = 'weather.hourly.humidity ' + str(row['Humidity']) + ' ' + timestamp
metriclog.write(metric_string + "\n")
graphitesend.send('weather.hourly.humidity', str(row['Humidity']), timestamp)
metric_string = 'weather.hourly.feelslike ' + str(feelslike) + ' ' + timestamp
metriclog.write(metric_string + "\n")
graphitesend.send('weather.hourly.feelslike', str(feelslike), timestamp)
## DAILY
url = DAILY_URL
print 'Loading Daily Weather Data...'
response = urllib2.urlopen(url)
csv_data = response.read()
# Delete first 26 lines - up to and including header line
cleaned_data = '\n'.join(csv_data.split('\n')[26:])
# split into list, and use non unicode field names
csv_reader = csv.DictReader(cleaned_data.split('\n'), fieldnames=['Date', 'Year', 'Month', 'Day', 'Quality', 'Max', 'MaxFlag', 'Min', 'MinFlag', 'Mean', 'MeanFlag', 'Heat1', 'Heat2', 'Heat3', 'Heat4', 'Rain', 'RainFlag', 'Snow', 'SnowFlag', 'TotalPrecip', 'PrecipFlag', 'SnowonGround', 'SnowonGroundFlag', 'Wind1', 'Wind2', 'Wind3', 'Wind4'])
for row in csv_reader:
#Create timestamp
timestamp = time.mktime(datetime.datetime.strptime(row['Date'], "%Y-%m-%d").timetuple())
yesterday_timestamp = float(yesterday.strftime('%s'))
#Ignore any data "newer" than yesterday. Data that doesn't exist yet.
if timestamp > yesterday_timestamp:
break
else:
timestamp = str(int(timestamp))
#print row
if row['Max'] is None or row['Max'] == '' or row['Min'] == '':
continue
metric_string = 'weather.daily.high ' + str(row['Max']) + ' ' + timestamp
metriclog.write(metric_string + "\n")
graphitesend.send('weather.daily.high', str(row['Max']), timestamp)
metric_string = 'weather.daily.low ' + str(row['Min']) + ' ' + timestamp
metriclog.write(metric_string + "\n")
graphitesend.send('weather.daily.low', str(row['Min']), timestamp)
metric_string = 'weather.daily.mean ' + str(row['Mean']) + ' ' + timestamp
metriclog.write(metric_string + "\n")
graphitesend.send('weather.daily.mean', str(row['Mean']), timestamp)
# Data Cleaning
if row['TotalPrecip'] == '':
row['TotalPrecip'] = 0
metric_string = 'weather.daily.precip ' + str(row['TotalPrecip']) + ' ' + timestamp
metriclog.write(metric_string + "\n")
graphitesend.send('weather.daily.precip', str(row['TotalPrecip']), timestamp)
# Data Cleaning
if row['SnowonGround'] == '':
row['SnowonGround'] = 0
metric_string = 'weather.daily.snowamt ' + str(row['SnowonGround']) + ' ' + timestamp
metriclog.write(metric_string + "\n")
graphitesend.send('weather.daily.snowamt', str(row['SnowonGround']), timestamp)
# OUTPUT FORMAT:
# <metric path> <metric value> <metric timestamp>
# yycbike.peacebridge.north.trips 5 123456789
metriclog.close()
print 'Done.'
| mit | 2,237,672,961,989,469,700 | 39.986928 | 346 | 0.635784 | false |
Ziqi-Li/bknqgis | bokeh/bokeh/server/server.py | 1 | 10467 | ''' Provides a Server which instantiates Application instances as clients connect
'''
from __future__ import absolute_import, print_function
import atexit
import logging
log = logging.getLogger(__name__)
import signal
import tornado
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
from tornado import netutil
from .tornado import BokehTornado
from bokeh import __version__
from bokeh.application import Application
from bokeh.resources import DEFAULT_SERVER_PORT
def _create_hosts_whitelist(host_list, port):
if not host_list:
return ['localhost:' + str(port)]
hosts = []
for host in host_list:
if '*' in host:
log.warning(
"Host wildcard %r will allow websocket connections originating "
"from multiple (or possibly all) hostnames or IPs. Use non-wildcard "
"values to restrict access explicitly", host)
if host == '*':
# do not append the :80 port suffix in that case: any port is
# accepted
hosts.append(host)
continue
parts = host.split(':')
if len(parts) == 1:
if parts[0] == "":
raise ValueError("Empty host value")
hosts.append(host+":80")
elif len(parts) == 2:
try:
int(parts[1])
except ValueError:
raise ValueError("Invalid port in host value: %s" % host)
if parts[0] == "":
raise ValueError("Empty host value")
hosts.append(host)
else:
raise ValueError("Invalid host value: %s" % host)
return hosts
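# Illustrative behaviour of the normalization above (sketch, not from the
# original source):
#   _create_hosts_whitelist(None, 5006)         -> ['localhost:5006']
#   _create_hosts_whitelist(['foo'], 5006)      -> ['foo:80']
#   _create_hosts_whitelist(['foo:8080'], 5006) -> ['foo:8080']
#   _create_hosts_whitelist(['*'], 5006)        -> ['*']   (any port accepted)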
def _bind_sockets(address, port):
'''Like tornado.netutil.bind_sockets(), but also returns the
assigned port number.
'''
ss = netutil.bind_sockets(port=port or 0, address=address)
assert len(ss)
ports = {s.getsockname()[1] for s in ss}
assert len(ports) == 1, "Multiple ports assigned??"
actual_port = ports.pop()
if port:
assert actual_port == port
return ss, actual_port
class Server(object):
''' A Server which creates a new Session for each connection, using an Application to initialize each Session.
Args:
applications (dict of str: bokeh.application.Application) or bokeh.application.Application:
mapping from URL paths to Application instances, or a single Application to put at the root URL
The Application is a factory for Document, with a new Document initialized for each Session.
Each application should be identified by a path meant to go in a URL, like "/" or "/foo"
Kwargs:
        num_procs (int):
            Number of worker processes for an app. Defaults to 1; using 0 autodetects the number of cores
tornado_server_kwargs (dict):
Additional arguments passed to tornado.httpserver.HTTPServer. E.g. max_buffer_size to
specify the maximum upload size. More details can be found at:
http://www.tornadoweb.org/en/stable/httpserver.html#http-server
'''
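    # A minimal usage sketch (not part of the original source; `make_doc` is a
    # hypothetical handler function):
    #
    #     from bokeh.application import Application
    #     from bokeh.application.handlers import FunctionHandler
    #
    #     def make_doc(doc):
    #         doc.title = "demo"
    #
    #     server = Server(Application(FunctionHandler(make_doc)), port=5006)
    #     server.run_until_shutdown()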
def __init__(self, applications, io_loop=None, tornado_server_kwargs=None, **kwargs):
log.info("Starting Bokeh server version %s (running on Tornado %s)" % (__version__, tornado.version))
if isinstance(applications, Application):
self._applications = { '/' : applications }
else:
self._applications = applications
tornado_kwargs = { key: kwargs[key] for key in ['extra_patterns',
'secret_key',
'sign_sessions',
'generate_session_ids',
'keep_alive_milliseconds',
'check_unused_sessions_milliseconds',
'unused_session_lifetime_milliseconds',
'stats_log_frequency_milliseconds',
]
if key in kwargs }
prefix = kwargs.get('prefix')
if prefix is None:
prefix = ""
prefix = prefix.strip("/")
if prefix:
prefix = "/" + prefix
self._prefix = prefix
self._started = False
self._stopped = False
port = kwargs.get('port', DEFAULT_SERVER_PORT)
self._address = kwargs.get('address') or None
if tornado_server_kwargs is None:
tornado_server_kwargs = {}
tornado_server_kwargs.setdefault('xheaders', kwargs.get('use_xheaders', False))
self._num_procs = kwargs.get('num_procs', 1)
if self._num_procs != 1:
assert all(app.safe_to_fork for app in self._applications.values()), (
                'User code has run before attempting to run multiple '
'processes. This is considered an unsafe operation.')
sockets, self._port = _bind_sockets(self._address, port)
try:
tornado_kwargs['extra_websocket_origins'] = _create_hosts_whitelist(kwargs.get('allow_websocket_origin'), self._port)
tornado_kwargs['use_index'] = kwargs.get('use_index', True)
tornado_kwargs['redirect_root'] = kwargs.get('redirect_root', True)
self._tornado = BokehTornado(self._applications, self.prefix, **tornado_kwargs)
self._http = HTTPServer(self._tornado, **tornado_server_kwargs)
self._http.start(self._num_procs)
self._http.add_sockets(sockets)
except Exception:
for s in sockets:
s.close()
raise
# Can only instantiate the IO loop after HTTPServer.start() was
# called because of `num_procs`, see issue #5524
if io_loop is None:
io_loop = IOLoop.current()
self._loop = io_loop
self._tornado.initialize(io_loop=io_loop, **tornado_kwargs)
@property
def port(self):
'''The actual port number the server is listening on for HTTP
requests.
'''
return self._port
@property
def address(self):
'''The address the server is listening on for HTTP requests
(may be empty or None).
'''
return self._address
@property
def prefix(self):
return self._prefix
@property
def io_loop(self):
return self._loop
def start(self):
''' Start the Bokeh Server and its background tasks.
Notes:
This method does not block and does not affect the state of
the Tornado I/O loop. You must start and stop the loop yourself.
'''
assert not self._started, "Already started"
self._started = True
self._tornado.start()
def stop(self, wait=True):
''' Stop the Bokeh Server.
Args:
            wait (boolean): whether to wait for orderly cleanup (default: True)
Returns:
None
'''
assert not self._stopped, "Already stopped"
self._stopped = True
self._tornado.stop(wait)
self._http.stop()
def run_until_shutdown(self):
''' Run the Bokeh Server until shutdown is requested by the user,
either via a Keyboard interrupt (Ctrl-C) or SIGTERM.
'''
if not self._started:
self.start()
# Install shutdown hooks
atexit.register(self._atexit)
signal.signal(signal.SIGTERM, self._sigterm)
try:
self._loop.start()
except KeyboardInterrupt:
print("\nInterrupted, shutting down")
self.stop()
_atexit_ran = False
def _atexit(self):
if self._atexit_ran:
return
self._atexit_ran = True
log.debug("Shutdown: cleaning up")
if not self._stopped:
self.stop(wait=False)
def _sigterm(self, signum, frame):
print("Received signal %d, shutting down" % (signum,))
# Tell self._loop.start() to return.
self._loop.add_callback_from_signal(self._loop.stop)
def unlisten(self):
'''Stop listening on ports (Server will no longer be usable after calling this)
Returns:
None
'''
self._http.close_all_connections()
self._http.stop()
def get_session(self, app_path, session_id):
'''Gets a session by name (session must already exist)'''
return self._tornado.get_session(app_path, session_id)
def get_sessions(self, app_path=None):
'''Gets all live sessions for an application.'''
if app_path is not None:
return self._tornado.get_sessions(app_path)
all_sessions = []
for path in self._tornado.app_paths:
all_sessions += self._tornado.get_sessions(path)
return all_sessions
def show(self, app_path, browser=None, new='tab'):
''' Opens an app in a browser window or tab.
Useful for testing server applications on your local desktop but
        should not be called when running bokeh-server on an actual server.
Args:
app_path (str) : the app path to open
The part of the URL after the hostname:port, with leading slash.
browser (str, optional) : browser to show with (default: None)
For systems that support it, the **browser** argument allows
specifying which browser to display in, e.g. "safari", "firefox",
"opera", "windows-default" (see the ``webbrowser`` module
documentation in the standard lib for more details).
new (str, optional) : window or tab (default: "tab")
If ``new`` is 'tab', then opens a new tab.
If ``new`` is 'window', then opens a new window.
Returns:
None
'''
if not app_path.startswith("/"):
raise ValueError("app_path must start with a /")
address_string = 'localhost'
if self.address is not None and self.address != '':
address_string = self.address
url = "http://%s:%d%s%s" % (address_string, self.port, self.prefix, app_path)
from bokeh.util.browser import view
view(url, browser=browser, new=new)
| gpl-2.0 | 7,007,244,162,705,073,000 | 35.217993 | 129 | 0.572179 | false |
jelly/calibre | src/calibre/db/cli/cmd_catalog.py | 2 | 3866 | #!/usr/bin/env python2
# vim:fileencoding=utf-8
# License: GPLv3 Copyright: 2017, Kovid Goyal <kovid at kovidgoyal.net>
from __future__ import absolute_import, division, print_function, unicode_literals
import os
from calibre.customize.ui import available_catalog_formats, plugin_for_catalog_format
from calibre.db.cli import integers_from_string
readonly = True
version = 0 # change this if you change signature of implementation()
needs_srv_ctx = True
no_remote = True
def implementation(db, notify_changes, ctx):
raise NotImplementedError()
def option_parser(get_parser, args): # {{{
def add_plugin_parser_options(fmt, parser):
# Fetch the extension-specific CLI options from the plugin
# library.catalogs.<format>.py
plugin = plugin_for_catalog_format(fmt)
p = parser.add_option_group(_('{} OPTIONS').format(fmt.upper()))
for option in plugin.cli_options:
if option.action:
p.add_option(
option.option,
default=option.default,
dest=option.dest,
action=option.action,
help=option.help
)
else:
p.add_option(
option.option,
default=option.default,
dest=option.dest,
help=option.help
)
# Entry point
parser = get_parser(
_(
'''\
%prog catalog /path/to/destination.(csv|epub|mobi|xml...) [options]
Export a catalog in format specified by path/to/destination extension.
Options control how entries are displayed in the generated catalog output.
Note that different catalog formats support different sets of options.
'''
)
)
# Add options common to all catalog plugins
parser.add_option(
'-i',
'--ids',
default=None,
dest='ids',
help=_(
"Comma-separated list of database IDs to catalog.\n"
"If declared, --search is ignored.\n"
"Default: all"
)
)
parser.add_option(
'-s',
'--search',
default=None,
dest='search_text',
help=_(
"Filter the results by the search query. "
"For the format of the search query, please see "
"the search-related documentation in the User Manual.\n"
"Default: no filtering"
)
)
parser.add_option(
'-v',
'--verbose',
default=False,
action='store_true',
dest='verbose',
help=_('Show detailed output information. Useful for debugging')
)
fmt = 'epub'
if args and '.' in args[0]:
fmt = args[0].rpartition('.')[-1].lower()
if fmt not in available_catalog_formats():
fmt = 'epub'
# Add options specific to fmt plugin
add_plugin_parser_options(fmt, parser)
return parser
# }}}
def main(opts, args, dbctx):
if len(args) < 1:
raise SystemExit(_('You must specify a catalog output file'))
if opts.ids:
opts.ids = list(integers_from_string(opts.ids))
fmt = args[0].rpartition('.')[-1]
if fmt not in available_catalog_formats():
raise SystemExit(
_('Cannot generate a catalog in the {} format').format(fmt.upper())
)
# No support for connected device in CLI environment
# Parallel initialization in calibre.gui2.tools:generate_catalog()
opts.connected_device = {
'is_device_connected': False,
'kind': None,
'name': None,
'save_template': None,
'serial': None,
'storage': None,
}
dest = os.path.abspath(os.path.expanduser(args[0]))
plugin = plugin_for_catalog_format(fmt)
with plugin:
plugin.run(dest, opts, dbctx.db)
return 0
| gpl-3.0 | 3,301,390,566,288,786,000 | 28.51145 | 85 | 0.579152 | false |
geotagx/geotagx-pybossa-archive | pybossa/auth/task.py | 1 | 1535 | # -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
from flask.ext.login import current_user
import pybossa.model as model
from pybossa.core import db
def create(task=None):
if not current_user.is_anonymous():
app = db.session.query(model.App).filter_by(id=task.app_id).one()
if app.owner_id == current_user.id or current_user.admin is True:
return True
else:
return False
else:
return False
def read(task=None):
return True
def update(task):
if not current_user.is_anonymous():
app = db.session.query(model.App).filter_by(id=task.app_id).one()
if app.owner_id == current_user.id or current_user.admin is True:
return True
else:
return False
else:
return False
def delete(task):
return update(task)
| agpl-3.0 | -5,745,328,043,428,878,000 | 29.098039 | 77 | 0.683388 | false |
MasterGowen/moonrain | moonrain/accounts/models.py | 1 | 2939 | from django.db import models
from django.contrib.auth.models import BaseUserManager, AbstractBaseUser
from ..projects.models import Project
class UserManager(BaseUserManager):
def create_user(self, email, username, password=None):
if not email:
raise ValueError('Необходимо ввести электронный адрес')
user = self.model(
email=UserManager.normalize_email(email),
username=username,
)
user.set_password(password)
user.save(using=self._db)
return user
def create_superuser(self, email, username, password):
user = self.create_user(email,
password=password,
username=username)
user.is_admin = True
user.save(using=self._db)
return user
class User(AbstractBaseUser):
'''
    User
'''
email = models.EmailField(
verbose_name='Электронная почта',
max_length=32,
unique=True,
db_index=True,
)
username = models.CharField(
verbose_name='Имя пользователя',
blank=False,
max_length=32,
unique=True,
)
avatar = models.ImageField(
verbose_name='Аватар',
upload_to='images/%Y/%m',
blank=True,
)
first_name = models.CharField(
verbose_name='Имя',
max_length=16,
blank=True,
)
last_name = models.CharField(
verbose_name='Фамилия',
max_length=32,
blank=True,
)
department = models.CharField(
verbose_name='Подразделение',
max_length=255,
blank=True,
)
is_admin = models.BooleanField(
verbose_name='Является администратором?',
default=False,
)
is_superuser = models.BooleanField(
verbose_name='Является суперпользователем?',
default=False,
)
projects = models.ManyToManyField(Project, verbose_name='Проекты',
blank=True,
help_text='Проекты, в которых участвует пользователь',)
USERNAME_FIELD = 'email'
REQUIRED_FIELDS = ['username']
objects = UserManager()
def get_full_name(self):
return '%s %s' % (self.last_name,
self.first_name,)
def get_short_name(self):
return self.username
def __str__(self):
return self.email
def has_perm(self, perm, obj=None):
return True
def has_module_perms(self, app_label):
return True
@property
def is_staff(self):
return self.is_admin
class Meta:
verbose_name = ('Пользователь')
verbose_name_plural = ('Пользователи') | gpl-2.0 | 7,740,447,189,795,986,000 | 23.070796 | 93 | 0.573005 | false |
agaveapi/SC17-container-tutorial | content/images/jupyter/examples/setvars.py | 1 | 2421 | # Here we define some utility commands to simplify interaction with the shell.
# You don't need to read or understand this, but it's here in case you want to.
import re
import os
def repvar(v):
"""
repvar() is short for "Replace Variables." The idea is that this
function looks for strings of the form $VAR or ${VAR} or even
$(CMD) in the input string and replaces them, either with
the contents of os.environ[VAR] or os.pipe(CMD), mimicking the
    behavior of bash. If a backslash precedes the $, then the backslash
will be removed but the string will not be evaluated. Thus:
${HOME} becomes "/home/user"
    $HOME becomes "/home/user"
$(echo Hello) becomes "Hello"
\$HOME becomes $HOME
"""
epos = 0
buf = ''
for g in re.finditer(r'\$((\w+)|\{([^}]*)\}|\(([^())]*)\))|(\\+\$)',v):
if g:
i = 2
while g.group(i) == None:
i += 1
p = g.start(0)
buf += v[epos:p]
epos = p + len(g.group(0))
if i == 4:
fh = os.popen(g.group(i),"r")
c = repvar(fh.read())
fh.close()
elif i == 5:
c = '$'
else:
if not g.group(i) in os.environ:
raise Exception("no such environment variable: "+g.group(i))
c = repvar(os.environ[g.group(i)])
buf += c
else:
break
buf += v[epos:]
return buf.strip()
def setvar(e):
"""
setvar() emulates the ability of BASH to set environment variables.
Thus, NAME=VALUE will set os.environ["NAME"]="VALUE". Bash-style
comments will be stripped, and bash-line continuations will be processed.
"""
e = re.sub(r'#[^\r\n]*','',e)
e = re.sub(r'\\\n\s*','',e)
for m in re.finditer(r'(?m)(\w+)=(.*)',e):
k = m.group(1)
v = repvar(m.group(2))
print(k+"="+v)
os.environ[k]=v
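# Illustrative usage (assumes a POSIX-like shell is available for $(...)):
#   setvar("GREETING=Hello $(whoami)")
#   os.environ["GREETING"]  ->  e.g. "Hello alice"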
def readfile(f):
"""
Reads in a file. repvar() will be applied to the file name.
"""
n = repvar(f)
print("Reading file `"+n+"'")
fh = open(n)
c = fh.read()
fh.close()
return c
def writefile(f,c):
"""
Writes out a file. repvar() will be applied both to the file name
and the file contents.
"""
n = repvar(f)
print("Writing file `"+n+"'")
fh = open(n,"w")
fh.write(repvar(c))
fh.close()
| bsd-3-clause | 6,554,591,777,941,709,000 | 31.28 | 80 | 0.523337 | false |
heromod/migrid | mig/shared/functionality/migadmin.py | 1 | 14406 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# --- BEGIN_HEADER ---
#
# migadmin - admin control panel with daemon status monitor
# Copyright (C) 2003-2015 The MiG Project lead by Brian Vinter
#
# This file is part of MiG.
#
# MiG is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# MiG is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# -- END_HEADER ---
#
"""MiG administrators page with daemon status and configuration"""
import os
import subprocess
import shared.returnvalues as returnvalues
from shared.certreq import build_certreqitem_object, list_cert_reqs, \
get_cert_req, delete_cert_req, accept_cert_req
from shared.defaults import default_pager_entries
from shared.fileio import send_message_to_grid_script
from shared.findtype import is_admin
from shared.functional import validate_input_and_cert
from shared.html import html_post_helper, themed_styles
from shared.init import initialize_main_variables, find_entry
grid_actions = {'reloadconfig': 'RELOADCONFIG',
'showqueued': 'JOBQUEUEINFO',
'showexecuting': 'EXECUTINGQUEUEINFO',
'showdone': 'DONEQUEUEINFO',
'dropqueued': 'DROPQUEUED',
'dropexecuting': 'DROPEXECUTING',
'dropdone': 'DROPDONE',
}
certreq_actions = ['addcertreq', 'delcertreq']
def signature():
"""Signature of the main function"""
defaults = {'action': [''], 'req_id': [], 'job_id': [], 'lines': [20]}
return ['html_form', defaults]
def main(client_id, user_arguments_dict):
"""Main function used by front end"""
(configuration, logger, output_objects, op_name) = \
initialize_main_variables(client_id, op_header=False)
defaults = signature()[1]
(validate_status, accepted) = validate_input_and_cert(
user_arguments_dict,
defaults,
output_objects,
client_id,
configuration,
allow_rejects=False,
)
if not validate_status:
return (accepted, returnvalues.CLIENT_ERROR)
action = accepted['action'][-1]
req_list = accepted['req_id']
job_list = accepted['job_id']
lines = int(accepted['lines'][-1])
meta = '''<meta http-equiv="refresh" content="%s" />
''' % configuration.sleep_secs
style = themed_styles(configuration)
script = '''
<script type="text/javascript" src="/images/js/jquery.js"></script>
<script type="text/javascript" src="/images/js/jquery.tablesorter.js"></script>
<script type="text/javascript" src="/images/js/jquery.tablesorter.pager.js">
</script>
<script type="text/javascript" src="/images/js/jquery.tablesorter.widgets.js"></script>
<script type="text/javascript" src="/images/js/jquery-ui.js"></script>
<script type="text/javascript" src="/images/js/jquery.confirm.js"></script>
<script type="text/javascript" >
$(document).ready(function() {
// init confirmation dialog
$( "#confirm_dialog" ).dialog(
// see http://jqueryui.com/docs/dialog/ for options
{ autoOpen: false,
modal: true, closeOnEscape: true,
width: 500,
buttons: {
"Cancel": function() { $( "#" + name ).dialog("close"); }
}
});
// table initially sorted by col. 9 (created)
var sortOrder = [[9,0]];
$("#certreqtable").tablesorter({widgets: ["zebra", "saveSort"],
sortList:sortOrder
})
.tablesorterPager({ container: $("#pager"),
size: %s
});
}
);
</script>
''' % default_pager_entries
title_entry = find_entry(output_objects, 'title')
title_entry['text'] = '%s administration panel' % configuration.short_title
title_entry['meta'] = meta
title_entry['style'] = style
title_entry['javascript'] = script
output_objects.append({'object_type': 'html_form',
'text':'''
<div id="confirm_dialog" title="Confirm" style="background:#fff;">
<div id="confirm_text"><!-- filled by js --></div>
<textarea cols="40" rows="4" id="confirm_input"
style="display:none;"></textarea>
</div>
''' })
if not is_admin(client_id, configuration, logger):
output_objects.append(
{'object_type': 'error_text', 'text'
: 'You must be an admin to access this control panel.'})
return (output_objects, returnvalues.CLIENT_ERROR)
html = ''
if action and not action in grid_actions.keys() + certreq_actions:
output_objects.append({'object_type': 'error_text', 'text'
: 'Invalid action: %s' % action})
return (output_objects, returnvalues.SYSTEM_ERROR)
if action in grid_actions:
msg = "%s" % grid_actions[action]
if job_list:
msg += ' %s' % ' '.join(job_list)
msg += '\n'
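        # e.g. action='dropqueued' with job_id=['42'] produces the
        # grid_script message "DROPQUEUED 42\n" (illustrative)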
if not send_message_to_grid_script(msg, logger, configuration):
output_objects.append(
{'object_type': 'error_text', 'text'
: '''Error sending %s message to grid_script.''' % action
})
status = returnvalues.SYSTEM_ERROR
elif action in certreq_actions:
if action == "addcertreq":
for req_id in req_list:
if accept_cert_req(req_id, configuration):
output_objects.append(
{'object_type': 'text', 'text':
'Accepted certificate request %s' % req_id})
else:
output_objects.append(
{'object_type': 'error_text', 'text':
'Accept certificate request failed - details in log'
})
elif action == "delcertreq":
for req_id in req_list:
if delete_cert_req(req_id, configuration):
output_objects.append(
{'object_type': 'text', 'text':
'Deleted certificate request %s' % req_id})
else:
output_objects.append(
{'object_type': 'error_text', 'text':
'Delete certificate request failed - details in log'
})
show, drop = '', ''
general = """
<h1>Server Status</h1>
<p class='importanttext'>
This page automatically refreshes every %s seconds.
</p>
<p>
You can see the current grid daemon status and server logs below. The buttons
provide access to e.g. managing the grid job queues.
</p>
<form method='get' action='migadmin.py'>
<input type='hidden' name='action' value='' />
<input type='submit' value='Show last log lines' />
<input type='text' size='2' name='lines' value='%s' />
</form>
<br />
<form method='get' action='migadmin.py'>
<input type='hidden' name='lines' value='%s' />
<input type='hidden' name='action' value='reloadconfig' />
<input type='submit' value='Reload Configuration' />
</form>
<br />
""" % (configuration.sleep_secs, lines, lines)
show += """
<form method='get' action='migadmin.py'>
<input type='hidden' name='lines' value='%s' />
<input type='submit' value='Log Jobs' />
<select name='action'>
""" % lines
drop += """
<form method='get' action='migadmin.py'>
<input type='hidden' name='lines' value='%s' />
<input type='submit' value='Drop Job' />
<select name='action'>
""" % lines
for queue in ['queued', 'executing', 'done']:
selected = ''
if action.find(queue) != -1:
selected = 'selected'
show += "<option %s value='show%s'>%s</option>" % (selected, queue,
queue)
drop += "<option %s value='drop%s'>%s</option>" % (selected, queue,
queue)
show += """
</select>
</form>
<br />
"""
drop += """
</select>
<input type='text' size='20' name='job_id' value='' />
</form>
<br />
"""
html += general
html += show
html += drop
daemons = """
<div id='daemonstatus'>
"""
daemon_names = ['grid_script.py', 'grid_monitor.py', 'grid_sshmux.py']
# No need to run im_notify unless any im notify protocols are enabled
if [i for i in configuration.notify_protocols if i != 'email']:
daemon_names.append('grid_imnotify.py')
if configuration.site_enable_sftp:
daemon_names.append('grid_sftp.py')
if configuration.site_enable_davs:
daemon_names.append('grid_webdavs.py')
if configuration.site_enable_ftps:
daemon_names.append('grid_ftps.py')
if configuration.site_enable_openid:
daemon_names.append('grid_openid.py')
for proc in daemon_names:
pgrep_proc = subprocess.Popen(['pgrep', '-f', proc],
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
pgrep_proc.wait()
ps_out = pgrep_proc.stdout.read().strip()
if pgrep_proc.returncode == 0:
daemons += "<div class='status_online'>%s running (pid %s)</div>" \
% (proc, ps_out)
else:
daemons += "<div class='status_offline'>%s not running!</div>" % \
proc
daemons += """</div>
<br />
"""
html += daemons
output_objects.append({'object_type': 'header', 'text'
: 'Pending Certificate Requests'})
(status, ret) = list_cert_reqs(configuration)
if not status:
logger.error("%s: failed for '%s': %s" % (op_name,
client_id, ret))
output_objects.append({'object_type': 'error_text', 'text'
: ret})
return (output_objects, returnvalues.SYSTEM_ERROR)
certreqs = []
for req_id in ret:
(load_status, req_dict) = get_cert_req(req_id, configuration)
if not load_status:
logger.error("%s: load failed for '%s': %s" % \
(op_name, req_id, req_dict))
output_objects.append({'object_type': 'error_text', 'text'
: 'Could not read details for "%s"' % \
req_id})
return (output_objects, returnvalues.SYSTEM_ERROR)
req_item = build_certreqitem_object(configuration, req_dict)
js_name = 'create%s' % req_id
helper = html_post_helper(js_name, 'migadmin.py',
{'action': 'addcertreq', 'req_id': req_id})
output_objects.append({'object_type': 'html_form', 'text': helper})
req_item['addcertreqlink'] = {
'object_type': 'link', 'destination':
"javascript: confirmDialog(%s, '%s');" % \
(js_name, 'Really accept %s?' % req_id),
'class': 'addlink', 'title': 'Accept %s' % req_id, 'text': ''}
js_name = 'delete%s' % req_id
helper = html_post_helper(js_name, 'migadmin.py',
{'action': 'delcertreq', 'req_id': req_id})
output_objects.append({'object_type': 'html_form', 'text': helper})
req_item['delcertreqlink'] = {
'object_type': 'link', 'destination':
"javascript: confirmDialog(%s, '%s');" % \
(js_name, 'Really remove %s?' % req_id),
'class': 'removelink', 'title': 'Remove %s' % req_id, 'text': ''}
certreqs.append(req_item)
output_objects.append({'object_type': 'table_pager', 'entry_name':
'pending certificate requests',
'default_entries': default_pager_entries})
output_objects.append({'object_type': 'certreqs',
'certreqs': certreqs})
log_path_list = []
if os.path.isabs(configuration.logfile):
log_path_list.append(configuration.logfile)
else:
log_path_list.append(os.path.join(configuration.log_dir,
configuration.logfile))
for log_path in log_path_list:
html += '''
<h1>%s</h1>
<textarea rows=%s cols=200 readonly="readonly">
''' % (log_path, lines)
try:
logger.debug("loading %d lines from %s" % (lines, log_path))
log_fd = open(log_path, 'r')
log_fd.seek(0, os.SEEK_END)
size = log_fd.tell()
pos = log_fd.tell()
log_lines = []
step_size = 100
# locate last X lines
while pos > 0 and len(log_lines) < lines:
offset = min(lines * step_size, size)
logger.debug("seek to offset %d from end of %s" % (offset,
log_path))
log_fd.seek(-offset, os.SEEK_END)
pos = log_fd.tell()
log_lines = log_fd.readlines()
step_size *= 2
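            # The loop above tails the log without reading the whole file:
            # it seeks an exponentially growing offset back from the end
            # until at least `lines` lines are available.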
logger.debug("reading %d lines from %s" % (lines, log_path))
html += ''.join(log_lines[-lines:])
log_fd.close()
except Exception, exc:
logger.error("reading %d lines from %s: %s" % (lines, log_path,
exc))
output_objects.append({'object_type': 'error_text', 'text'
: 'Error reading log (%s)' % exc})
return (output_objects, returnvalues.SYSTEM_ERROR)
html += '''</textarea>
'''
output_objects.append({'object_type': 'html_form', 'text'
: html})
return (output_objects, returnvalues.OK)
| gpl-2.0 | 4,921,689,031,362,691,000 | 38.253406 | 87 | 0.543801 | false |
Tilo15/PhotoFiddle2 | PF2/Tools/HueEqualiser.py | 1 | 5526 | import cv2
import numpy
import Tool
class HueEqualiser(Tool.Tool):
def on_init(self):
self.id = "hueequaliser"
self.name = "Hue Equaliser"
self.icon_path = "ui/PF2_Icons/HueEqualiser.png"
self.properties = [
Tool.Property("header", "Hue Equaliser", "Header", None, has_toggle=False, has_button=False),
Tool.Property("bleed", "Hue Bleed", "Slider", 0.5, max=2.0, min=0.01),
Tool.Property("neighbour_bleed", "Neighbour Bleed", "Slider", 0.25, max=2.0, min=0.0),
# Red
Tool.Property("header_red", "Red", "Header", None, has_toggle=False, has_button=False),
Tool.Property("red_value", "Value", "Slider", 0, max=50, min=-50),
Tool.Property("red_saturation", "Saturation", "Slider", 0, max=50, min=-50),
# Yellow
Tool.Property("header_yellow", "Yellow", "Header", None, has_toggle=False, has_button=False),
Tool.Property("yellow_value", "Value", "Slider", 0, max=50, min=-50),
Tool.Property("yellow_saturation", "Saturation", "Slider", 0, max=50, min=-50),
# Green
Tool.Property("header_green", "Green", "Header", None, has_toggle=False, has_button=False),
Tool.Property("green_value", "Value", "Slider", 0, max=50, min=-50),
Tool.Property("green_saturation", "Saturation", "Slider", 0, max=50, min=-50),
# Cyan
Tool.Property("header_cyan", "Cyan", "Header", None, has_toggle=False, has_button=False),
Tool.Property("cyan_value", "Value", "Slider", 0, max=50, min=-50),
Tool.Property("cyan_saturation", "Saturation", "Slider", 0, max=50, min=-50),
# Blue
Tool.Property("header_blue", "Blue", "Header", None, has_toggle=False, has_button=False),
Tool.Property("blue_value", "Value", "Slider", 0, max=50, min=-50),
Tool.Property("blue_saturation", "Saturation", "Slider", 0, max=50, min=-50),
# Violet
Tool.Property("header_violet", "Violet", "Header", None, has_toggle=False, has_button=False),
Tool.Property("violet_value", "Value", "Slider", 0, max=50, min=-50),
Tool.Property("violet_saturation", "Saturation", "Slider", 0, max=50, min=-50),
]
def on_update(self, image):
hues = {
"red": 0,
"yellow": 60,
"green": 120,
"cyan": 180,
"blue": 240,
"violet": 300,
"_red": 360,
}
out = image
if(not self.is_default()):
bleed = self.props["bleed"].get_value()
neighbour_bleed = self.props["neighbour_bleed"].get_value()
out = out.astype(numpy.float32)
# Convert to HSV colorspace
out = cv2.cvtColor(out, cv2.COLOR_BGR2HSV)
# Bits per pixel
bpp = float(str(image.dtype).replace("uint", "").replace("float", ""))
# Pixel value range
np = float(2 ** bpp - 1)
imhue = out[0:, 0:, 0]
imsat = out[0:, 0:, 1]
imval = out[0:, 0:, 2]
for hue in hues:
hsat = self.props["%s_saturation" % hue.replace('_', '')].get_value()
hval = self.props["%s_value" % hue.replace('_', '')].get_value()
isHue = self._is_hue(imhue, hues[hue], (3.5/bleed))
isHue = self._neighbour_bleed(isHue, neighbour_bleed)
imsat = imsat + ((hsat / 10000) * 255) * isHue
imval = imval + ((hval / 1000) * np) * isHue
# Clip any values out of bounds
imval[imval < 0.0] = 0.0
imval[imval > np] = np
imsat[imsat < 0.0] = 0.0
imsat[imsat > 1.0] = 1.0
out[0:, 0:, 1] = imsat
out[0:, 0:, 2] = imval
# Convert back to BGR colorspace
out = cv2.cvtColor(out, cv2.COLOR_HSV2BGR)
out = out.astype(image.dtype)
return out
def _is_hue(self, image, hue_value, bleed_value = 3.5):
mif = hue_value - 30
mir = hue_value + 30
if (mir > 360):
mir = 360
if (mif < 0):
mif = 0
bleed = float(360 / bleed_value)
icopy = image.copy()
        # print(bleed, mif, mir)  # leftover debug output, disabled
if(mif != 0):
icopy[icopy < mif - bleed] = 0.0
icopy[icopy > mir + bleed] = 0.0
icopy[(icopy < mif) * (icopy != 0.0)] = (((mif - (icopy[(icopy < mif) * (icopy != 0.0)]))/360.0) / (bleed/360.0)) * -1 + 1
icopy[(icopy > mir) * (icopy != 0.0)] = ((((icopy[(icopy > mir) * (icopy != 0.0)]) - mir)/360.0) / (bleed/360.0)) * -1 + 1
icopy[(icopy >= mif) * (icopy <= mir)] = 1.0
if(mif == 0):
icopy[icopy > mir + bleed] = 0.0
icopy[(icopy > mir) * (icopy != 0.0)] = ((((icopy[(icopy > mir) * (icopy != 0.0)]) - mir) / 360.0) / (bleed/360.0)) * -1 + 1
return icopy
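    # The mask returned above is 1.0 within +/-30 degrees of hue_value and
    # falls off linearly over a shoulder of 360/bleed_value degrees; e.g.
    # for hue_value=120 and the default bleed, a pixel at hue 200 gets a
    # weight of roughly 0.5 (illustrative arithmetic).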
def _neighbour_bleed(self, map, bleed):
strength = bleed*30
if (strength > 0):
height, width = map.shape[:2]
size = (height * width)
mul = numpy.math.sqrt(size) / 1064.416 # numpy.math.sqrt(1132982.0)
map = map*255
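            # The expression below rounds the blur kernel to an odd size so
            # that it has a well-defined centre pixel.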
blur_size = abs(2 * round((round(strength * mul) + 1) / 2) - 1)
im = cv2.blur(map, (int(blur_size), int(blur_size)))
return im/255.0
return map | gpl-3.0 | 7,938,162,124,587,179,000 | 35.361842 | 136 | 0.500181 | false |
OCA/sale-workflow | sale_product_set/wizard/product_set_add.py | 1 | 3428 | # Copyright 2015 Anybox S.A.S
# Copyright 2016-2018 Camptocamp SA
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from odoo import models, fields, api, exceptions, _
import odoo.addons.decimal_precision as dp
class ProductSetAdd(models.TransientModel):
_name = 'product.set.add'
_rec_name = 'product_set_id'
_description = "Wizard model to add product set into a quotation"
order_id = fields.Many2one(
'sale.order', 'Sale Order', required=True,
default=lambda self: self.env.context.get('active_id'),
ondelete='cascade'
)
partner_id = fields.Many2one(
related='order_id.partner_id',
ondelete='cascade'
)
product_set_id = fields.Many2one(
'product.set', 'Product set',
required=True,
ondelete='cascade'
)
quantity = fields.Float(
digits=dp.get_precision('Product Unit of Measure'), required=True,
default=1)
skip_existing_products = fields.Boolean(
default=False,
help='Enable this to not add new lines '
'for products already included in SO lines.'
)
def _check_partner(self):
if self.product_set_id.partner_id:
if self.product_set_id.partner_id != self.order_id.partner_id:
raise exceptions.ValidationError(_(
"Select a product set assigned to "
"the same partner of the order."
))
@api.multi
def add_set(self):
""" Add product set, multiplied by quantity in sale order line """
self._check_partner()
order_lines = self._prepare_order_lines()
if order_lines:
self.order_id.write({
"order_line": order_lines
})
return order_lines
def _prepare_order_lines(self):
max_sequence = self._get_max_sequence()
order_lines = []
for set_line in self._get_lines():
order_lines.append(
(0, 0,
self.prepare_sale_order_line_data(
set_line, max_sequence=max_sequence))
)
return order_lines
def _get_max_sequence(self):
max_sequence = 0
if self.order_id.order_line:
max_sequence = max([
line.sequence for line in self.order_id.order_line
])
return max_sequence
def _get_lines(self):
# hook here to take control on used lines
so_product_ids = self.order_id.order_line.mapped('product_id').ids
for set_line in self.product_set_id.set_line_ids:
if (self.skip_existing_products
and set_line.product_id.id in so_product_ids):
continue
yield set_line
@api.multi
def prepare_sale_order_line_data(self, set_line,
max_sequence=0):
self.ensure_one()
sale_line = self.env['sale.order.line'].new({
'order_id': self.order_id.id,
'product_id': set_line.product_id.id,
'product_uom_qty': set_line.quantity * self.quantity,
'product_uom': set_line.product_id.uom_id.id,
'sequence': max_sequence + set_line.sequence,
'discount': set_line.discount,
})
sale_line.product_id_change()
line_values = sale_line._convert_to_write(sale_line._cache)
return line_values
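        # Note: building the line with `new()` and calling
        # `product_id_change()` replays the sale-order form's onchange logic,
        # so defaults such as price and description are populated before the
        # cached values are flattened into write()-style values above.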
| agpl-3.0 | 4,705,311,278,881,153,000 | 34.340206 | 74 | 0.574096 | false |
wolfelee/luokr.com | www.luokr.com/app/ctrls/admin/posts.py | 1 | 10035 | #coding=utf-8
from admin import admin, AdminCtrl
class Admin_PostsCtrl(AdminCtrl):
@admin
def get(self):
pager = {}
pager['qnty'] = min(int(self.input('qnty', 10)), 50)
pager['page'] = max(int(self.input('page', 1)), 1)
        pager['list'] = 0
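        # e.g. page=3, qnty=10 -> "limit 10 offset 20" in the query below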
cur_posts = self.dbase('posts').cursor()
cur_users = self.dbase('users').cursor()
cur_posts.execute('select * from posts order by post_id desc limit ? offset ?', (pager['qnty'], (pager['page']-1)*pager['qnty'], ))
posts = cur_posts.fetchall()
psers = {}
if posts:
pager['list'] = len(posts)
cur_users.execute('select * from users where user_id in (' + ','.join(str(i['user_id']) for i in posts) + ')')
psers = self.utils().array_keyto(cur_users.fetchall(), 'user_id')
cur_posts.close()
cur_users.close()
self.render('admin/posts.html', pager = pager, posts = posts, psers = psers)
class Admin_PostHiddenCtrl(AdminCtrl):
@admin
def post(self):
try:
post_id = self.input('post_id')
con = self.dbase('posts')
cur = con.cursor()
cur.execute('update posts set post_stat = 0 where post_id = ?', (post_id, ))
con.commit()
cur.close()
self.flash(1)
except:
self.flash(0)
class Admin_PostCreateCtrl(AdminCtrl):
@admin
def get(self):
cur = self.dbase('terms').cursor()
cur.execute('select * from terms order by term_id desc, term_refc desc limit 9')
terms = cur.fetchall()
cur.close()
mode = self.input('mode', None)
self.render('admin/post-create.html', mode = mode, terms = terms)
@admin
def post(self):
try:
user = self.current_user
post_type = self.input('post_type', 'blog')
post_title = self.input('post_title')
post_descp = self.input('post_descp')
post_author = self.input('post_author')
post_source = self.input('post_source')
post_summary = self.input('post_summary')
post_content = self.input('post_content')
post_rank = self.input('post_rank')
post_stat = self.input('post_stat', 0)
post_ptms = int(self.timer().mktime(self.timer().strptime(self.input('post_ptms'), '%Y-%m-%d %H:%M:%S')))
post_ctms = self.stime()
post_utms = post_ctms
term_list = []
for term_name in self.input('term_list').split(' '):
if term_name == '':
continue
term_list.append(term_name)
if len(term_list) > 10:
self.flash(0, {'msg': '标签数量限制不能超过 10 个'})
return
con_posts = self.dbase('posts')
cur_posts = con_posts.cursor()
con_terms = self.dbase('terms')
cur_terms = con_terms.cursor()
term_imap = {}
term_ctms = self.stime()
for term_name in term_list:
cur_terms.execute('select term_id from terms where term_name = ?', (term_name ,))
term_id = cur_terms.fetchone()
if term_id:
term_id = term_id['term_id']
else:
cur_terms.execute('insert or ignore into terms (term_name, term_ctms) values (?, ?)', (term_name , term_ctms, ))
if cur_terms.lastrowid:
term_id = cur_terms.lastrowid
if term_id:
term_imap[term_id] = term_name
cur_posts.execute('insert into posts (user_id, post_type, post_title, post_descp, post_author, post_source, post_summary, post_content,post_stat, post_rank, post_ptms, post_ctms, post_utms) values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', \
(user['user_id'], post_type, post_title, post_descp, post_author, post_source, post_summary, post_content, post_stat, post_rank, post_ptms, post_ctms, post_utms ,))
post_id = cur_posts.lastrowid
if term_imap:
for term_id in term_imap:
cur_posts.execute('insert or ignore into post_terms (post_id, term_id) values (' + str(post_id) + ',' + str(term_id) + ')')
if term_imap:
cur_terms.execute('update terms set term_refc = term_refc + 1 where term_id in (' + ','.join([str(i) for i in term_imap.keys()]) + ')')
con_posts.commit()
cur_posts.close()
con_terms.commit()
con_terms.close()
self.model('alogs').add(self.dbase('alogs'), '新增文章:' + str(post_id), user_ip = self.request.remote_ip, user_id = user['user_id'], user_name = user['user_name'])
self.flash(1, {'url': '/admin/post?post_id=' + str(post_id)})
except:
self.flash(0)
class Admin_PostCtrl(AdminCtrl):
@admin
def get(self):
post_id = self.input('post_id')
con_posts = self.dbase('posts')
cur_posts = con_posts.cursor()
cur_posts.execute('select * from posts where post_id = ?', (post_id, ))
post = cur_posts.fetchone()
if not post:
cur_posts.close()
return self.send_error(404)
mode = self.input('mode', None)
con_terms = self.dbase('terms')
cur_terms = con_terms.cursor()
cur_terms.execute('select * from terms order by term_id desc, term_refc desc limit 9')
terms = cur_terms.fetchall()
ptids = {}
ptags = {}
cur_posts.execute('select post_id,term_id from post_terms where post_id = ?', (post_id, ))
ptids = cur_posts.fetchall()
if ptids:
cur_terms.execute('select * from terms where term_id in (' + ','.join(str(i['term_id']) for i in ptids) + ')')
ptags = cur_terms.fetchall()
if ptags:
ptids = self.utils().array_group(ptids, 'post_id')
ptags = self.utils().array_keyto(ptags, 'term_id')
cur_posts.close()
cur_terms.close()
self.render('admin/post.html', mode = mode, post = post, terms = terms, ptids = ptids, ptags = ptags)
@admin
def post(self):
try:
user = self.current_user
post_id = self.input('post_id')
post_title = self.input('post_title')
post_descp = self.input('post_descp')
post_author = self.input('post_author')
post_source = self.input('post_source')
post_summary = self.input('post_summary')
post_content = self.input('post_content')
post_rank = self.input('post_rank')
post_stat = self.input('post_stat', 0)
post_ptms = int(self.timer().mktime(self.timer().strptime(self.input('post_ptms'), '%Y-%m-%d %H:%M:%S')))
post_utms = self.stime()
term_list = []
for term_name in self.input('term_list').split(' '):
if term_name == '':
continue
term_list.append(term_name)
if len(term_list) > 10:
self.flash(0, {'msg': '标签数量限制不能超过 10 个'})
return
con_posts = self.dbase('posts')
cur_posts = con_posts.cursor()
con_terms = self.dbase('terms')
cur_terms = con_terms.cursor()
cur_posts.execute('select * from posts where post_id = ?', (post_id, ))
post = cur_posts.fetchone()
if not post:
cur_posts.close()
cur_terms.close()
self.flash(0, '没有指定文章ID')
return
term_imap = {}
term_ctms = self.stime()
for term_name in term_list:
cur_terms.execute('select term_id from terms where term_name = ?', (term_name ,))
term_id = cur_terms.fetchone()
if term_id:
term_id = term_id['term_id']
else:
cur_terms.execute('insert or ignore into terms (term_name, term_ctms) values (?, ?)', (term_name , term_ctms, ))
if cur_terms.lastrowid:
term_id = cur_terms.lastrowid
if term_id:
term_imap[term_id] = term_name
cur_posts.execute('select term_id from post_terms where post_id = ?', (post_id, ))
post_tids = cur_posts.fetchall()
cur_posts.execute('update posts set user_id=?,post_title=?,post_descp=?,post_author=?,post_source=?,post_summary=?,post_content=?,post_stat=?,post_rank=?,post_ptms=?,post_utms=? where post_id=?', \
(user['user_id'], post_title, post_descp, post_author, post_source, post_summary, post_content, post_stat, post_rank, post_ptms, post_utms, post_id,))
cur_posts.execute('delete from post_terms where post_id = ?', (post_id,))
if term_imap:
for term_id in term_imap:
cur_posts.execute('insert or ignore into post_terms (post_id, term_id) values (' + str(post_id) + ',' + str(term_id) + ')')
if post_tids:
cur_terms.execute('update terms set term_refc = term_refc - 1 where term_id in (' + ','.join([str(i['term_id']) for i in post_tids]) + ')')
if term_imap:
cur_terms.execute('update terms set term_refc = term_refc + 1 where term_id in (' + ','.join([str(i) for i in term_imap.keys()]) + ')')
con_posts.commit()
cur_posts.close()
con_terms.commit()
cur_terms.close()
self.model('alogs').add(self.dbase('alogs'), '更新文章:' + str(post_id), user_ip = self.request.remote_ip, user_id = user['user_id'], user_name = user['user_name'])
self.flash(1)
except:
self.flash(0)
| bsd-3-clause | -2,849,066,017,734,671,000 | 38.519841 | 252 | 0.520835 | false |
Telestream/telestream-cloud-python-sdk | telestream_cloud_notifications_sdk/test/test_params.py | 1 | 1740 | # coding: utf-8
"""
Notifications API
Notifications # noqa: E501
The version of the OpenAPI document: 2.1.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import telestream_cloud_notifications
from telestream_cloud_notifications.models.params import Params # noqa: E501
from telestream_cloud_notifications.rest import ApiException
class TestParams(unittest.TestCase):
"""Params unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def make_instance(self, include_optional):
"""Test Params
            include_optional is a boolean; when False only required
params are included, when True both required and
optional params are included """
# model = telestream_cloud_notifications.models.params.Params() # noqa: E501
if include_optional :
return Params(
addresses = [
'0'
],
url = '0',
method = 'GET',
retries = 56,
content_type = 'application/json',
topic_arn = '0',
role_arn = '0',
topic_endpoint = '0',
access_key = '0',
project_id = '0',
topic_name = '0'
)
else :
return Params(
)
def testParams(self):
"""Test Params"""
inst_req_only = self.make_instance(include_optional=False)
inst_req_and_optional = self.make_instance(include_optional=True)
if __name__ == '__main__':
unittest.main()
| mit | 7,053,314,102,232,006,000 | 25.769231 | 85 | 0.556322 | false |
ppqm/fitting | fitter/fit.py | 1 | 9239 |
import sklearn
import sklearn.model_selection
import time
import itertools
import functools
import multiprocessing as mp
import os
import subprocess
import time
import copy
import json
import numpy as np
import pandas as pd
from numpy.linalg import norm
from scipy.optimize import minimize
import rmsd
import joblib
import mndo
cachedir = '.pycache'
memory = joblib.Memory(cachedir, verbose=0)
def get_penalty(calc_properties, refs_properties, property_weights, keys=None):
penalty = 0.0
n = 0
return penalty
@memory.cache
def load_data():
reference = "../dataset-qm9/reference.csv"
reference = pd.read_csv(reference)
filenames = reference["name"]
# energies = reference["binding energy"]
atoms_list = []
coord_list = []
charges = []
titles = []
for filename in filenames:
titles.append(filename)
charges.append(0)
filename = "../dataset-qm9/xyz/" + filename + ".xyz"
atoms, coord = rmsd.get_coordinates_xyz(filename)
atoms_list.append(atoms)
coord_list.append(coord)
offset = 10+100
to_offset = 110+100
atoms_list = atoms_list[offset:to_offset]
coord_list = coord_list[offset:to_offset]
charges = charges[offset:to_offset]
titles = titles[offset:to_offset]
reference = reference[offset:to_offset]
return atoms_list, coord_list, charges, titles, reference
def minimize_parameters(mols_atoms, mols_coords, reference_properties, start_parameters,
n_procs=1,
method="PM3",
ignore_keys=['DD2','DD3','PO1','PO2','PO3','PO9','HYF','CORE','EISOL','FN1','FN2','FN3','GSCAL','BETAS','ZS']):
"""
"""
n_mols = len(mols_atoms)
# Select header
header = """{:} 1SCF MULLIK PRECISE charge={{:}} iparok=1 jprint=5
nextmol=-1
TITLE {{:}}"""
header = header.format(method)
filename = "_tmp_optimizer"
inputtxt = mndo.get_inputs(mols_atoms, mols_coords, np.zeros(n_mols), range(n_mols), header=header)
with open(filename, 'w') as f:
f.write(inputtxt)
# Select atom parameters to optimize
atoms = [np.unique(atom) for atom in mols_atoms]
atoms = list(itertools.chain(*atoms))
atoms = np.unique(atoms)
parameters_values = []
parameters_keys = []
parameters = {}
# Select parameters
for atom in atoms:
atom_params = start_parameters[atom]
current = {}
for key in atom_params:
if key in ignore_keys: continue
value = atom_params[key]
current[key] = value
parameters_values.append(value)
parameters_keys.append([atom, key])
parameters[atom] = current
# Define penalty func
def penalty(params, debug=True):
for param, key in zip(params, parameters_keys):
parameters[key[0]][key[1]] = param
mndo.set_params(parameters)
properties_list = mndo.calculate(filename)
calc_energies = np.array([properties["energy"] for properties in properties_list])
diff = reference_properties - calc_energies
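        # NaN entries correspond to failed MNDO calculations; they are
        # replaced with a large constant so the optimizer is steered away
        # from such parameter sets instead of crashing (reading of intent).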
idxs = np.argwhere(np.isnan(diff))
diff[idxs] = 700.0
error = np.abs(diff)
error = error.mean()
if debug:
print("penalty: {:10.2f}".format(error))
return error
def penalty_properties(properties_list):
calc_energies = np.array([properties["energy"] for properties in properties_list])
diff = reference_properties - calc_energies
idxs = np.argwhere(np.isnan(diff))
diff[idxs] = 700.0
error = np.abs(diff)
error = error.mean()
return error
def jacobian(params, dh=10**-5, debug=False):
        # TODO: parallelize this loop
grad = []
for i, p in enumerate(params):
dparams = copy.deepcopy(params)
dparams[i] += dh
forward = penalty(dparams, debug=False)
dparams[i] -= (2.0 * dh)
backward = penalty(dparams, debug=False)
de = forward - backward
grad.append(de/(2.0 * dh))
grad = np.array(grad)
if debug:
nm = np.linalg.norm(grad)
print("penalty grad: {:10.2f}".format(nm))
return grad
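    # Central finite differences: dE/dp_i ~ (E(p + h*e_i) - E(p - h*e_i)) / (2h),
    # one parameter at a time, i.e. 2*len(params) penalty evaluations per call.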
def jacobian_parallel(params, dh=10**-5, procs=1):
"""
"""
for param, key in zip(params, parameters_keys):
parameters[key[0]][key[1]] = param
params_grad = mndo.numerical_jacobian(inputtxt, parameters, n_procs=procs, dh=dh)
grad = []
for atom, key in parameters_keys:
forward_mols, backward_mols = params_grad[atom][key]
penalty_forward = penalty_properties(forward_mols)
penalty_backward = penalty_properties(backward_mols)
de = penalty_forward - penalty_backward
grad.append(de/(2.0 * dh))
grad = np.array(grad)
return grad
start_error = penalty(parameters_values)
# check grad
dh = 10**-5
t = time.time()
grad = jacobian(parameters_values, dh=dh)
nm = np.linalg.norm(grad)
secs = time.time() - t
print("penalty grad: {:10.2f} time: {:10.2f}".format(nm, secs))
t = time.time()
grad = jacobian_parallel(parameters_values, procs=2, dh=dh)
nm = np.linalg.norm(grad)
secs = time.time() - t
print("penalty grad: {:10.2f} time: {:10.2f}".format(nm, secs))
quit()
res = minimize(penalty, parameters_values,
method="L-BFGS-B",
jac=jacobian,
options={"maxiter": 1000, "disp": True})
parameters_values = res.x
error = penalty(parameters_values)
for param, key in zip(parameters_values, parameters_keys):
parameters[key[0]][key[1]] = param
end_parameters = parameters
return end_parameters, error
def learning_curve(
mols_atoms,
mols_coords,
reference_properties,
start_parameters):
fold_five = sklearn.model_selection.KFold(n_splits=5, random_state=42, shuffle=True)
n_items = len(mols_atoms)
X = list(range(n_items))
score = []
for train_idxs, test_idxs in fold_five.split(X):
train_atoms = [mols_atoms[i] for i in train_idxs]
train_coords = [mols_coords[i] for i in train_idxs]
train_properties = reference_properties[train_idxs]
test_atoms = [mols_atoms[i] for i in test_idxs]
test_coords = [mols_coords[i] for i in test_idxs]
test_properties = reference_properties[test_idxs]
train_parameters, train_error = minimize_parameters(train_atoms, train_coords, train_properties, start_parameters)
print(train_parameters)
quit()
return
def main():
import argparse
import sys
description = """"""
parser = argparse.ArgumentParser(
usage='%(prog)s [options]',
description=description,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument('-f', '--format', action='store', help='', metavar='fmt')
parser.add_argument('-s', '--settings', action='store', help='', metavar='json')
parser.add_argument('-p', '--parameters', action='store', help='', metavar='json')
parser.add_argument('-o', '--results_parameters', action='store', help='', metavar='json')
parser.add_argument('--methods', action='store', help='', metavar='str')
args = parser.parse_args()
mols_atoms, mols_coords, mols_charges, titles, reference = load_data()
ref_energies = reference.iloc[:,1].tolist()
ref_energies = np.array(ref_energies)
with open(args.parameters, 'r') as f:
start_params = f.read()
start_params = json.loads(start_params)
# end_params = minimize_parameters(mols_atoms, mols_coords, ref_energies, start_params)
end_params = learning_curve(mols_atoms, mols_coords, ref_energies, start_params)
print(end_params)
quit()
# TODO select reference
# TODO prepare input file
filename = "_tmp_optimizer"
txt = mndo.get_inputs(atoms_list, coord_list, charges, titles)
f = open(filename, 'w')
f.write(txt)
f.close()
# TODO prepare parameters
parameters = np.array([
-99.,
-77.,
2.,
-32.,
3.,
])
parameter_keys = [
["O", "USS"],
["O", "UPP"],
["O", "ZP"],
["O", "BETAP"],
["O", "ALP"],
]
parameter_dict = {}
parameter_dict["O"] = {}
# TODO calculate penalty
# properties_list = mndo.calculate(filename)
def penalty(params):
for param, key in zip(params, parameter_keys):
parameter_dict[key[0]][key[1]] = param
mndo.set_params(parameter_dict)
properties_list = mndo.calculate(filename)
calc_energies = np.array([properties["energy"] for properties in properties_list])
diff = ref_energies - calc_energies
idxs = np.argwhere(np.isnan(diff))
diff[idxs] = 700.0
error = diff.mean()
return error
print(penalty(parameters))
status = minimize(penalty, parameters,
method="L-BFGS-B",
options={"maxiter": 1000, "disp": True})
print()
print(status)
# TODO optimize
return
if __name__ == "__main__":
main()
| cc0-1.0 | -4,809,500,087,798,806,000 | 22.997403 | 122 | 0.603312 | false |
advancedplotting/aplot | python/plotserv/api_annotations.py | 1 | 8009 | # Copyright (c) 2014-2015, Heliosphere Research LLC
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
Handles VIs in "api_annotations".
"""
import numpy as np
from matplotlib import pyplot as plt
from .core import resource
from .terminals import remove_none
from . import filters
from . import errors
@resource('text')
def text(ctx, a):
""" Display text on the plot """
plotid = a.plotid()
x = a.float('x')
y = a.float('y')
s = a.string('s')
relative = a.bool('coordinates')
textprops = a.text()
display = a.display()
ctx.set(plotid)
ax = plt.gca()
# None-finite values here mean we skip the plot
if x is None or y is None:
return
k = textprops._k()
k.update(display._k())
k['clip_on'] = True
if relative:
k['transform'] = ax.transAxes
remove_none(k)
plt.text(x, y, s, **k)
@resource('hline')
def hline(ctx, a):
""" Plot a horizontal line """
plotid = a.plotid()
y = a.float('y')
xmin = a.float('xmin')
xmax = a.float('xmax')
line = a.line()
display = a.display()
ctx.set(plotid)
ctx.fail_if_polar()
# Non-finite value provided
if y is None:
return
k = { 'xmin': xmin,
'xmax': xmax,
'linewidth': line.width,
'linestyle': line.style,
'color': line.color if line.color is not None else 'k', }
k.update(display._k())
remove_none(k)
plt.axhline(y, **k)
@resource('vline')
def vline(ctx, a):
""" Plot a vertical line """
plotid = a.plotid()
x = a.float('x')
ymin = a.float('ymin')
ymax = a.float('ymax')
line = a.line()
display = a.display()
ctx.set(plotid)
ctx.fail_if_polar()
# Non-finite value provided
if x is None:
return
k = { 'ymin': ymin,
'ymax': ymax,
'linewidth': line.width,
'linestyle': line.style,
'color': line.color if line.color is not None else 'k', }
k.update(display._k())
remove_none(k)
plt.axvline(x, **k)
@resource('colorbar')
def colorbar(ctx, a):
""" Display a colorbar """
plotid = a.plotid()
label = a.string('label')
ticks = a.dbl_1d('ticks')
ticklabels = a.string_1d('ticklabels')
ctx.set(plotid)
# If no colormapped object has been plotted, MPL complains.
# We permit this, and simply don't add the colorbar.
if ctx.mappable is None:
return
c = plt.colorbar(ctx.mappable)
# Don't bother setting an empty label
if len(label) > 0:
c.set_label(label)
# Both specified
if len(ticks) > 0 and len(ticklabels) > 0:
ticks, ticklabels = filters.filter_1d(ticks, ticklabels)
c.set_ticks(ticks)
c.set_ticklabels(ticklabels)
# Just ticks specified
elif len(ticks) > 0:
ticks = ticks[np.isfinite(ticks)]
c.set_ticks(ticks)
# Just ticklabels specified
else:
# Providing zero-length "ticks" array invokes auto-ticking, in which
# case any ticklabels are ignored.
pass
@resource('legend')
def legend(ctx, a):
""" Represents Legend.vi.
Note that there is no Positions enum on the Python side; the MPL
    values are hard-coded into the LabVIEW control.
"""
POSITIONS = { 0: 0,
1: 1,
2: 9,
3: 2,
4: 6,
5: 3,
6: 8,
7: 4,
8: 7,
9: 10 }
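    # The right-hand values are standard matplotlib legend `loc` codes:
    # 0=best, 1=upper right, 2=upper left, 3=lower left, 4=lower right,
    # 6=center left, 7=center right, 8=lower center, 9=upper center, 10=center.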
plotid = a.plotid()
position = a.enum('position', POSITIONS)
ctx.set(plotid)
k = {'loc': position, 'fontsize': 'medium'}
remove_none(k)
if len(ctx.legend_entries) > 0:
objects, labels = zip(*ctx.legend_entries)
plt.legend(objects, labels, **k)
@resource('label')
def label(ctx, a):
""" Title, X axis and Y axis labels. """
LOCATIONS = {0: 'title', 1: 'xlabel', 2: 'ylabel'}
plotid = a.plotid()
location = a.enum('kind', LOCATIONS)
label = a.string('label')
text = a.text()
ctx.set(plotid)
k = text._k()
if location == 'title':
plt.title(label, **k)
elif location == 'xlabel':
plt.xlabel(label, **k)
elif location == 'ylabel':
ctx.fail_if_polar()
plt.ylabel(label, **k)
else:
pass
@resource('circle')
def circle(ctx, a):
""" Draw a circle on a rectangular plot """
plotid = a.plotid()
x = a.float('x')
y = a.float('y')
radius = a.float('radius')
color = a.color('color')
line = a.line()
display = a.display()
f = ctx.set(plotid)
ctx.fail_if_polar()
ctx.fail_if_log_symlog()
# Like Text.vi, if any critical input is Nan we do nothing
if x is None or y is None or radius is None:
return
# Catch this before MPL complains
if radius <= 0:
return
k = { 'edgecolor': line.color,
'linestyle': line.style,
'linewidth': line.width,
'facecolor': color if color is not None else '#bbbbbb', }
k.update(display._k())
remove_none(k)
c = plt.Circle((x,y), radius, **k)
f.gca().add_artist(c)
@resource('rectangle')
def rectangle(ctx, a):
""" Draw a rectangle """
plotid = a.plotid()
x = a.float('x')
y = a.float('y')
width = a.float('width')
height = a.float('height')
color = a.color('color')
line = a.line()
display = a.display()
f = ctx.set(plotid)
ctx.fail_if_symlog()
# Like Text.vi, if any critical input is Nan we do nothing
if x is None or y is None or width is None or height is None:
return
if width == 0 or height == 0:
return
k = { 'edgecolor': line.color,
'linestyle': line.style,
'linewidth': line.width,
'facecolor': color if color is not None else '#bbbbbb', }
k.update(display._k())
remove_none(k)
r = plt.Rectangle((x,y), width, height, **k)
f.gca().add_artist(r) | bsd-3-clause | 3,550,723,003,300,049,000 | 25.611296 | 77 | 0.558122 | false |
henriquegemignani/randovania | randovania/gui/main_window.py | 1 | 25113 | import functools
import json
import logging
import os
import platform
import subprocess
from functools import partial
from pathlib import Path
from typing import Optional, List
from PySide2 import QtCore, QtWidgets, QtGui
from PySide2.QtCore import QUrl, Signal, Qt
from qasync import asyncSlot
from randovania import VERSION
from randovania.game_description.resources.trick_resource_info import TrickResourceInfo
from randovania.games.game import RandovaniaGame
from randovania.gui.generated.main_window_ui import Ui_MainWindow
from randovania.gui.lib import common_qt_lib, async_dialog, theme
from randovania.gui.lib.trick_lib import used_tricks, difficulties_for_trick
from randovania.gui.lib.window_manager import WindowManager
from randovania.interface_common import update_checker
from randovania.interface_common.enum_lib import iterate_enum
from randovania.interface_common.options import Options
from randovania.interface_common.preset_manager import PresetManager
from randovania.layout.layout_description import LayoutDescription
from randovania.layout.trick_level import LayoutTrickLevel
from randovania.resolver import debug
_DISABLE_VALIDATION_WARNING = """
<html><head/><body>
<p>While it sometimes throws errors, the validation is what guarantees that your seed is completable.<br/>
Do <span style=" font-weight:600;">not</span> disable if you're uncomfortable with possibly unbeatable seeds.
</p><p align="center">Are you sure you want to disable validation?</p></body></html>
"""
def _update_label_on_show(label: QtWidgets.QLabel, text: str):
def showEvent(_):
if label._delayed_text is not None:
label.setText(label._delayed_text)
label._delayed_text = None
label._delayed_text = text
label.showEvent = showEvent
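# NOTE: the text is applied lazily in showEvent so that the markdown-rendered
# changelog is only set once the label actually becomes visible (assumed
# intent; see the use in _on_releases_data below).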
class MainWindow(WindowManager, Ui_MainWindow):
newer_version_signal = Signal(str, str)
options_changed_signal = Signal()
_is_preview_mode: bool = False
menu_new_version: Optional[QtWidgets.QAction] = None
_current_version_url: Optional[str] = None
_options: Options
_data_visualizer: Optional[QtWidgets.QWidget] = None
_map_tracker: QtWidgets.QWidget
_preset_manager: PresetManager
GameDetailsSignal = Signal(LayoutDescription)
InitPostShowSignal = Signal()
@property
def _tab_widget(self):
return self.main_tab_widget
@property
def preset_manager(self) -> PresetManager:
return self._preset_manager
@property
def main_window(self) -> QtWidgets.QMainWindow:
return self
@property
def is_preview_mode(self) -> bool:
return self._is_preview_mode
def __init__(self, options: Options, preset_manager: PresetManager,
network_client, preview: bool):
super().__init__()
self.setupUi(self)
self.setWindowTitle("Randovania {}".format(VERSION))
self._is_preview_mode = preview
self.setAcceptDrops(True)
common_qt_lib.set_default_window_icon(self)
# Remove all hardcoded link color
about_document: QtGui.QTextDocument = self.about_text_browser.document()
about_document.setHtml(about_document.toHtml().replace("color:#0000ff;", ""))
self.browse_racetime_label.setText(self.browse_racetime_label.text().replace("color:#0000ff;", ""))
self.intro_label.setText(self.intro_label.text().format(version=VERSION))
self._preset_manager = preset_manager
self.network_client = network_client
if preview:
debug.set_level(2)
# Signals
self.newer_version_signal.connect(self.display_new_version)
self.options_changed_signal.connect(self.on_options_changed)
self.GameDetailsSignal.connect(self._open_game_details)
self.InitPostShowSignal.connect(self.initialize_post_show)
self.intro_play_now_button.clicked.connect(lambda: self.welcome_tab_widget.setCurrentWidget(self.tab_play))
self.open_faq_button.clicked.connect(self._open_faq)
self.open_database_viewer_button.clicked.connect(partial(self._open_data_visualizer_for_game,
RandovaniaGame.PRIME2))
for game in RandovaniaGame:
self.hint_item_names_game_combo.addItem(game.long_name, game)
self.hint_location_game_combo.addItem(game.long_name, game)
self.hint_item_names_game_combo.currentIndexChanged.connect(self._update_hints_text)
self.hint_location_game_combo.currentIndexChanged.connect(self._update_hint_locations)
self.import_permalink_button.clicked.connect(self._import_permalink)
self.import_game_file_button.clicked.connect(self._import_spoiler_log)
self.browse_racetime_button.clicked.connect(self._browse_racetime)
self.create_new_seed_button.clicked.connect(
lambda: self.welcome_tab_widget.setCurrentWidget(self.tab_create_seed))
# Menu Bar
for action, game in ((self.menu_action_prime_1_data_visualizer, RandovaniaGame.PRIME1),
(self.menu_action_prime_2_data_visualizer, RandovaniaGame.PRIME2),
(self.menu_action_prime_3_data_visualizer, RandovaniaGame.PRIME3)):
action.triggered.connect(partial(self._open_data_visualizer_for_game, game))
for action, game in ((self.menu_action_edit_prime_1, RandovaniaGame.PRIME1),
(self.menu_action_edit_prime_2, RandovaniaGame.PRIME2),
(self.menu_action_edit_prime_3, RandovaniaGame.PRIME3)):
action.triggered.connect(partial(self._open_data_editor_for_game, game))
self.menu_action_item_tracker.triggered.connect(self._open_item_tracker)
self.menu_action_map_tracker.triggered.connect(self._on_menu_action_map_tracker)
self.menu_action_edit_existing_database.triggered.connect(self._open_data_editor_prompt)
self.menu_action_validate_seed_after.triggered.connect(self._on_validate_seed_change)
self.menu_action_timeout_generation_after_a_time_limit.triggered.connect(self._on_generate_time_limit_change)
self.menu_action_dark_mode.triggered.connect(self._on_menu_action_dark_mode)
self.menu_action_open_auto_tracker.triggered.connect(self._open_auto_tracker)
self.menu_action_previously_generated_games.triggered.connect(self._on_menu_action_previously_generated_games)
self.menu_action_layout_editor.triggered.connect(self._on_menu_action_layout_editor)
self.menu_prime_1_trick_details.aboutToShow.connect(self._create_trick_details_prime_1)
self.menu_prime_2_trick_details.aboutToShow.connect(self._create_trick_details_prime_2)
self.menu_prime_3_trick_details.aboutToShow.connect(self._create_trick_details_prime_3)
        # Set this handler only now, so the option changes made during setup trigger it only once
options.on_options_changed = self.options_changed_signal.emit
self._options = options
self.main_tab_widget.setCurrentIndex(0)
def closeEvent(self, event):
self.generate_seed_tab.stop_background_process()
super().closeEvent(event)
def dragEnterEvent(self, event: QtGui.QDragEnterEvent):
from randovania.layout.preset_migration import VersionedPreset
valid_extensions = [
LayoutDescription.file_extension(),
VersionedPreset.file_extension(),
]
valid_extensions_with_dot = {
f".{extension}"
for extension in valid_extensions
}
for url in event.mimeData().urls():
ext = os.path.splitext(url.toLocalFile())[1]
if ext in valid_extensions_with_dot:
event.acceptProposedAction()
return
def dropEvent(self, event: QtGui.QDropEvent):
from randovania.layout.preset_migration import VersionedPreset
for url in event.mimeData().urls():
path = Path(url.toLocalFile())
if path.suffix == f".{LayoutDescription.file_extension()}":
self.open_game_details(LayoutDescription.from_file(path))
return
elif path.suffix == f".{VersionedPreset.file_extension()}":
self.main_tab_widget.setCurrentWidget(self.welcome_tab)
self.welcome_tab_widget.setCurrentWidget(self.tab_create_seed)
self.generate_seed_tab.import_preset_file(path)
return
def showEvent(self, event: QtGui.QShowEvent):
self.InitPostShowSignal.emit()
# Delayed Initialization
@asyncSlot()
async def initialize_post_show(self):
self.InitPostShowSignal.disconnect(self.initialize_post_show)
logging.info("Will initialize things in post show")
await self._initialize_post_show_body()
logging.info("Finished initializing post show")
async def _initialize_post_show_body(self):
logging.info("Will load OnlineInteractions")
from randovania.gui.main_online_interaction import OnlineInteractions
logging.info("Creating OnlineInteractions...")
self.online_interactions = OnlineInteractions(self, self.preset_manager, self.network_client, self,
self._options)
logging.info("Will load GenerateSeedTab")
from randovania.gui.generate_seed_tab import GenerateSeedTab
logging.info("Creating GenerateSeedTab...")
self.generate_seed_tab = GenerateSeedTab(self, self, self._options)
logging.info("Running GenerateSeedTab.setup_ui")
self.generate_seed_tab.setup_ui()
# Update hints text
logging.info("Will _update_hints_text")
self._update_hints_text()
logging.info("Will hide hint locations combo")
self.hint_location_game_combo.setVisible(False)
self.hint_location_game_combo.setCurrentIndex(1)
logging.info("Will update for modified options")
with self._options:
self.on_options_changed()
def _update_hints_text(self):
from randovania.gui.lib import hints_text
hints_text.update_hints_text(self.hint_item_names_game_combo.currentData(), self.hint_item_names_tree_widget)
def _update_hint_locations(self):
from randovania.gui.lib import hints_text
hints_text.update_hint_locations(self.hint_location_game_combo.currentData(), self.hint_tree_widget)
# Generate Seed
def _open_faq(self):
self.main_tab_widget.setCurrentWidget(self.help_tab)
self.help_tab_widget.setCurrentWidget(self.tab_faq)
async def generate_seed_from_permalink(self, permalink):
from randovania.interface_common.status_update_lib import ProgressUpdateCallable
from randovania.gui.dialog.background_process_dialog import BackgroundProcessDialog
def work(progress_update: ProgressUpdateCallable):
from randovania.interface_common import simplified_patcher
layout = simplified_patcher.generate_layout(progress_update=progress_update,
permalink=permalink,
options=self._options)
progress_update(f"Success! (Seed hash: {layout.shareable_hash})", 1)
return layout
new_layout = await BackgroundProcessDialog.open_for_background_task(work, "Creating a game...")
self.open_game_details(new_layout)
@asyncSlot()
async def _import_permalink(self):
from randovania.gui.dialog.permalink_dialog import PermalinkDialog
dialog = PermalinkDialog()
result = await async_dialog.execute_dialog(dialog)
if result == QtWidgets.QDialog.Accepted:
permalink = dialog.get_permalink_from_field()
await self.generate_seed_from_permalink(permalink)
def _import_spoiler_log(self):
json_path = common_qt_lib.prompt_user_for_input_game_log(self)
if json_path is not None:
layout = LayoutDescription.from_file(json_path)
self.open_game_details(layout)
@asyncSlot()
async def _browse_racetime(self):
from randovania.gui.dialog.racetime_browser_dialog import RacetimeBrowserDialog
dialog = RacetimeBrowserDialog()
if not await dialog.refresh():
return
result = await async_dialog.execute_dialog(dialog)
if result == QtWidgets.QDialog.Accepted:
await self.generate_seed_from_permalink(dialog.permalink)
def open_game_details(self, layout: LayoutDescription):
self.GameDetailsSignal.emit(layout)
def _open_game_details(self, layout: LayoutDescription):
from randovania.gui.seed_details_window import SeedDetailsWindow
details_window = SeedDetailsWindow(self, self._options)
details_window.update_layout_description(layout)
details_window.show()
self.track_window(details_window)
# Releases info
async def request_new_data(self):
from randovania.interface_common import github_releases_data
await self._on_releases_data(await github_releases_data.get_releases())
async def _on_releases_data(self, releases: Optional[List[dict]]):
import markdown
current_version = update_checker.strict_current_version()
last_changelog = self._options.last_changelog_displayed
all_change_logs, new_change_logs, version_to_display = update_checker.versions_to_display_for_releases(
current_version, last_changelog, releases)
if version_to_display is not None:
self.display_new_version(version_to_display)
if all_change_logs:
changelog_tab = QtWidgets.QWidget()
changelog_tab.setObjectName("changelog_tab")
changelog_tab_layout = QtWidgets.QVBoxLayout(changelog_tab)
changelog_tab_layout.setContentsMargins(0, 0, 0, 0)
changelog_tab_layout.setObjectName("changelog_tab_layout")
changelog_scroll_area = QtWidgets.QScrollArea(changelog_tab)
changelog_scroll_area.setWidgetResizable(True)
changelog_scroll_area.setObjectName("changelog_scroll_area")
changelog_scroll_contents = QtWidgets.QWidget()
changelog_scroll_contents.setGeometry(QtCore.QRect(0, 0, 489, 337))
changelog_scroll_contents.setObjectName("changelog_scroll_contents")
changelog_scroll_layout = QtWidgets.QVBoxLayout(changelog_scroll_contents)
changelog_scroll_layout.setObjectName("changelog_scroll_layout")
for entry in all_change_logs:
changelog_label = QtWidgets.QLabel(changelog_scroll_contents)
_update_label_on_show(changelog_label, markdown.markdown(entry))
changelog_label.setObjectName("changelog_label")
changelog_label.setWordWrap(True)
changelog_scroll_layout.addWidget(changelog_label)
changelog_scroll_area.setWidget(changelog_scroll_contents)
changelog_tab_layout.addWidget(changelog_scroll_area)
self.help_tab_widget.addTab(changelog_tab, "Change Log")
if new_change_logs:
await async_dialog.message_box(self, QtWidgets.QMessageBox.Information,
"What's new", markdown.markdown("\n".join(new_change_logs)))
with self._options as options:
options.last_changelog_displayed = current_version
def display_new_version(self, version: update_checker.VersionDescription):
if self.menu_new_version is None:
self.menu_new_version = QtWidgets.QAction("", self)
self.menu_new_version.triggered.connect(self.open_version_link)
self.menu_bar.addAction(self.menu_new_version)
self.menu_new_version.setText("New version available: {}".format(version.tag_name))
self._current_version_url = version.html_url
def open_version_link(self):
if self._current_version_url is None:
raise RuntimeError("Called open_version_link, but _current_version_url is None")
QtGui.QDesktopServices.openUrl(QUrl(self._current_version_url))
# Options
def on_options_changed(self):
self.menu_action_validate_seed_after.setChecked(self._options.advanced_validate_seed_after)
self.menu_action_timeout_generation_after_a_time_limit.setChecked(
self._options.advanced_timeout_during_generation)
self.menu_action_dark_mode.setChecked(self._options.dark_mode)
self.generate_seed_tab.on_options_changed(self._options)
theme.set_dark_theme(self._options.dark_mode)
# Menu Actions
def _open_data_visualizer_for_game(self, game: RandovaniaGame):
self.open_data_visualizer_at(None, None, game)
def open_data_visualizer_at(self,
world_name: Optional[str],
area_name: Optional[str],
game: RandovaniaGame = RandovaniaGame.PRIME2,
):
from randovania.gui.data_editor import DataEditorWindow
data_visualizer = DataEditorWindow.open_internal_data(game, False)
self._data_visualizer = data_visualizer
if world_name is not None:
data_visualizer.focus_on_world(world_name)
if area_name is not None:
data_visualizer.focus_on_area(area_name)
self._data_visualizer.show()
def _open_data_editor_for_game(self, game: RandovaniaGame):
from randovania.gui.data_editor import DataEditorWindow
self._data_editor = DataEditorWindow.open_internal_data(game, True)
self._data_editor.show()
def _open_data_editor_prompt(self):
from randovania.gui.data_editor import DataEditorWindow
database_path = common_qt_lib.prompt_user_for_database_file(self)
if database_path is None:
return
with database_path.open("r") as database_file:
self._data_editor = DataEditorWindow(json.load(database_file), database_path, False, True)
self._data_editor.show()
@asyncSlot()
async def _on_menu_action_map_tracker(self):
dialog = QtWidgets.QInputDialog(self)
dialog.setWindowTitle("Map Tracker")
dialog.setLabelText("Select preset used for the tracker.")
dialog.setComboBoxItems([preset.name for preset in self._preset_manager.all_presets])
dialog.setTextValue(self._options.selected_preset_name)
result = await async_dialog.execute_dialog(dialog)
if result == QtWidgets.QDialog.Accepted:
preset = self._preset_manager.preset_for_name(dialog.textValue())
self.open_map_tracker(preset.get_preset().configuration)
def open_map_tracker(self, configuration: "EchoesConfiguration"):
from randovania.gui.tracker_window import TrackerWindow, InvalidLayoutForTracker
try:
self._map_tracker = TrackerWindow(self._options.tracker_files_path, configuration)
except InvalidLayoutForTracker as e:
QtWidgets.QMessageBox.critical(
self,
"Unsupported configuration for Tracker",
str(e)
)
return
self._map_tracker.show()
def _open_item_tracker(self):
# Importing this at root level seems to crash linux tests :(
from PySide2.QtWebEngineWidgets import QWebEngineView
tracker_window = QtWidgets.QMainWindow()
tracker_window.setWindowTitle("Item Tracker")
tracker_window.resize(370, 380)
web_view = QWebEngineView(tracker_window)
tracker_window.setCentralWidget(web_view)
self.web_view = web_view
def update_window_icon():
tracker_window.setWindowIcon(web_view.icon())
web_view.iconChanged.connect(update_window_icon)
web_view.load(QUrl("https://spaghettitoastbook.github.io/echoes/tracker/"))
tracker_window.show()
self._item_tracker_window = tracker_window
# Difficulties stuff
def _exec_trick_details(self, popup: "TrickDetailsPopup"):
self._trick_details_popup = popup
self._trick_details_popup.setWindowModality(Qt.WindowModal)
self._trick_details_popup.open()
def _open_trick_details_popup(self, game, trick: TrickResourceInfo, level: LayoutTrickLevel):
from randovania.gui.dialog.trick_details_popup import TrickDetailsPopup
self._exec_trick_details(TrickDetailsPopup(self, self, game, trick, level))
def _create_trick_details_prime_1(self):
self.menu_prime_1_trick_details.aboutToShow.disconnect(self._create_trick_details_prime_1)
self._setup_difficulties_menu(RandovaniaGame.PRIME1, self.menu_prime_1_trick_details)
def _create_trick_details_prime_2(self):
self.menu_prime_2_trick_details.aboutToShow.disconnect(self._create_trick_details_prime_2)
self._setup_difficulties_menu(RandovaniaGame.PRIME2, self.menu_prime_2_trick_details)
def _create_trick_details_prime_3(self):
self.menu_prime_3_trick_details.aboutToShow.disconnect(self._create_trick_details_prime_3)
self._setup_difficulties_menu(RandovaniaGame.PRIME3, self.menu_prime_3_trick_details)
def _setup_difficulties_menu(self, game: RandovaniaGame, menu: QtWidgets.QMenu):
from randovania.game_description import default_database
game = default_database.game_description_for(game)
tricks_in_use = used_tricks(game)
menu.clear()
for trick in sorted(game.resource_database.trick, key=lambda _trick: _trick.long_name):
if trick not in tricks_in_use:
continue
trick_menu = QtWidgets.QMenu(self)
trick_menu.setTitle(trick.long_name)
menu.addAction(trick_menu.menuAction())
used_difficulties = difficulties_for_trick(game, trick)
for i, trick_level in enumerate(iterate_enum(LayoutTrickLevel)):
if trick_level in used_difficulties:
difficulty_action = QtWidgets.QAction(self)
difficulty_action.setText(trick_level.long_name)
trick_menu.addAction(difficulty_action)
difficulty_action.triggered.connect(
functools.partial(self._open_trick_details_popup, game, trick, trick_level))
# ==========
@asyncSlot()
async def _on_validate_seed_change(self):
old_value = self._options.advanced_validate_seed_after
new_value = self.menu_action_validate_seed_after.isChecked()
if old_value and not new_value:
box = QtWidgets.QMessageBox(self)
box.setWindowTitle("Disable validation?")
box.setText(_DISABLE_VALIDATION_WARNING)
box.setIcon(QtWidgets.QMessageBox.Warning)
box.setStandardButtons(QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No)
box.setDefaultButton(QtWidgets.QMessageBox.No)
user_response = await async_dialog.execute_dialog(box)
if user_response != QtWidgets.QMessageBox.Yes:
self.menu_action_validate_seed_after.setChecked(True)
return
with self._options as options:
options.advanced_validate_seed_after = new_value
def _on_generate_time_limit_change(self):
is_checked = self.menu_action_timeout_generation_after_a_time_limit.isChecked()
with self._options as options:
options.advanced_timeout_during_generation = is_checked
def _on_menu_action_dark_mode(self):
with self._options as options:
options.dark_mode = self.menu_action_dark_mode.isChecked()
def _open_auto_tracker(self):
from randovania.gui.auto_tracker_window import AutoTrackerWindow
self.auto_tracker_window = AutoTrackerWindow(common_qt_lib.get_game_connection(), self._options)
self.auto_tracker_window.show()
def _on_menu_action_previously_generated_games(self):
path = self._options.data_dir.joinpath("game_history")
try:
if platform.system() == "Windows":
os.startfile(path)
elif platform.system() == "Darwin":
subprocess.run(["open", path])
else:
subprocess.run(["xdg-open", path])
except OSError:
print("Exception thrown :)")
box = QtWidgets.QMessageBox(QtWidgets.QMessageBox.Information, "Game History",
f"Previously generated games can be found at:\n{path}",
QtWidgets.QMessageBox.Ok, self)
box.setTextInteractionFlags(Qt.TextSelectableByMouse)
box.show()
def _on_menu_action_layout_editor(self):
from randovania.gui.corruption_layout_editor import CorruptionLayoutEditor
self.corruption_editor = CorruptionLayoutEditor()
self.corruption_editor.show()
| gpl-3.0 | -1,612,572,667,298,678,800 | 44.330325 | 118 | 0.669454 | false |
maltsev/LatexWebOffice | app/views/document.py | 1 | 15983 | # -*- coding: utf-8 -*-
"""
* Purpose : document and project management interface
* Creation Date : 19-11-2014
* Last Modified : Tue 24 Feb 2015 15:46:51 CET
* Author : mattis
* Coauthors : christian, ingo, Kirill
* Sprintnumber : 2, 5
* Backlog entry : TEK1, 3ED9, DOK8, DO14, KOL1
"""
import os
from django.contrib.auth.decorators import login_required
from django.views.decorators.http import require_http_methods
from django.shortcuts import render
from django.views.static import serve
import settings
from app.common import util
from app.common.constants import ERROR_MESSAGES
from app.views import file, folder, project, template
from app.models.projecttemplate import ProjectTemplate
from app.models.file.file import File
from app.models.file.texfile import TexFile
from app.models.file.plaintextfile import PlainTextFile
from app.models.file.pdf import PDF
from app.models.project import Project
from app.models.folder import Folder
globalparas = {
'id': {'name': 'id', 'type': int},
'content': {'name': 'content', 'type': str},
'folderid': {'name': 'folderid', 'type': int},
'name': {'name': 'name', 'type': str},
'formatid': {'name': 'formatid', 'type': int},
# 'compilerid': {'name': 'compilerid', 'type': int},
'forcecompile': {'name': 'forcecompile', 'type': int}
}
# dictionary of available commands and the corresponding actions
# the corresponding methods are located in:
# '/app/views/project.py', '/app/views/file.py', '/app/views/folder.py' and '/app/views/collaboration.py'
available_commands = {
'projectcreate': {
'command': project.projectCreate,
'parameters': [{'para': globalparas['name'], 'stringcheck': True}]
},
'projectclone': {
'command': project.projectClone,
'parameters': [{'para': globalparas['id'], 'type': Project, 'requirerights': ['owner', 'collaborator']},
{'para': globalparas['name'], 'stringcheck': True}]
},
'projectrm': {
'command': project.projectRm,
'parameters': [{'para': globalparas['id'], 'type': Project}]
},
'projectrename': {
'command': project.projectRename,
'parameters': [{'para': globalparas['id'], 'type': Project},
{'para': globalparas['name'], 'stringcheck': True}]
},
'listprojects': {
'command': project.listProjects,
'parameters': []
},
'importzip': {
'command': project.importZip,
'parameters': []
},
'exportzip': {
'command': project.exportZip,
'parameters': [{'para': globalparas['id']}]
},
'inviteuser': {
'command': project.inviteUser,
'parameters': [{'para': globalparas['id'], 'type': Project},
{'para': globalparas['name'], 'stringcheck': True}]
},
'hasinvitedusers': {
'command': project.hasInvitedUsers,
'parameters': [{'para': globalparas['id'], 'type': Project}]
},
'listinvitedusers': {
'command': project.listInvitedUsers,
'parameters': [{'para': globalparas['id'], 'type': Project}]
},
'listunconfirmedcollaborativeprojects': {
'command': project.listUnconfirmedCollaborativeProjects,
'parameters': []
},
'activatecollaboration': {
'command': project.activateCollaboration,
'parameters': [{'para': globalparas['id'], 'type': Project, 'requirerights': ['owner', 'invitee']}]
},
'quitcollaboration': {
'command': project.quitCollaboration,
'parameters': [
{'para': globalparas['id'], 'type': Project, 'requirerights': ['owner', 'invitee', 'collaborator']}]
},
'cancelcollaboration': {
'command': project.cancelCollaboration,
'parameters': [{'para': globalparas['id'], 'type': Project},
{'para': globalparas['name'], 'stringcheck': True}]
},
'createtex': {
'command': file.createTexFile,
'parameters': [{'para': globalparas['id'], 'type': Folder, 'requirerights': ['owner', 'collaborator']},
{'para': globalparas['name'], 'filenamecheck': True}]
},
'updatefile': {
'command': file.updateFile,
'parameters': [{'para': globalparas['id'], 'type': PlainTextFile,
'requirerights': ['owner', 'collaborator'], 'lockcheck': False},
{'para': globalparas['content']}]
},
'deletefile': {
'command': file.deleteFile,
'parameters': [{'para': globalparas['id'], 'type': File,
'requirerights': ['owner', 'collaborator'], 'lockcheck': True}]
},
'renamefile': {
'command': file.renameFile,
'parameters': [{'para': globalparas['id'], 'type': File,
'requirerights': ['owner', 'collaborator'], 'lockcheck': True},
{'para': globalparas['name'], 'filenamecheck': True}]
},
'movefile': {
'command': file.moveFile,
'parameters': [{'para': globalparas['id'], 'type': File,
'requirerights': ['owner', 'collaborator'], 'lockcheck': True},
{'para': globalparas['folderid'], 'type': Folder, 'requirerights': ['owner', 'collaborator']}]
},
'uploadfiles': {
'command': file.uploadFiles,
'parameters': [{'para': globalparas['id'], 'type': Folder, 'requirerights': ['owner', 'collaborator']}]
},
'downloadfile': {
'command': file.downloadFile,
'parameters': [{'para': globalparas['id']}]
},
'gettext': {
'command': file.getText,
'parameters': [{'para': globalparas['id'], 'type': PlainTextFile, 'requirerights': ['owner', 'collaborator']}]
},
'fileinfo': {
'command': file.fileInfo,
'parameters': [{'para': globalparas['id'], 'type': File, 'requirerights': ['owner', 'collaborator']}]
},
'compile': {
'command': file.latexCompile,
'parameters': [{'para': globalparas['id'], 'type': TexFile,
'requirerights': ['owner', 'collaborator'], 'lockcheck': True},
{'para': globalparas['formatid']},
# {'para': globalparas['compilerid']},
{'para': globalparas['forcecompile']}]
},
'lockfile': {
'command': file.lockFile,
'parameters': [{'para': globalparas['id'], 'type': File, 'requirerights': ['owner', 'collaborator']}]
},
'unlockfile': {
'command': file.unlockFile,
'parameters': [{'para': globalparas['id'], 'type': File, 'requirerights': ['owner', 'collaborator']}]
},
'getlog': {
'command': file.getLog,
'parameters': [{'para': globalparas['id'], 'type': TexFile, 'requirerights': ['owner', 'collaborator']}]
},
'createdir': {
'command': folder.createDir,
'parameters': [{'para': globalparas['id'], 'type': Folder, 'requirerights': ['owner', 'collaborator']},
{'para': globalparas['name'], 'stringcheck': True}]
},
'rmdir': {
'command': folder.rmDir,
'parameters': [{'para': globalparas['id'], 'type': Folder,
'requirerights': ['owner', 'collaborator'], 'lockcheck': True}]
},
'renamedir': {
'command': folder.renameDir,
'parameters': [{'para': globalparas['id'], 'type': Folder,
'requirerights': ['owner', 'collaborator']},
{'para': globalparas['name'], 'stringcheck': True}]
},
'movedir': {
'command': folder.moveDir,
'parameters': [{'para': globalparas['id'], 'type': Folder,
'requirerights': ['owner', 'collaborator'], 'lockcheck': True},
{'para': globalparas['folderid'], 'type': Folder, 'requirerights': ['owner', 'collaborator']}]
},
'listfiles': {
'command': folder.listFiles,
'parameters': [{'para': globalparas['id'], 'type': Folder, 'requirerights': ['owner', 'collaborator']}]
},
'template2project': {
'command': template.template2Project,
'parameters': [{'para': globalparas['id'], 'type': ProjectTemplate},
{'para': globalparas['name'], 'stringcheck': True}]
},
'project2template': {
'command': template.project2Template,
'parameters': [{'para': globalparas['id'], 'type': Project, 'requirerights': ['owner', 'collaborator']},
{'para': globalparas['name'], 'stringcheck': True}]
},
'templaterm': {
'command': template.templateRm,
'parameters': [{'para': globalparas['id'], 'type': ProjectTemplate}]
},
'templaterename': {
'command': template.templateRename,
'parameters': [{'para': globalparas['id'], 'type': ProjectTemplate},
{'para': globalparas['name'], 'stringcheck': True}]
},
'listtemplates': {
'command': template.listTemplates,
'parameters': []
}
}
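# Illustrative example (not part of the original code): a POST request with
# command=projectcreate and name=MyProject passes the checks in execute() below
# and results in the call project.projectCreate(request, user, u'MyProject').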
available_commands_output = {}
for key, command_info in available_commands.items():
    parameters = []
    for paras in command_info['parameters']:
        globalparainfo = (paras['para']).copy()
        entry = {'para': globalparainfo}
        if globalparainfo.get('type'):
            del globalparainfo['type']
        parameters.append(entry)
    if key == 'uploadfiles' or key == 'importzip':
        parameters.append({'para': {'name': 'files'}})
    available_commands_output.update({key: parameters})
@login_required
def debug(request):
return render(request, 'documentPoster.html')
# interface function
# provides an interface for the communication between client and server
# reads the command passed by the client via POST data
# and executes the corresponding method
@login_required
@require_http_methods(['POST', 'GET'])
def execute(request):
if request.method == 'POST' and 'command' in request.POST:
        # get the current user
user = request.user
        # if the command key was not found,
        # return an error message
if request.POST['command'] not in available_commands:
return util.jsonErrorResponse(ERROR_MESSAGES['COMMANDNOTFOUND'], request)
args = []
        # the current command
c = available_commands[request.POST['command']]
        # the parameters of this command
paras = c['parameters']
        # iterate over all parameters of the command
for para in paras:
            # if the parameter was not found, or a parameter that is supposed to
            # contain an id contains characters that are not digits, return an error message
if request.POST.get(para['para']['name']) is None:
return util.jsonErrorResponse(ERROR_MESSAGES['MISSINGPARAMETER'] % (para['para']), request)
elif para['para']['type'] == int and (not request.POST.get(para['para']['name']).isdigit()):
return util.jsonErrorResponse(ERROR_MESSAGES['MISSINGPARAMETER'] % (para['para']), request)
            # otherwise, add the parameter to the argument list
else:
args.append(request.POST[para['para']['name']])
            # check for invalid strings
if para.get('stringcheck'):
failstring, failurereturn = util.checkObjectForInvalidString(
request.POST.get(para['para']['name']), request)
if not failstring:
return failurereturn
elif para.get('filenamecheck'):
failstring, failurereturn = util.checkFileForInvalidString(
request.POST.get(para['para']['name']), request)
if not failstring:
return failurereturn
            # check that the user has rights to the object with the given id
            # and that the object exists
if para.get('type') and para['para']['type'] == int:
objType = para.get('type')
objId = request.POST.get(para['para']['name'])
requireRights = para.get('requirerights', ['owner'])
lockcheck = para.get('lockcheck', False)
if objType == Project:
rights, failurereturn = util.checkIfProjectExistsAndUserHasRights(objId, user, request,
requireRights)
if not rights:
return failurereturn
elif objType == Folder:
rights, failurereturn = util.checkIfDirExistsAndUserHasRights(objId, user, request, requireRights, lockcheck)
if not rights:
return failurereturn
elif objType == File:
rights, failurereturn = util.checkIfFileExistsAndUserHasRights(objId, user, request, requireRights, lockcheck,
objecttype=File)
if not rights:
return failurereturn
elif objType == TexFile:
rights, failurereturn = util.checkIfFileExistsAndUserHasRights(objId, user, request, requireRights, lockcheck,
objecttype=TexFile)
if not rights:
return failurereturn
elif objType == PlainTextFile:
rights, failurereturn = util.checkIfFileExistsAndUserHasRights(objId, user, request, requireRights, lockcheck,
objecttype=PlainTextFile)
if not rights:
return failurereturn
elif objType == ProjectTemplate:
                    # check whether the template exists and the user has rights to it
emptystring, failurereturn = util.checkIfTemplateExistsAndUserHasRights(objId, user, request)
if not emptystring:
return failurereturn
        # execute the given command
return c['command'](request, user, *args)
elif request.method == 'GET' and request.GET.get('command'):
command = request.GET.get('command')
pdfid = request.GET.get('id')
texid = request.GET.get('texid')
defaultpdfPath = filepath = os.path.join(settings.BASE_DIR, 'app', 'static', 'default.pdf')
if (pdfid and not pdfid.isdigit()) or (texid and not texid.isdigit()):
return serve(request, os.path.basename(defaultpdfPath), os.path.dirname(defaultpdfPath))
if command == 'getpdf' and pdfid:
requireRights = ['owner', 'collaborator']
rights, failurereturn = util.checkIfFileExistsAndUserHasRights(pdfid, request.user, request, requireRights, lockcheck=False,
objecttype=PDF)
if not rights:
return serve(request, os.path.basename(defaultpdfPath), os.path.dirname(defaultpdfPath))
return file.getPDF(request, request.user, pdfid=pdfid, default=defaultpdfPath)
elif command == 'getpdf' and texid:
requireRights = ['owner', 'collaborator']
rights, failurereturn = util.checkIfFileExistsAndUserHasRights(texid, request.user, request, requireRights, lockcheck=False,
objecttype=TexFile)
if not rights:
return serve(request, os.path.basename(defaultpdfPath), os.path.dirname(defaultpdfPath))
return file.getPDF(request, request.user, texid=texid, default=defaultpdfPath)
return util.jsonErrorResponse(ERROR_MESSAGES['MISSINGPARAMETER'] % 'unknown', request)
| gpl-3.0 | 953,337,725,685,583,200 | 42.63388 | 136 | 0.568503 | false |
psyonara/agonizomai | sermons/models.py | 1 | 5153 | from __future__ import unicode_literals
from django.db import models
from django.template.defaultfilters import slugify
from bible.models import BibleBook
from useraccounts.models import UserAccount
class Author(models.Model):
name = models.CharField(null=False, blank=False, max_length=50)
name_slug = models.SlugField(max_length=50, null=True, blank=True, db_index=True)
def __str__(self):
return self.name
def save(self, *args, **kwargs):
if self.name_slug is None or self.name_slug == "":
self.name_slug = slugify(self.name)
super(Author, self).save(*args, **kwargs)
class AuthorSetting(models.Model):
"""
Holds user settings specific to an author.
"""
author = models.ForeignKey(Author, on_delete=models.CASCADE)
user = models.ForeignKey("useraccounts.UserAccount", on_delete=models.CASCADE)
name = models.CharField(max_length=30, db_index=True)
value = models.CharField(max_length=50)
class Series(models.Model):
name = models.CharField(null=False, blank=False, max_length=100)
name_slug = models.SlugField(max_length=100, null=True, blank=True, db_index=True)
author = models.ForeignKey(Author, null=False, blank=False, on_delete=models.CASCADE)
complete = models.BooleanField(default=False)
def __str__(self):
return "%s (%s)" % (self.name, self.author.name)
def save(self, *args, **kwargs):
if self.name_slug is None or self.name_slug == "":
self.name_slug = slugify(self.name)
super(Series, self).save(*args, **kwargs)
class Sermon(models.Model):
date_added = models.DateTimeField(auto_now_add=True)
date_preached = models.DateField(null=True, blank=True)
author = models.ForeignKey(Author, related_name="sermons", on_delete=models.CASCADE)
title = models.CharField(null=False, blank=False, max_length=100)
title_slug = models.SlugField(max_length=100, null=True, blank=True, db_index=True)
series = models.ForeignKey(
Series, null=True, blank=True, related_name="sermons", on_delete=models.CASCADE
)
ref = models.CharField(max_length=20, null=True, blank=True)
def get_audio_file(self):
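        # NOTE: media_type=1 is the "audio" entry of MediaFile.MEDIA_TYPE_CHOICES
        # (defined further below); the literal is kept to match the original code.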
        files = self.media_files.filter(media_type=1)
        return files.first()
def __str__(self):
return "%s (by %s)" % (self.title, self.author.name)
def save(self, *args, **kwargs):
if self.title_slug is None or self.title_slug == "":
self.title_slug = slugify(self.title)
super(Sermon, self).save(*args, **kwargs)
class Meta:
ordering = ["-date_preached"]
class ScriptureRef(models.Model):
sermon = models.ForeignKey(Sermon, related_name="scripture_refs", on_delete=models.CASCADE)
bible_book = models.ForeignKey(BibleBook, on_delete=models.CASCADE)
chapter_begin = models.PositiveSmallIntegerField()
chapter_end = models.PositiveSmallIntegerField()
verse_begin = models.PositiveSmallIntegerField(null=True, blank=True)
verse_end = models.PositiveSmallIntegerField(null=True, blank=True)
def __str__(self):
end_string = ""
if self.chapter_begin == self.chapter_end:
end_string += "%s %s" % (self.bible_book.name, self.chapter_begin)
if self.verse_begin is not None and self.verse_end is not None:
if self.verse_begin == self.verse_end:
end_string += ":%s" % (self.verse_begin)
else:
end_string += ":%s-%s" % (self.verse_begin, self.verse_end)
else:
end_string += "%s %s" % (self.bible_book.name, self.chapter_begin)
if self.verse_begin is None and self.verse_end is None:
end_string += "-%s" % (self.chapter_end)
else:
end_string += ":%s-%s:%s" % (self.verse_begin, self.chapter_end, self.verse_end)
return end_string
class MediaFile(models.Model):
MEDIA_TYPE_CHOICES = ((1, "audio"), (2, "video"), (3, "text"), (4, "pdf"))
LOCATION_TYPE_CHOICES = ((1, "url"),)
sermon = models.ForeignKey(Sermon, related_name="media_files", on_delete=models.CASCADE)
media_type = models.PositiveSmallIntegerField(choices=MEDIA_TYPE_CHOICES, null=False, default=1)
file_size = models.PositiveIntegerField(null=True, blank=True)
location_type = models.PositiveSmallIntegerField(
choices=LOCATION_TYPE_CHOICES, null=False, default=1
)
location = models.CharField(null=False, max_length=250)
def __str__(self):
return "%s (%s)" % (self.location, self.sermon.title)
class SermonSession(models.Model):
sermon = models.ForeignKey(Sermon, related_name="sessions", on_delete=models.CASCADE)
session_started = models.DateTimeField(auto_now_add=True)
session_updated = models.DateTimeField(auto_now=True)
position = models.PositiveSmallIntegerField(default=0) # in seconds from start of file
total_duration = models.PositiveSmallIntegerField(default=0) # in seconds
user = models.ForeignKey(UserAccount, on_delete=models.CASCADE)
completed = models.BooleanField(default=False)
| mit | -3,994,879,931,140,667,400 | 39.896825 | 100 | 0.663497 | false |
funshine/rpidemo | mqtt_oled/oled_test_luma.py | 1 | 1273 | #!/usr/bin/python
# coding: utf-8
import time
import datetime
from luma.core.interface.serial import i2c, spi
from luma.core.render import canvas
from luma.oled.device import ssd1306, ssd1325, ssd1331, sh1106
def do_nothing(obj):
pass
# rev.1 users set port=0
# substitute spi(device=0, port=0) below if using that interface
# serial = i2c(port=1, address=0x3C)
serial = spi(device=0, port=0)
# substitute ssd1331(...) or sh1106(...) below if using that device
# device = ssd1306(serial, rotate=1)
device = sh1106(serial)
# device.cleanup = do_nothing
print("Testing display Hello World")
with canvas(device) as draw:
draw.rectangle(device.bounding_box, outline="white", fill="black")
draw.text((30, 40), "Hello World", fill="white")
time.sleep(3)
print("Testing display ON/OFF...")
for _ in range(5):
time.sleep(0.5)
device.hide()
time.sleep(0.5)
device.show()
print("Testing clear display...")
time.sleep(2)
device.clear()
print("Testing screen updates...")
time.sleep(2)
for x in range(40):
with canvas(device) as draw:
now = datetime.datetime.now()
draw.text((x, 4), str(now.date()), fill="white")
draw.text((10, 16), str(now.time()), fill="white")
time.sleep(0.1)
print("Quit, cleanup...")
| mit | 2,993,683,248,832,655,000 | 23.018868 | 70 | 0.671642 | false |
jantman/nagios-scripts | check_icinga_ido.py | 1 | 6939 | #!/usr/bin/env python
"""
Script to check last update of core programstatus
and service checks in Icinga ido2db Postgres database
"""
#
# The latest version of this script lives at:
# <https://github.com/jantman/nagios-scripts/blob/master/check_icinga_ido.py>
#
# Please file bug/feature requests and submit patches through
# the above GitHub repository. Feedback and patches are greatly
# appreciated; patches are preferred as GitHub pull requests, but
# emailed patches are also accepted.
#
# Copyright 2014 Jason Antman <[email protected]> all rights reserved.
# See the above git repository's LICENSE file for license terms (GPLv3).
#
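# Example invocation (illustrative values):
#   ./check_icinga_ido.py -H dbhost -n icinga_ido -u icinga-ido -a icinga -w 120 -c 600
#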
import sys
from datetime import datetime
import pytz
import logging
import argparse
from math import ceil
import nagiosplugin
import psycopg2
import pprint
_log = logging.getLogger('nagiosplugin')
utc = pytz.utc
class IdoStatus(nagiosplugin.Resource):
"""Check age of ido2db programstatus and last service check in postgres database"""
def __init__(self, db_host, db_name, db_user, db_pass, db_port=5432):
self.db_host = db_host
self.db_user = db_user
self.db_pass = db_pass
self.db_port = db_port
self.db_name = db_name
def probe(self):
_log.info("connecting to Postgres DB %s on %s" % (self.db_name, self.db_host))
try:
conn_str = "dbname='%s' user='%s' host='%s' password='%s' port='%s' application_name='%s'" % (
self.db_name,
self.db_user,
self.db_host,
self.db_pass,
self.db_port,
"check_icinga_ido_core.py",
)
_log.debug("psycopg2 connect string: %s" % conn_str)
conn = psycopg2.connect(conn_str)
        except psycopg2.OperationalError as e:
_log.info("got psycopg2.OperationalError: %s" % e.__str__())
raise nagiosplugin.CheckError(e.__str__())
_log.info("connected to database")
        # these queries are adapted for Postgres from https://wiki.icinga.org/display/testing/Special+IDOUtils+Queries
cur = conn.cursor()
_log.debug("got cursor")
sql = "SELECT EXTRACT(EPOCH FROM (NOW()-status_update_time)) AS age from icinga_programstatus where (UNIX_TIMESTAMP(status_update_time) > UNIX_TIMESTAMP(NOW())-60);"
_log.debug("executing query: %s" % sql)
cur.execute(sql)
        row = cur.fetchone()
        _log.debug("result: %s" % row)
        if row is None:
            raise nagiosplugin.CheckError("no programstatus update within the last 60s - is ido2db running?")
        programstatus_age = ceil(row[0])
sql = "select (UNIX_TIMESTAMP(NOW())-UNIX_TIMESTAMP(ss.status_update_time)) as age from icinga_servicestatus ss join icinga_objects os on os.object_id=ss.service_object_id order by status_update_time desc limit 1;"
_log.debug("executing query: %s" % sql)
cur.execute(sql)
        row = cur.fetchone()
        _log.debug("result: %s" % row)
        if row is None:
            raise nagiosplugin.CheckError("no service status rows found in IDO database")
        last_check_age = ceil(row[0])
return [
nagiosplugin.Metric('programstatus_age', programstatus_age, uom='s', min=0),
nagiosplugin.Metric('last_check_age', last_check_age, uom='s', min=0),
]
class LoadSummary(nagiosplugin.Summary):
"""LoadSummary is used to provide custom outputs to the check"""
def __init__(self, db_name):
self.db_name = db_name
def _human_time(self, seconds):
"""convert an integer seconds into human-readable hms"""
mins, secs = divmod(seconds, 60)
hours, mins = divmod(mins, 60)
return '%02d:%02d:%02d' % (hours, mins, secs)
def _state_marker(self, state):
"""return a textual marker for result states"""
if type(state) == type(nagiosplugin.state.Critical):
return " (Crit)"
if type(state) == type(nagiosplugin.state.Warn):
return " (Warn)"
if type(state) == type(nagiosplugin.state.Unknown):
return " (Unk)"
return ""
def status_line(self, results):
if type(results.most_significant_state) == type(nagiosplugin.state.Unknown):
# won't have perf values, so special handling
return results.most_significant[0].hint.splitlines()[0]
return "Last Programstatus Update %s ago%s; Last Service Status Update %s ago%s (%s)" % (
self._human_time(results['programstatus_age'].metric.value),
self._state_marker(results['programstatus_age'].state),
self._human_time(results['last_check_age'].metric.value),
self._state_marker(results['last_check_age'].state),
self.db_name)
def ok(self, results):
return self.status_line(results)
def problem(self, results):
return self.status_line(results)
@nagiosplugin.guarded
def main():
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument('-H', '--hostname', dest='hostname',
help='Postgres server hostname')
parser.add_argument('-p', '--port', dest='port',
default='5432',
help='Postgres port (Default: 5432)')
parser.add_argument('-u', '--username', dest='username',
default='icinga-ido',
help='Postgres username (Default: icinga-ido)')
parser.add_argument('-a', '--password', dest='password',
default='icinga',
help='Postgres password (Default: icinga)')
parser.add_argument('-n', '--db-name', dest='db_name',
default='icinga_ido',
help='Postgres database name (Default: icinga_ido)')
parser.add_argument('-w', '--warning', dest='warning',
default='120',
help='warning threshold for age of last programstatus or service status update, in seconds (Default: 120 / 2m)')
parser.add_argument('-c', '--critical', dest='critical',
default='600',
help='critical threshold for age of last programstatus or service status update, in seconds (Default: 600 / 10m)')
parser.add_argument('-v', '--verbose', action='count', default=0,
help='increase output verbosity (use up to 3 times)')
parser.add_argument('-t', '--timeout', dest='timeout',
default=30,
help='timeout (in seconds) for the command (Default: 30)')
args = parser.parse_args()
if not args.hostname:
raise nagiosplugin.CheckError('hostname (-H|--hostname) must be provided')
check = nagiosplugin.Check(
IdoStatus(args.hostname, args.db_name, args.username, args.password, args.port),
nagiosplugin.ScalarContext('programstatus_age', args.warning, args.critical),
nagiosplugin.ScalarContext('last_check_age', args.warning, args.critical),
LoadSummary(args.db_name))
check.main(args.verbose, args.timeout)
if __name__ == '__main__':
main()
| gpl-3.0 | 4,422,069,438,603,399,000 | 41.833333 | 222 | 0.608157 | false |
3DLIRIOUS/BlendSCAD | examples/example014.scad.py | 1 | 1763 | # OpenSCAD example, ported by Michael Mlivoncic
# a beautiful dice...
# an interesting test case, to get the Boolean operations somehow fixed (TODO)
#import sys
#sys.path.append("O:/BlenderStuff")
import blendscad
#import imp
#imp.reload(blendscad)
#imp.reload(blendscad.core)
#imp.reload(blendscad.primitives)
blendscad.initns( globals() ) # try to add BlendSCAD names to current namespace .. as if they would be in this file...
## Clear the open .blend file!!!
clearAllObjects()
###### End of Header ##############################################################################
# OpenSCAD's intersection_for() is only a workaround: since the standard "for" implies
# a union of its content, intersection_for() combines for() and intersection().
# It is not really needed here, as we currently do not support implicit union()s,
# but it demonstrates how such code would be rewritten.
# see: http://en.wikibooks.org/wiki/OpenSCAD_User_Manual/The_OpenSCAD_Language#Intersection_For_Loop
# intersection_for(i = [
# [0, 0, 0],
# [10, 20, 300],
# [200, 40, 57],
# [20, 88, 57]
# ])
# rotate(i) cube([100, 20, 20], center = true)
# example 2 - rotation:
#intersection_for(i = [ ]
tmp = None
rnge = [ [ 0, 0, 0],
[ 10, 20, 300],
[200, 40, 57],
[ 20, 88, 57] ]
for i in rnge:
tmp = intersection(
rotate(i ,
cube([100, 20, 20], center = true))
, tmp);
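# tmp starts as None; intersection() is assumed to treat None as "no object
# yet", so each pass intersects the running result with the next rotated cube,
# folding all four rotated cubes into a single object.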
###### Begin of Footer ##############################################################################
color(rands(0,1,3)) # random color for the last object, to see "FINISH" :-)
# print timestamp and finish - sometimes it is easier to see differences in console then :-)
import time
import datetime
st = datetime.datetime.fromtimestamp( time.time() ).strftime('%Y-%m-%d %H:%M:%S')
echo ("FINISH", st)
| gpl-3.0 | -6,193,139,217,817,806,000 | 26.546875 | 131 | 0.614861 | false |
stackunderflow-stackptr/stackptr_web | crossbarconnect/client.py | 1 | 8527 | ###############################################################################
##
## Copyright (C) 2012-2014 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
__all__ = ['Client']
try:
import ssl
_HAS_SSL = True
except ImportError:
_HAS_SSL = False
import sys
_HAS_SSL_CLIENT_CONTEXT = sys.version_info >= (2,7,9)
import json
import hmac
import hashlib
import base64
import random
from datetime import datetime
import six
from six.moves.urllib import parse
from six.moves.http_client import HTTPConnection, HTTPSConnection
def _utcnow():
"""
Get current time in UTC as ISO 8601 string.
:returns str -- Current time as string in ISO 8601 format.
"""
now = datetime.utcnow()
return now.strftime("%Y-%m-%dT%H:%M:%S.%f")[:-3] + "Z"
def _parse_url(url):
"""
Parses a Crossbar.io HTTP bridge URL.
"""
parsed = parse.urlparse(url)
if parsed.scheme not in ["http", "https"]:
raise Exception("invalid Push URL scheme '%s'" % parsed.scheme)
if parsed.port is None or parsed.port == "":
if parsed.scheme == "http":
port = 80
elif parsed.scheme == "https":
port = 443
else:
raise Exception("logic error")
else:
port = int(parsed.port)
if parsed.fragment is not None and parsed.fragment != "":
raise Exception("invalid Push URL: non-empty fragment '%s" % parsed.fragment)
if parsed.query is not None and parsed.query != "":
raise Exception("invalid Push URL: non-empty query string '%s" % parsed.query)
if parsed.path is not None and parsed.path != "":
ppath = parsed.path
path = parse.unquote(ppath)
else:
ppath = "/"
path = ppath
return {'secure': parsed.scheme == "https",
'host': parsed.hostname,
'port': port,
'path': path}
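# Example usage (editor sketch; URL, key and secret are assumed values):
#
#   client = Client(u"http://127.0.0.1:8080/push", key=u"myapp", secret=u"s3cret")
#   event_id = client.publish(u"com.example.topic", 23, color="orange",
#                             options={'exclude': [1234567]})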
class Client:
"""
Crossbar.io HTTP bridge client.
"""
def __init__(self, url, key = None, secret = None, timeout = 5, context = None):
"""
Create a new Crossbar.io push client.
The only mandatory argument is the Push service endpoint of the Crossbar.io
instance to push to.
For signed pushes, provide authentication key and secret. If those are not
given, unsigned pushes are performed.
:param url: URL of the HTTP bridge of Crossbar.io (e.g. http://example.com:8080/push).
:type url: str
:param key: Optional key to use for signing requests.
:type key: str
:param secret: When using signed request, the secret corresponding to key.
:type secret: str
:param timeout: Timeout for requests.
:type timeout: int
:param context: If the HTTP bridge is running on HTTPS (that is securely over TLS),
then the context provides the SSL settings the client should use (e.g. the
certificate chain against which to verify the server certificate). This parameter
is only available on Python 2.7.9+ and Python 3 (otherwise the parameter is silently
ignored!). See: https://docs.python.org/2/library/ssl.html#ssl.SSLContext
:type context: obj or None
"""
if six.PY2:
if type(url) == str:
url = six.u(url)
if type(key) == str:
key = six.u(key)
if type(secret) == str:
secret = six.u(secret)
assert(type(url) == six.text_type)
assert((key and secret) or (not key and not secret))
assert(key is None or type(key) == six.text_type)
assert(secret is None or type(secret) == six.text_type)
assert(type(timeout) == int)
if _HAS_SSL and _HAS_SSL_CLIENT_CONTEXT:
assert(context is None or isinstance(context, ssl.SSLContext))
self._seq = 1
self._key = key
self._secret = secret
self._endpoint = _parse_url(url)
self._endpoint['headers'] = {
"Content-type": "application/json",
"User-agent": "crossbarconnect-python"
}
if self._endpoint['secure']:
if not _HAS_SSL:
raise Exception("Bridge URL is using HTTPS, but Python SSL module is missing")
if _HAS_SSL_CLIENT_CONTEXT:
self._connection = HTTPSConnection(self._endpoint['host'],
self._endpoint['port'], timeout = timeout, context = context)
else:
self._connection = HTTPSConnection(self._endpoint['host'],
self._endpoint['port'], timeout = timeout)
else:
self._connection = HTTPConnection(self._endpoint['host'],
self._endpoint['port'], timeout = timeout)
def publish(self, topic, *args, **kwargs):
"""
Publish an event to subscribers on specified topic via Crossbar.io HTTP bridge.
The event payload (positional and keyword) can be of any type that can be
serialized to JSON.
If `kwargs` contains an `options` attribute, this is expected to
be a dictionary with the following possible parameters:
* `exclude`: A list of WAMP session IDs to exclude from receivers.
* `eligible`: A list of WAMP session IDs eligible as receivers.
:param topic: Topic to push to.
:type topic: str
:param args: Arbitrary application payload for the event (positional arguments).
:type args: list
:param kwargs: Arbitrary application payload for the event (keyword arguments).
:type kwargs: dict
:returns int -- The event publication ID assigned by the broker.
"""
if six.PY2 and type(topic) == str:
topic = six.u(topic)
assert(type(topic) == six.text_type)
## this will get filled and later serialized into HTTP/POST body
##
event = {
'topic': topic
}
if 'options' in kwargs:
event['options'] = kwargs.pop('options')
assert(type(event['options']) == dict)
if args:
event['args'] = args
if kwargs:
event['kwargs'] = kwargs
try:
body = json.dumps(event, separators = (',',':'))
if six.PY3:
body = body.encode('utf8')
except Exception as e:
raise Exception("invalid event payload - not JSON serializable: {0}".format(e))
params = {
'timestamp': _utcnow(),
'seq': self._seq,
}
if self._key:
## if the request is to be signed, create extra fields and signature
params['key'] = self._key
params['nonce'] = random.randint(0, 9007199254740992)
# HMAC[SHA256]_{secret} (key | timestamp | seq | nonce | body) => signature
hm = hmac.new(self._secret.encode('utf8'), None, hashlib.sha256)
hm.update(params['key'].encode('utf8'))
hm.update(params['timestamp'].encode('utf8'))
hm.update(u"{0}".format(params['seq']).encode('utf8'))
hm.update(u"{0}".format(params['nonce']).encode('utf8'))
hm.update(body)
signature = base64.urlsafe_b64encode(hm.digest())
params['signature'] = signature
self._seq += 1
path = "{0}?{1}".format(parse.quote(self._endpoint['path']), parse.urlencode(params))
## now issue the HTTP/POST
##
self._connection.request('POST', path, body, self._endpoint['headers'])
response = self._connection.getresponse()
response_body = response.read()
if response.status not in [200, 202]:
raise Exception("publication request failed {0} [{1}] - {2}".format(response.status, response.reason, response_body))
try:
res = json.loads(response_body)
except Exception as e:
raise Exception("publication request bogus result - {0}".format(e))
return res['id']
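# Illustrative usage sketch (assumes a Crossbar.io HTTP bridge reachable at
# the URL below; all values are hypothetical, not part of the original module):
#
#   client = Client(u"http://127.0.0.1:8080/push", key=u"mykey", secret=u"mysecret")
#   event_id = client.publish(u"com.example.topic", 23, color=u"orange")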
| agpl-3.0 | 3,583,649,226,256,367,000 | 32.108 | 126 | 0.587194 | false |
VahidooX/DeepCCA | objectives.py | 1 | 2281 | import theano.tensor as T
def cca_loss(outdim_size, use_all_singular_values):
"""
The main loss function (inner_cca_objective) is wrapped in this function due to
the constraints imposed by Keras on objective functions
"""
def inner_cca_objective(y_true, y_pred):
"""
It is the loss function of CCA as introduced in the original paper. There can be other formulations.
It is implemented by Theano tensor operations, and does not work on Tensorflow backend
y_true is just ignored
"""
r1 = 1e-4
r2 = 1e-4
eps = 1e-12
o1 = o2 = y_pred.shape[1]//2
# unpack (separate) the output of networks for view 1 and view 2
H1 = y_pred[:, 0:o1].T
H2 = y_pred[:, o1:o1+o2].T
m = H1.shape[1]
H1bar = H1 - (1.0 / m) * T.dot(H1, T.ones([m, m]))
H2bar = H2 - (1.0 / m) * T.dot(H2, T.ones([m, m]))
SigmaHat12 = (1.0 / (m - 1)) * T.dot(H1bar, H2bar.T)
SigmaHat11 = (1.0 / (m - 1)) * T.dot(H1bar, H1bar.T) + r1 * T.eye(o1)
SigmaHat22 = (1.0 / (m - 1)) * T.dot(H2bar, H2bar.T) + r2 * T.eye(o2)
# Calculating the root inverse of covariance matrices by using eigen decomposition
[D1, V1] = T.nlinalg.eigh(SigmaHat11)
[D2, V2] = T.nlinalg.eigh(SigmaHat22)
# Added to increase stability
posInd1 = T.gt(D1, eps).nonzero()[0]
D1 = D1[posInd1]
V1 = V1[:, posInd1]
posInd2 = T.gt(D2, eps).nonzero()[0]
D2 = D2[posInd2]
V2 = V2[:, posInd2]
SigmaHat11RootInv = T.dot(T.dot(V1, T.nlinalg.diag(D1 ** -0.5)), V1.T)
SigmaHat22RootInv = T.dot(T.dot(V2, T.nlinalg.diag(D2 ** -0.5)), V2.T)
Tval = T.dot(T.dot(SigmaHat11RootInv, SigmaHat12), SigmaHat22RootInv)
if use_all_singular_values:
# all singular values are used to calculate the correlation
corr = T.sqrt(T.nlinalg.trace(T.dot(Tval.T, Tval)))
else:
# just the top outdim_size singular values are used
[U, V] = T.nlinalg.eigh(T.dot(Tval.T, Tval))
U = U[T.gt(U, eps).nonzero()[0]]
U = U.sort()
corr = T.sum(T.sqrt(U[0:outdim_size]))
return -corr
return inner_cca_objective
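# Illustrative usage sketch (assumes a Keras model whose output concatenates
# the two views side by side, as inner_cca_objective expects; names are
# hypothetical):
#
#   loss_fn = cca_loss(outdim_size=10, use_all_singular_values=False)
#   model.compile(optimizer='rmsprop', loss=loss_fn)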
| mit | 6,360,399,072,148,163,000 | 34.640625 | 108 | 0.562034 | false |
openstack/ironic | ironic/common/release_mappings.py | 1 | 12857 | # Copyright 2016 Intel Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ironic.common.i18n import _
# NOTE(xek): This decides the version cap of RPC messages sent to conductor
# and objects during rolling upgrades, when [DEFAULT]/pin_release_version
# configuration is set.
#
# Remember to add a new entry for the new version that is shipping in a new
# release.
#
# We support a rolling upgrade between adjacent named releases, as well as
# between a release and master, so old, unsupported releases can be removed,
# together with the supporting code, which is typically found in an object's
# make_compatible methods and RPC client code.
# NOTE(xek): The format of this dict is:
# { '<release version>': {
# 'api': '<Bare Metal API version>',
# 'rpc': '<RPC API version>',
# 'objects': {
# '<object class name>': ['<object version>'],
# }
# },
# }
# The list should contain all objects which are persisted in the database and
# sent over RPC. Notifications/Payloads are not being included here since we
# don't need to pin them during rolling upgrades.
#
# For each object, list the versions that the object can be in for a particular
# release. That is, any new versions that were added in that release. If there
# were no new versions, it should have the same (latest) version as the
# previous release.
# NOTE(rloo): We need a list, not just the latest version, for the DB queries
# that filter for objects that are not in particular versions; for more info,
# see comments after L1128 of
# https://review.opendev.org/#/c/408556/52/ironic/db/sqlalchemy/api.py.
#
# There should always be a 'master' entry that reflects the objects in the
# master branch.
#
# Just before doing a release, copy the 'master' entry, and rename the first
# 'master' entry to the (semver) version being released.
#
# Just after doing a named release, delete any entries associated with the
# oldest named release.
RELEASE_MAPPING = {
'9.2': {
'rpc': '1.41',
'api': '1.35',
'objects': {
'Node': ['1.21'],
'Conductor': ['1.2'],
'Chassis': ['1.3'],
'Port': ['1.7'],
'Portgroup': ['1.3'],
'VolumeConnector': ['1.0'],
'VolumeTarget': ['1.0'],
}
},
'10.0': {
'api': '1.36',
'rpc': '1.42',
'objects': {
'Node': ['1.22'],
'Conductor': ['1.2'],
'Chassis': ['1.3'],
'Port': ['1.7'],
'Portgroup': ['1.3'],
'VolumeConnector': ['1.0'],
'VolumeTarget': ['1.0'],
}
},
'10.1': {
'api': '1.38',
'rpc': '1.44',
'objects': {
'Node': ['1.23'],
'Conductor': ['1.2'],
'Chassis': ['1.3'],
'Port': ['1.7'],
'Portgroup': ['1.3'],
'Trait': ['1.0'],
'TraitList': ['1.0'],
'VolumeConnector': ['1.0'],
'VolumeTarget': ['1.0'],
}
},
'11.0': {
'api': '1.43',
'rpc': '1.44',
'objects': {
'Node': ['1.25', '1.24'],
'Conductor': ['1.2'],
'Chassis': ['1.3'],
'Port': ['1.8'],
'Portgroup': ['1.4'],
'Trait': ['1.0'],
'TraitList': ['1.0'],
'VolumeConnector': ['1.0'],
'VolumeTarget': ['1.0'],
}
},
'11.1': {
'api': '1.46',
'rpc': '1.47',
'objects': {
'Node': ['1.27', '1.26'],
'Conductor': ['1.3'],
'Chassis': ['1.3'],
'Port': ['1.8'],
'Portgroup': ['1.4'],
'Trait': ['1.0'],
'TraitList': ['1.0'],
'VolumeConnector': ['1.0'],
'VolumeTarget': ['1.0'],
}
},
'12.0': {
'api': '1.49',
'rpc': '1.47',
'objects': {
'Node': ['1.29', '1.28'],
'Conductor': ['1.3'],
'Chassis': ['1.3'],
'Port': ['1.8'],
'Portgroup': ['1.4'],
'Trait': ['1.0'],
'TraitList': ['1.0'],
'VolumeConnector': ['1.0'],
'VolumeTarget': ['1.0'],
}
},
'12.1': {
'api': '1.56',
'rpc': '1.48',
'objects': {
'Allocation': ['1.0'],
'Node': ['1.32', '1.31', '1.30'],
'Conductor': ['1.3'],
'Chassis': ['1.3'],
'DeployTemplate': ['1.0', '1.1'],
'Port': ['1.9'],
'Portgroup': ['1.4'],
'Trait': ['1.0'],
'TraitList': ['1.0'],
'VolumeConnector': ['1.0'],
'VolumeTarget': ['1.0'],
}
},
'12.2': {
'api': '1.58',
'rpc': '1.48',
'objects': {
'Allocation': ['1.0'],
'Node': ['1.32'],
'Conductor': ['1.3'],
'Chassis': ['1.3'],
'DeployTemplate': ['1.1'],
'Port': ['1.9'],
'Portgroup': ['1.4'],
'Trait': ['1.0'],
'TraitList': ['1.0'],
'VolumeConnector': ['1.0'],
'VolumeTarget': ['1.0'],
}
},
'13.0': {
'api': '1.58',
'rpc': '1.48',
'objects': {
'Allocation': ['1.0'],
'Node': ['1.32'],
'Conductor': ['1.3'],
'Chassis': ['1.3'],
'DeployTemplate': ['1.1'],
'Port': ['1.9'],
'Portgroup': ['1.4'],
'Trait': ['1.0'],
'TraitList': ['1.0'],
'VolumeConnector': ['1.0'],
'VolumeTarget': ['1.0'],
}
},
'14.0': {
'api': '1.61',
'rpc': '1.48',
'objects': {
'Allocation': ['1.1'],
'Node': ['1.33', '1.32'],
'Conductor': ['1.3'],
'Chassis': ['1.3'],
'DeployTemplate': ['1.1'],
'Port': ['1.9'],
'Portgroup': ['1.4'],
'Trait': ['1.0'],
'TraitList': ['1.0'],
'VolumeConnector': ['1.0'],
'VolumeTarget': ['1.0'],
}
},
'15.0': {
'api': '1.65',
'rpc': '1.50',
'objects': {
'Allocation': ['1.1'],
'Node': ['1.34', '1.33', '1.32'],
'Conductor': ['1.3'],
'Chassis': ['1.3'],
'DeployTemplate': ['1.1'],
'Port': ['1.9'],
'Portgroup': ['1.4'],
'Trait': ['1.0'],
'TraitList': ['1.0'],
'VolumeConnector': ['1.0'],
'VolumeTarget': ['1.0'],
}
},
'15.1': {
'api': '1.67',
'rpc': '1.50',
'objects': {
'Allocation': ['1.1'],
'Node': ['1.35', '1.34'],
'Conductor': ['1.3'],
'Chassis': ['1.3'],
'DeployTemplate': ['1.1'],
'Port': ['1.9'],
'Portgroup': ['1.4'],
'Trait': ['1.0'],
'TraitList': ['1.0'],
'VolumeConnector': ['1.0'],
'VolumeTarget': ['1.0'],
}
},
'16.0': {
'api': '1.68',
'rpc': '1.51',
'objects': {
'Allocation': ['1.1'],
'Node': ['1.35'],
'Conductor': ['1.3'],
'Chassis': ['1.3'],
'Deployment': ['1.0'],
'DeployTemplate': ['1.1'],
'Port': ['1.9'],
'Portgroup': ['1.4'],
'Trait': ['1.0'],
'TraitList': ['1.0'],
'VolumeConnector': ['1.0'],
'VolumeTarget': ['1.0'],
}
},
'16.1': {
'api': '1.68',
'rpc': '1.51',
'objects': {
'Allocation': ['1.1'],
'Node': ['1.35'],
'Conductor': ['1.3'],
'Chassis': ['1.3'],
'Deployment': ['1.0'],
'DeployTemplate': ['1.1'],
'Port': ['1.9'],
'Portgroup': ['1.4'],
'Trait': ['1.0'],
'TraitList': ['1.0'],
'VolumeConnector': ['1.0'],
'VolumeTarget': ['1.0'],
}
},
'16.2': {
'api': '1.69',
'rpc': '1.52',
'objects': {
'Allocation': ['1.1'],
'Node': ['1.35'],
'Conductor': ['1.3'],
'Chassis': ['1.3'],
'Deployment': ['1.0'],
'DeployTemplate': ['1.1'],
'Port': ['1.10'],
'Portgroup': ['1.4'],
'Trait': ['1.0'],
'TraitList': ['1.0'],
'VolumeConnector': ['1.0'],
'VolumeTarget': ['1.0'],
}
},
'17.0': {
'api': '1.72',
'rpc': '1.54',
'objects': {
'Allocation': ['1.1'],
'Node': ['1.35'],
'Conductor': ['1.3'],
'Chassis': ['1.3'],
'Deployment': ['1.0'],
'DeployTemplate': ['1.1'],
'Port': ['1.10'],
'Portgroup': ['1.4'],
'Trait': ['1.0'],
'TraitList': ['1.0'],
'VolumeConnector': ['1.0'],
'VolumeTarget': ['1.0'],
}
},
'18.0': {
'api': '1.74',
'rpc': '1.54',
'objects': {
'Allocation': ['1.1'],
'BIOSSetting': ['1.1'],
'Node': ['1.35'],
'Conductor': ['1.3'],
'Chassis': ['1.3'],
'Deployment': ['1.0'],
'DeployTemplate': ['1.1'],
'Port': ['1.10'],
'Portgroup': ['1.4'],
'Trait': ['1.0'],
'TraitList': ['1.0'],
'VolumeConnector': ['1.0'],
'VolumeTarget': ['1.0'],
}
},
'master': {
'api': '1.74',
'rpc': '1.54',
'objects': {
'Allocation': ['1.1'],
'BIOSSetting': ['1.1'],
'Node': ['1.35'],
'Conductor': ['1.3'],
'Chassis': ['1.3'],
'Deployment': ['1.0'],
'DeployTemplate': ['1.1'],
'Port': ['1.10'],
'Portgroup': ['1.4'],
'Trait': ['1.0'],
'TraitList': ['1.0'],
'VolumeConnector': ['1.0'],
'VolumeTarget': ['1.0'],
}
},
}
# NOTE(xek): Assign each named release to the appropriate semver.
#
# Just before we do a new named release (more specifically, create
# a stable/<release> branch), add a mapping for the new named
# release. This is needed; otherwise CI will fail: the unit test
# common.ReleaseMappingsTestCase.test_contains_current_release_entry()
# and the grenade job that tests old/new (new-release -> master) both fail.
#
# Just after we do a new named release, delete the oldest named
# release (that we are no longer supporting for a rolling upgrade).
#
# There should be at most two named mappings here.
# NOTE(mgoddard): remove victoria prior to the xena release.
RELEASE_MAPPING['victoria'] = RELEASE_MAPPING['16.0']
RELEASE_MAPPING['wallaby'] = RELEASE_MAPPING['17.0']
# List of available versions with named versions first; 'master' is excluded.
RELEASE_VERSIONS = sorted(set(RELEASE_MAPPING) - {'master'}, reverse=True)
# List of available (version, description) tuples.
RELEASE_VERSIONS_DESCS = [(v, _('"%s" release') % v) for v in RELEASE_VERSIONS]
def get_object_versions(releases=None, objects=None):
"""Gets the supported versions for all objects.
Supported versions are from the RELEASE_MAPPINGs.
:param releases: a list of release names; if empty/None, versions from all
releases are returned (the default).
:param objects: a list of names of objects of interest. If empty/None,
versions of all objects are returned (the default).
:returns: a dictionary where the key is the object name and the value is
a set of supported versions.
"""
if not releases:
releases = list(RELEASE_MAPPING)
versions = {}
for release in releases:
object_mapping = RELEASE_MAPPING[release]['objects']
for obj, version_list in object_mapping.items():
if not objects or obj in objects:
versions.setdefault(obj, set()).update(version_list)
return versions
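# Example (values taken from the mappings above; shown for illustration only):
#
#   get_object_versions(releases=['16.0'], objects=['Node'])
#   # => {'Node': {'1.35'}}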
| apache-2.0 | -6,038,370,052,262,682,000 | 30.435208 | 79 | 0.442327 | false |
childresslab/MicrocavityExp1 | gui/manager/managergui.py | 1 | 25022 | # -*- coding: utf-8 -*-
""" This module contains a GUI through which the Manager core class can be controlled.
It can load and reload modules, show the configuration, and re-open closed windows.
Qudi is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Qudi is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Qudi. If not, see <http://www.gnu.org/licenses/>.
Copyright (c) the Qudi Developers. See the COPYRIGHT.txt file at the
top-level directory of this distribution and at <https://github.com/Ulm-IQO/qudi/>
"""
import core.logger
import logging
import numpy as np
import os
from collections import OrderedDict
from core.module import StatusVar
from .errordialog import ErrorDialog
from gui.guibase import GUIBase
from qtpy import QtCore, QtWidgets, uic
from qtpy.QtGui import QPalette
from qtpy.QtWidgets import QWidget
try:
from qtconsole.inprocess import QtInProcessKernelManager
except ImportError:
from IPython.qt.inprocess import QtInProcessKernelManager
try:
from git import Repo
except:
pass
try:
import pyqtgraph as pg
_has_pyqtgraph = True
except:
_has_pyqtgraph = False
# Rather than import the ui*.py file here, the ui*.ui file itself is
# loaded by uic.loadUI in the QtGui classes below.
class ManagerGui(GUIBase):
"""This class provides a GUI to the Qudi manager.
@signal sigStartAll: sent when all modules should be loaded
@signal str str sigStartThis: load a specific module
@signal str str sigReloadThis reload a specific module from Python code
@signal str str sigStopThis: stop all actions of a module and remove
references
It supports module loading, reloading, logging and other
administrative tasks.
"""
# status vars
consoleFontSize = StatusVar('console_font_size', 10)
# signals
sigStartAll = QtCore.Signal()
sigStartModule = QtCore.Signal(str, str)
sigReloadModule = QtCore.Signal(str, str)
sigCleanupStatus = QtCore.Signal(str, str)
sigStopModule = QtCore.Signal(str, str)
sigLoadConfig = QtCore.Signal(str, bool)
sigSaveConfig = QtCore.Signal(str)
sigRealQuit = QtCore.Signal()
def __init__(self, **kwargs):
"""Create an instance of the module.
@param object manager:
@param str name:
@param dict config:
"""
super().__init__(**kwargs)
self.modlist = list()
self.modules = set()
def on_activate(self):
""" Activation method called on change to active state.
This method creates the Manager main window.
"""
if _has_pyqtgraph:
# set background of pyqtgraph
testwidget = QWidget()
testwidget.ensurePolished()
bgcolor = testwidget.palette().color(QPalette.Normal,
testwidget.backgroundRole())
# set manually the background color in hex code according to our
# color scheme:
pg.setConfigOption('background', bgcolor)
# opengl usage
if 'useOpenGL' in self._manager.tree['global']:
pg.setConfigOption('useOpenGL',
self._manager.tree['global']['useOpenGL'])
self._mw = ManagerMainWindow()
self.restoreWindowPos(self._mw)
self.errorDialog = ErrorDialog(self)
self._about = AboutDialog()
version = self.getSoftwareVersion()
configFile = self._manager.configFile
self._about.label.setText(
'<a href=\"https://github.com/Ulm-IQO/qudi/commit/{0}\"'
' style=\"color: cyan;\"> {0} </a>, on branch {1}.'.format(
version[0], version[1]))
self.versionLabel = QtWidgets.QLabel()
self.versionLabel.setText(
'<a href=\"https://github.com/Ulm-IQO/qudi/commit/{0}\"'
' style=\"color: cyan;\"> {0} </a>,'
' on branch {1}, configured from {2}'.format(
version[0], version[1], configFile))
self.versionLabel.setOpenExternalLinks(True)
self._mw.statusBar().addWidget(self.versionLabel)
# Connect up the buttons.
self._mw.actionQuit.triggered.connect(self._manager.quit)
self._mw.actionLoad_configuration.triggered.connect(self.getLoadFile)
self._mw.actionReload_current_configuration.triggered.connect(self.reloadConfig)
self._mw.actionSave_configuration.triggered.connect(self.getSaveFile)
self._mw.action_Load_all_modules.triggered.connect(self._manager.startAllConfiguredModules)
self._mw.actionAbout_Qt.triggered.connect(QtWidgets.QApplication.aboutQt)
self._mw.actionAbout_Qudi.triggered.connect(self.showAboutQudi)
self._mw.actionReset_to_default_layout.triggered.connect(self.resetToDefaultLayout)
self._manager.sigShowManager.connect(self.show)
self._manager.sigConfigChanged.connect(self.updateConfigWidgets)
self._manager.sigModulesChanged.connect(self.updateConfigWidgets)
self._manager.sigShutdownAcknowledge.connect(self.promptForShutdown)
# Log widget
self._mw.logwidget.setManager(self._manager)
for loghandler in logging.getLogger().handlers:
if isinstance(loghandler, core.logger.QtLogHandler):
loghandler.sigLoggedMessage.connect(self.handleLogEntry)
# Module widgets
self.sigStartModule.connect(self._manager.startModule)
self.sigReloadModule.connect(self._manager.restartModuleRecursive)
self.sigCleanupStatus.connect(self._manager.removeStatusFile)
self.sigStopModule.connect(self._manager.deactivateModule)
self.sigLoadConfig.connect(self._manager.loadConfig)
self.sigSaveConfig.connect(self._manager.saveConfig)
self.sigRealQuit.connect(self._manager.realQuit)
# Module state display
self.checkTimer = QtCore.QTimer()
self.checkTimer.start(1000)
self.updateGUIModuleList()
# IPython console widget
self.startIPython()
self.updateIPythonModuleList()
self.startIPythonWidget()
# thread widget
self._mw.threadWidget.threadListView.setModel(self._manager.tm)
# remote widget
self._mw.remoteWidget.hostLabel.setText('URL:')
self._mw.remoteWidget.portLabel.setText(
'rpyc://{0}:{1}/'.format(self._manager.rm.host,
self._manager.rm.server.port))
self._mw.remoteWidget.remoteModuleListView.setModel(
self._manager.rm.remoteModules)
self._mw.remoteWidget.sharedModuleListView.setModel(
self._manager.rm.sharedModules)
self._mw.configDisplayDockWidget.hide()
self._mw.remoteDockWidget.hide()
self._mw.threadDockWidget.hide()
self._mw.show()
def on_deactivate(self):
"""Close window and remove connections.
"""
self.stopIPythonWidget()
self.stopIPython()
self.checkTimer.stop()
if len(self.modlist) > 0:
self.checkTimer.timeout.disconnect()
self.sigStartModule.disconnect()
self.sigReloadModule.disconnect()
self.sigStopModule.disconnect()
self.sigLoadConfig.disconnect()
self.sigSaveConfig.disconnect()
self._mw.actionQuit.triggered.disconnect()
self._mw.actionLoad_configuration.triggered.disconnect()
self._mw.actionSave_configuration.triggered.disconnect()
self._mw.action_Load_all_modules.triggered.disconnect()
self._mw.actionAbout_Qt.triggered.disconnect()
self._mw.actionAbout_Qudi.triggered.disconnect()
self.saveWindowPos(self._mw)
self._mw.close()
def show(self):
"""Show the window and bring it t the top.
"""
QtWidgets.QMainWindow.show(self._mw)
self._mw.activateWindow()
self._mw.raise_()
def showAboutQudi(self):
"""Show a dialog with details about Qudi.
"""
self._about.show()
@QtCore.Slot(bool, bool)
def promptForShutdown(self, locked, broken):
""" Display a dialog, asking the user to confirm shutdown. """
text = "Some modules are locked right now, really quit?"
result = QtWidgets.QMessageBox.question(
self._mw,
'Qudi: Really Quit?',
text,
QtWidgets.QMessageBox.Yes,
QtWidgets.QMessageBox.No
)
if result == QtWidgets.QMessageBox.Yes:
self.sigRealQuit.emit()
def resetToDefaultLayout(self):
""" Return the dockwidget layout and visibility to its default state """
self._mw.configDisplayDockWidget.setVisible(False)
self._mw.consoleDockWidget.setVisible(True)
self._mw.remoteDockWidget.setVisible(False)
self._mw.threadDockWidget.setVisible(False)
self._mw.logDockWidget.setVisible(True)
self._mw.actionConfigurationView.setChecked(False)
self._mw.actionConsoleView.setChecked(True)
self._mw.actionRemoteView.setChecked(False)
self._mw.actionThreadsView.setChecked(False)
self._mw.actionLogView.setChecked(True)
self._mw.configDisplayDockWidget.setFloating(False)
self._mw.consoleDockWidget.setFloating(False)
self._mw.remoteDockWidget.setFloating(False)
self._mw.threadDockWidget.setFloating(False)
self._mw.logDockWidget.setFloating(False)
self._mw.addDockWidget(QtCore.Qt.DockWidgetArea(8), self._mw.configDisplayDockWidget)
self._mw.addDockWidget(QtCore.Qt.DockWidgetArea(2), self._mw.consoleDockWidget)
self._mw.addDockWidget(QtCore.Qt.DockWidgetArea(8), self._mw.remoteDockWidget)
self._mw.addDockWidget(QtCore.Qt.DockWidgetArea(8), self._mw.threadDockWidget)
self._mw.addDockWidget(QtCore.Qt.DockWidgetArea(8), self._mw.logDockWidget)
def handleLogEntry(self, entry):
""" Forward log entry to log widget and show an error popup if it is
an error message.
@param dict entry: Log entry
"""
self._mw.logwidget.addEntry(entry)
if entry['level'] == 'error' or entry['level'] == 'critical':
self.errorDialog.show(entry)
def startIPython(self):
""" Create an IPython kernel manager and kernel.
Add modules to its namespace.
"""
# make sure we only log errors and above from ipython
logging.getLogger('ipykernel').setLevel(logging.WARNING)
self.log.debug('IPy activation in thread {0}'.format(
QtCore.QThread.currentThreadId()))
self.kernel_manager = QtInProcessKernelManager()
self.kernel_manager.start_kernel()
self.kernel = self.kernel_manager.kernel
self.namespace = self.kernel.shell.user_ns
self.namespace.update({
'np': np,
'config': self._manager.tree['defined'],
'manager': self._manager
})
if _has_pyqtgraph:
self.namespace['pg'] = pg
self.updateIPythonModuleList()
self.kernel.gui = 'qt4'
self.log.info('IPython has kernel {0}'.format(
self.kernel_manager.has_kernel))
self.log.info('IPython kernel alive {0}'.format(
self.kernel_manager.is_alive()))
self._manager.sigModulesChanged.connect(self.updateIPythonModuleList)
def startIPythonWidget(self):
""" Create an IPython console widget and connect it to an IPython
kernel.
"""
        if _has_pyqtgraph:
            banner_modules = 'The numpy and pyqtgraph modules have already ' \
                             'been imported as \'np\' and \'pg\'.'
        else:
            banner_modules = 'The numpy module has already been imported ' \
                             'as \'np\'.'
banner = """
This is an interactive IPython console. {0}
Configuration is in 'config', the manager is 'manager' and all loaded modules are in this namespace with their configured name.
View the current namespace with dir().
Go, play.
""".format(banner_modules)
self._mw.consolewidget.banner = banner
# font size
self.consoleSetFontSize(self.consoleFontSize)
# settings
self._csd = ConsoleSettingsDialog()
self._csd.accepted.connect(self.consoleApplySettings)
self._csd.rejected.connect(self.consoleKeepSettings)
self._csd.buttonBox.button(
QtWidgets.QDialogButtonBox.Apply).clicked.connect(
self.consoleApplySettings)
self._mw.actionConsoleSettings.triggered.connect(self._csd.exec_)
self.consoleKeepSettings()
self._mw.consolewidget.kernel_manager = self.kernel_manager
self._mw.consolewidget.kernel_client = \
self._mw.consolewidget.kernel_manager.client()
self._mw.consolewidget.kernel_client.start_channels()
# the linux style theme which is basically the monokai theme
self._mw.consolewidget.set_default_style(colors='linux')
def stopIPython(self):
""" Stop the IPython kernel.
"""
self.log.debug('IPy deactivation: {0}'.format(QtCore.QThread.currentThreadId()))
self.kernel_manager.shutdown_kernel()
def stopIPythonWidget(self):
""" Disconnect the IPython widget from the kernel.
"""
self._mw.consolewidget.kernel_client.stop_channels()
def updateIPythonModuleList(self):
"""Remove non-existing modules from namespace,
add new modules to namespace, update reloaded modules
"""
currentModules = set()
newNamespace = dict()
for base in ['hardware', 'logic', 'gui']:
for module in self._manager.tree['loaded'][base]:
currentModules.add(module)
newNamespace[module] = self._manager.tree[
'loaded'][base][module]
discard = self.modules - currentModules
self.namespace.update(newNamespace)
for module in discard:
self.namespace.pop(module, None)
self.modules = currentModules
def consoleKeepSettings(self):
""" Write old values into config dialog.
"""
self._csd.fontSizeBox.setProperty('value', self.consoleFontSize)
def consoleApplySettings(self):
""" Apply values from config dialog to console.
"""
self.consoleSetFontSize(self._csd.fontSizeBox.value())
def consoleSetFontSize(self, fontsize):
self._mw.consolewidget.font_size = fontsize
self.consoleFontSize = fontsize
self._mw.consolewidget.reset_font()
def updateConfigWidgets(self):
""" Clear and refill the tree widget showing the configuration.
"""
self.fillTreeWidget(self._mw.treeWidget, self._manager.tree)
def updateGUIModuleList(self):
""" Clear and refill the module list widget
"""
# self.clearModuleList(self)
self.fillModuleList(self._mw.guilayout, 'gui')
self.fillModuleList(self._mw.logiclayout, 'logic')
self.fillModuleList(self._mw.hwlayout, 'hardware')
def fillModuleList(self, layout, base):
""" Fill the module list widget with module widgets for defined gui
modules.
        @param QLayout layout: layout of the module list widget where
                               module widgets should be added
        @param str base: module category to fill
"""
for module in self._manager.tree['defined'][base]:
            if module not in self._manager.tree['global']['startup']:
widget = ModuleListItem(self._manager, base, module)
self.modlist.append(widget)
layout.addWidget(widget)
widget.sigLoadThis.connect(self.sigStartModule)
widget.sigReloadThis.connect(self.sigReloadModule)
widget.sigDeactivateThis.connect(self.sigStopModule)
widget.sigCleanupStatus.connect(self.sigCleanupStatus)
self.checkTimer.timeout.connect(widget.checkModuleState)
def fillTreeItem(self, item, value):
""" Recursively fill a QTreeWidgeItem with the contents from a
dictionary.
@param QTreeWidgetItem item: the widget item to fill
@param (dict, list, etc) value: value to fill in
"""
item.setExpanded(True)
if type(value) is OrderedDict or type(value) is dict:
for key in value:
child = QtWidgets.QTreeWidgetItem()
child.setText(0, key)
item.addChild(child)
self.fillTreeItem(child, value[key])
elif type(value) is list:
for val in value:
child = QtWidgets.QTreeWidgetItem()
item.addChild(child)
if type(val) is dict:
child.setText(0, '[dict]')
self.fillTreeItem(child, val)
elif type(val) is OrderedDict:
child.setText(0, '[odict]')
self.fillTreeItem(child, val)
elif type(val) is list:
child.setText(0, '[list]')
self.fillTreeItem(child, val)
else:
child.setText(0, str(val))
child.setExpanded(True)
else:
child = QtWidgets.QTreeWidgetItem()
child.setText(0, str(value))
item.addChild(child)
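    # Illustrative sketch (not part of the original module): a value such as
    # {'gui': {'man': ['a', 'b']}} is rendered as a 'gui' item containing a
    # 'man' child, which in turn holds the two leaf children 'a' and 'b'.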
def getSoftwareVersion(self):
""" Try to determine the software version in case the program is in
a git repository.
"""
try:
repo = Repo(self.get_main_dir())
branch = repo.active_branch
rev = str(repo.head.commit)
return (rev, str(branch))
except Exception as e:
print('Could not get git repo because:', e)
return ('unknown', -1)
def fillTreeWidget(self, widget, value):
""" Fill a QTreeWidget with the content of a dictionary
@param QTreeWidget widget: the tree widget to fill
@param dict,OrderedDict value: the dictionary to fill in
"""
widget.clear()
self.fillTreeItem(widget.invisibleRootItem(), value)
def reloadConfig(self):
""" Reload the current config. """
reply = QtWidgets.QMessageBox.question(
self._mw,
'Restart',
'Do you want to restart the current configuration?',
QtWidgets.QMessageBox.Yes,
QtWidgets.QMessageBox.No
)
configFile = self._manager._getConfigFile()
restart = (reply == QtWidgets.QMessageBox.Yes)
self.sigLoadConfig.emit(configFile, restart)
def getLoadFile(self):
""" Ask the user for a file where the configuration should be loaded
from
"""
defaultconfigpath = os.path.join(self.get_main_dir(), 'config')
filename = QtWidgets.QFileDialog.getOpenFileName(
self._mw,
            'Load Configuration',
defaultconfigpath,
'Configuration files (*.cfg)')[0]
if filename != '':
reply = QtWidgets.QMessageBox.question(
self._mw,
'Restart',
'Do you want to restart to use the configuration?',
QtWidgets.QMessageBox.Yes,
QtWidgets.QMessageBox.No
)
restart = (reply == QtWidgets.QMessageBox.Yes)
self.sigLoadConfig.emit(filename, restart)
def getSaveFile(self):
""" Ask the user for a file where the configuration should be saved
to.
"""
defaultconfigpath = os.path.join(self.get_main_dir(), 'config')
filename = QtWidgets.QFileDialog.getSaveFileName(
self._mw,
            'Save Configuration',
defaultconfigpath,
'Configuration files (*.cfg)')[0]
if filename != '':
self.sigSaveConfig.emit(filename)
class ManagerMainWindow(QtWidgets.QMainWindow):
""" This class represents the Manager Window.
"""
def __init__(self):
""" Create the Manager Window.
"""
# Get the path to the *.ui file
this_dir = os.path.dirname(__file__)
ui_file = os.path.join(this_dir, 'ui_manager_window.ui')
# Load it
super(ManagerMainWindow, self).__init__()
uic.loadUi(ui_file, self)
self.show()
# Set up the layout
# this really cannot be done in Qt designer, you cannot set a layout
# on an empty widget
self.guilayout = QtWidgets.QVBoxLayout(self.guiscroll)
self.logiclayout = QtWidgets.QVBoxLayout(self.logicscroll)
self.hwlayout = QtWidgets.QVBoxLayout(self.hwscroll)
class AboutDialog(QtWidgets.QDialog):
""" This class represents the Qudi About dialog.
"""
def __init__(self):
""" Create Qudi About Dialog.
"""
# Get the path to the *.ui file
this_dir = os.path.dirname(__file__)
ui_file = os.path.join(this_dir, 'ui_about.ui')
# Load it
super().__init__()
uic.loadUi(ui_file, self)
class ConsoleSettingsDialog(QtWidgets.QDialog):
""" Create the SettingsDialog window, based on the corresponding *.ui
file.
"""
def __init__(self):
# Get the path to the *.ui file
this_dir = os.path.dirname(__file__)
ui_file = os.path.join(this_dir, 'ui_console_settings.ui')
# Load it
super().__init__()
uic.loadUi(ui_file, self)
class ModuleListItem(QtWidgets.QFrame):
""" This class represents a module widget in the Qudi module list.
@signal str str sigLoadThis: gives signal with base and name of module
to be loaded
@signal str str sigReloadThis: gives signal with base and name of
module to be reloaded
    @signal str str sigDeactivateThis: gives signal with base and name of
        module to be deactivated
"""
sigLoadThis = QtCore.Signal(str, str)
sigReloadThis = QtCore.Signal(str, str)
sigDeactivateThis = QtCore.Signal(str, str)
sigCleanupStatus = QtCore.Signal(str, str)
def __init__(self, manager, basename, modulename):
""" Create a module widget.
@param str basename: module category
@param str modulename: unique module name
"""
# Get the path to the *.ui file
this_dir = os.path.dirname(__file__)
ui_file = os.path.join(this_dir, 'ui_module_widget.ui')
# Load it
super().__init__()
uic.loadUi(ui_file, self)
self.manager = manager
self.name = modulename
self.base = basename
self.loadButton.setText('Load {0}'.format(self.name))
# connect buttons
self.loadButton.clicked.connect(self.loadButtonClicked)
self.reloadButton.clicked.connect(self.reloadButtonClicked)
self.deactivateButton.clicked.connect(self.deactivateButtonClicked)
self.cleanupButton.clicked.connect(self.cleanupButtonClicked)
def loadButtonClicked(self):
""" Send signal to load and activate this module.
"""
self.sigLoadThis.emit(self.base, self.name)
if self.base == 'gui':
self.loadButton.setText('Show {0}'.format(self.name))
def reloadButtonClicked(self):
""" Send signal to reload this module.
"""
self.sigReloadThis.emit(self.base, self.name)
def deactivateButtonClicked(self):
""" Send signal to deactivate this module.
"""
self.sigDeactivateThis.emit(self.base, self.name)
def cleanupButtonClicked(self):
""" Send signal to deactivate this module.
"""
self.sigCleanupStatus.emit(self.base, self.name)
def checkModuleState(self):
""" Get the state of this module and display it in the statusLabel
"""
state = ''
if self.statusLabel.text() != 'exception, cannot get state':
try:
if (self.base in self.manager.tree['loaded']
and self.name in self.manager.tree['loaded'][self.base]):
state = self.manager.tree['loaded'][self.base][self.name].getState()
else:
state = 'not loaded'
except:
state = 'exception, cannot get state'
self.statusLabel.setText(state)
| gpl-3.0 | -612,792,644,216,739,500 | 37.975078 | 127 | 0.622812 | false |
googleads/googleads-python-lib | examples/ad_manager/v202011/activity_group_service/get_active_activity_groups.py | 1 | 1957 | #!/usr/bin/env python
#
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example gets all active activity groups.
"""
# Import appropriate modules from the client library.
from googleads import ad_manager
def main(client):
# Initialize appropriate service.
activity_group_service = client.GetService(
'ActivityGroupService', version='v202011')
# Create a statement to select activity groups.
statement = (ad_manager.StatementBuilder(version='v202011')
.Where('status = :status')
.WithBindVariable('status', 'ACTIVE'))
  # Retrieve a small number of activity groups at a time, paging
  # through until all activity groups have been retrieved.
while True:
response = activity_group_service.getActivityGroupsByStatement(
statement.ToStatement())
if 'results' in response and len(response['results']):
for activity_group in response['results']:
# Print out some information for each activity group.
print('Activity group with ID "%d" and name "%s" was found.\n' %
(activity_group['id'], activity_group['name']))
statement.offset += statement.limit
else:
break
print('\nNumber of results found: %s' % response['totalResultSetSize'])
if __name__ == '__main__':
# Initialize client object.
ad_manager_client = ad_manager.AdManagerClient.LoadFromStorage()
main(ad_manager_client)
| apache-2.0 | 721,227,215,421,250,300 | 36.634615 | 74 | 0.709249 | false |
macioosch/dynamo-hard-spheres-sim | convergence-plot.py | 1 | 6346 | #!/usr/bin/env python2
# encoding=utf-8
from __future__ import division, print_function
from glob import glob
from itertools import izip
from matplotlib import pyplot as plt
import numpy as np
input_files = glob("csv/convergence-256000-0.*.csv")
#input_files = glob("csv/convergence-500000-0.*.csv")
#input_files = glob("csv/convergence-1000188-0.*.csv")
#plotted_parameter = "msds_diffusion"
plotted_parameter = "pressures_collision"
#plotted_parameter = "pressures_virial"
#plotted_parameter = "msds_val"
#plotted_parameter = "times"
legend_names = []
tight_layout = False
show_legend = False
for file_number, file_name in enumerate(sorted(input_files)):
data = np.genfromtxt(file_name, delimiter='\t', names=[
"packings","densities","collisions","n_atoms","pressures_virial",
"pressures_collision","msds_val","msds_diffusion","times",
"std_pressures_virial","std_pressures_collision","std_msds_val",
"std_msds_diffusion","std_times"])
n_atoms = data["n_atoms"][0]
density = data["densities"][0]
equilibrated_collisions = data["collisions"] - 2*data["collisions"][0] \
+ data["collisions"][1]
"""
### 5 graphs: D(CPS) ###
tight_layout = True
skip_points = 0
ax = plt.subplot(3, 2, file_number+1)
plt.fill_between((equilibrated_collisions / n_atoms)[skip_points:],
data[plotted_parameter][skip_points:]
- data["std_" + plotted_parameter][skip_points:],
data[plotted_parameter][skip_points:]
+ data["std_" + plotted_parameter][skip_points:], alpha=0.3)
plt.plot((equilibrated_collisions / n_atoms)[skip_points:],
data[plotted_parameter][skip_points:], lw=2)
if plotted_parameter == "msds_diffusion":
plt.ylim(0.990*data[plotted_parameter][-1],
1.005*data[plotted_parameter][-1])
plt.xlim([0, 1e5])
plt.legend(["Density {}".format(data["densities"][0])], loc="lower right")
ax.yaxis.set_major_formatter(plt.FormatStrFormatter('%.4f'))
plt.xlabel("Collisions per sphere")
plt.ylabel("D")
"""
### 5 graphs: relative D(CPS) ###
tight_layout = True
skip_points = 0
ax = plt.subplot(3, 2, file_number+1)
plt.fill_between((equilibrated_collisions / n_atoms)[skip_points:],
-1 + (data[plotted_parameter][skip_points:]
- data["std_" + plotted_parameter][skip_points:])/data[plotted_parameter][-1],
-1 + (data[plotted_parameter][skip_points:]
+ data["std_" + plotted_parameter][skip_points:])/data[plotted_parameter][-1], alpha=0.3)
plt.plot((equilibrated_collisions / n_atoms)[skip_points:],
-1 + data[plotted_parameter][skip_points:]/data[plotted_parameter][-1], lw=2)
plt.ylim(data["std_" + plotted_parameter][-1]*20*np.array([-1, 1])/data[plotted_parameter][-1])
#plt.xscale("log")
plt.xlim([0, 1e5])
plt.legend(["$\\rho\\sigma^3=\\ {}$".format(data["densities"][0])], loc="lower right")
ax.yaxis.set_major_formatter(plt.FormatStrFormatter('%.2e'))
plt.xlabel("$C/N$")
plt.ylabel("$[Z_{MD}(C) / Z_{MD}(C=10^5 N)] - 1$")
"""
### 1 graph: D(t) ###
show_legend = True
skip_points = 0
plt.title("D(t) for 5 densities")
plt.loglog(data["times"][skip_points:],
data[plotted_parameter][skip_points:])
legend_names.append(data["densities"][0])
plt.xlabel("Time")
plt.ylabel("D")
"""
"""
### 1 graph: D(t) / Dinf ###
show_legend = True
skip_points = 0
#plt.fill_between(data["times"][skip_points:],
# (data[plotted_parameter] - data["std_" + plotted_parameter])
# / data[plotted_parameter][-1] - 1,
# (data[plotted_parameter] + data["std_" + plotted_parameter])
# / data[plotted_parameter][-1] - 1, color="grey", alpha=0.4)
plt.plot(data["times"][skip_points:],
data[plotted_parameter] / data[plotted_parameter][-1] - 1, lw=1)
legend_names.append(data["densities"][0])
#plt.xscale("log")
plt.xlabel("Time")
plt.ylabel("D / D(t --> inf)")
"""
"""
### 5 graphs: D(1/CPS) ###
tight_layout = True
skip_points = 40
ax = plt.subplot(3, 2, file_number+1)
plt.fill_between((n_atoms / equilibrated_collisions)[skip_points:],
data[plotted_parameter][skip_points:]
- data["std_" + plotted_parameter][skip_points:],
data[plotted_parameter][skip_points:]
+ data["std_" + plotted_parameter][skip_points:], alpha=0.3)
plt.plot((n_atoms / equilibrated_collisions)[skip_points:],
data[plotted_parameter][skip_points:], lw=2)
plt.title("Density {}:".format(data["densities"][0]))
ax.yaxis.set_major_formatter(plt.FormatStrFormatter('%.7f'))
plt.xlim(xmin=0)
plt.xlabel("1 / Collisions per sphere")
plt.ylabel("D")
"""
"""
### 1 graph: D(CPS) / Dinf ###
show_legend = True
plt.fill_between(equilibrated_collisions / n_atoms,
(data[plotted_parameter] - data["std_" + plotted_parameter])
/ data[plotted_parameter][-1] - 1,
(data[plotted_parameter] + data["std_" + plotted_parameter])
/ data[plotted_parameter][-1] - 1, color="grey", alpha=0.4)
plt.plot(equilibrated_collisions / n_atoms,
data[plotted_parameter] / data[plotted_parameter][-1] - 1, lw=2)
legend_names.append(data["densities"][0])
plt.xlabel("Collisions per sphere")
plt.ylabel("D / D(t --> inf)")
"""
"""
### 1 graph: D(1/CPS) / Dinf ###
show_legend = True
plt.fill_between(n_atoms / equilibrated_collisions,
(data[plotted_parameter] - data["std_" + plotted_parameter])
/ data[plotted_parameter][-1] - 1,
(data[plotted_parameter] + data["std_" + plotted_parameter])
/ data[plotted_parameter][-1] - 1, color="grey", alpha=0.4)
plt.plot( n_atoms / equilibrated_collisions,
data[plotted_parameter] / data[plotted_parameter][-1] - 1)
legend_names.append(data["densities"][0])
plt.xlabel(" 1 / Collisions per sphere")
plt.ylabel(plotted_parameter)
"""
#if tight_layout:
# plt.tight_layout(pad=0.0, w_pad=0.0, h_pad=0.0)
if show_legend:
plt.legend(legend_names, title="Density:", loc="lower right")
plt.show()
| gpl-3.0 | 1,205,206,185,801,680,600 | 39.941935 | 101 | 0.601954 | false |
amdouglas/OpenPNM | OpenPNM/Geometry/models/throat_misc.py | 1 | 1124 | r"""
===============================================================================
throat_misc -- Miscellaneous and generic functions to apply to throats
===============================================================================
"""
import scipy as _sp
def random(geometry, seed=None, num_range=[0, 1], **kwargs):
r"""
Assign random number to throats
note: should this be called 'poisson'?
"""
range_size = num_range[1] - num_range[0]
range_min = num_range[0]
_sp.random.seed(seed=seed)
value = _sp.random.rand(geometry.num_throats(),)
value = value*range_size + range_min
return value
def neighbor(geometry, network, pore_prop='pore.seed', mode='min', **kwargs):
r"""
Adopt a value based on the neighboring pores
"""
throats = network.throats(geometry.name)
P12 = network.find_connected_pores(throats)
pvalues = network[pore_prop][P12]
if mode == 'min':
value = _sp.amin(pvalues, axis=1)
if mode == 'max':
value = _sp.amax(pvalues, axis=1)
if mode == 'mean':
value = _sp.mean(pvalues, axis=1)
return value
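# Illustrative usage sketch (assumes an OpenPNM network 'pn' and geometry
# 'geom' with a 'pore.seed' property already defined; names are hypothetical):
#
#   geom['throat.seed'] = neighbor(geometry=geom, network=pn,
#                                  pore_prop='pore.seed', mode='min')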
| mit | 7,511,632,487,340,780,000 | 30.222222 | 79 | 0.536477 | false |
Digmaster/TicTacToe | Agent.py | 1 | 2030 | from random import randint
from random import getrandbits
from copy import deepcopy
# Agent that will either be the human player or a secondary agent for the dual agent play
class DumbAgent:
#initialize the board for the first player
def __init__(self, board):
self.board = board
    def __str__(self):
        # Note: 'player' is never stored on the agent, so fall back to '?'.
        return "Hi, I'm dumb agent. I play randomly as player {0}".format(getattr(self, 'player', '?'))
    # read in the next move for the human or secondary agent
def getNextMove(self, player):
board = deepcopy(self.board)
if(player!='X' and player!='O'):
raise ValueError('The only valid players are X and O')
while(True):
try:
square = randint(1, 9)
board.setSquare(square, player)
return square
except ValueError:
"""Do nothing"""
# Define the smart agent - uses the minimax algorithm
class SmartAgent:
def __init__(self, board):
self.board = board
self.signal = False
self.bestVal = None
    def __str__(self):
        # Note: 'player' is never stored on the agent, so fall back to '?'.
        return "Hi, I'm smart agent. I play whatever move will net me the most points, or deny my opponent points. I'm {0}".format(getattr(self, 'player', '?'))
# to get the next move,call the decideMove function
def getNextMove(self, player):
self.decideMove(deepcopy(self.board), player)
return self.bestVal
def decideMove(self, board, player):
if(self.signal):
return 0
winner = board.testWin() # test for a winning solution to the current state
if(winner!='.'):
if(winner=='X'):
return 1.0
elif(winner=='T'):
return 0.0
else:
return -1.0
values = []
moves = {}
for i in range(1,10):
if(self.signal):
return 0
if(board.getSquare(i)=='.'):
nBoard = deepcopy(board)
nBoard.setSquare(i, player)
value = self.decideMove(nBoard, 'X' if player=='O' else 'O')
values.append(value)
moves[value] = i
                if(player=='X' and value==1):
break
elif(player=='O' and value==-1):
break
# calculate the highest probability / best move
if(player=='X'):
sum = max(values)
else:
sum = min(values)
self.bestVal = moves[sum]
return sum
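# Illustrative usage sketch (assumes a Board class exposing the getSquare,
# setSquare and testWin methods used above; names are hypothetical):
#
#   board = Board()
#   agent = SmartAgent(board)
#   square = agent.getNextMove('X')   # best square for player X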
| apache-2.0 | 287,197,937,694,822,820 | 25.363636 | 132 | 0.666995 | false |
mfit/PdfTableAnnotator | script/csv-compare.py | 1 | 8051 | """
Copyright 2014 Matthias Frey
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
"""
CSV-compare
-----------
Compare table data stored in CSV (comma seperated values) format.
"""
import re
import csv
import sys
import os
def _pr_list(l1, l2, replace_chars = '[\n ]'):
""" Calculate precision and recall regarding elements of a list.
When a 1:1 match cannot be achieved, the list pointers will be
moved forward until a match occurs (first of list A, then of list B).
The closest match will count, and matching will continue from those
list positions onwards.
The replace_chars parameter is used to remove characters from the
strings before comparing. The default will remove newlines and spaces.
"""
def _fnext(l, item):
item = re.sub(replace_chars, '', item).strip()
for i, txt in enumerate(l):
txt = re.sub(replace_chars, '', txt).strip()
if txt == item:
return i
return -1
if len(l2)==0 or len(l1)==0:
return 0, 0
i = 0
j = 0
match = 0
while len(l1)>i and len(l2)>j:
t1 = re.sub(replace_chars, '', l1[i]).strip()
t2 = re.sub(replace_chars, '', l2[j]).strip()
if t1 == t2:
match += 1
i += 1
j += 1
else:
ii = _fnext(l1[i:], l2[j])
jj = _fnext(l2[j:], l1[i])
if ii>=0 and (ii<jj or jj<0): i+=ii
elif jj>=0: j+=jj
else:
i+=1
j+=1
return float(match)/len(l2), float(match)/len(l1)
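# Illustrative example (not part of the original module): comparing
# ['a', 'b', 'c'] against ['a', 'c'] matches 'a' and 'c', so
#
#   _pr_list(['a', 'b', 'c'], ['a', 'c'])  # => (1.0, 0.666...)
#
# i.e. match/len(l2) = 2/2 and match/len(l1) = 2/3.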
def clean_table(tab):
""" Remove trailing empty cells resulting from the way some
spreadsheet application output csv for multi table documents.
"""
if len(tab) == 0:
return []
n_empty=[]
for row in tab:
for n, val in enumerate(reversed(row)):
if val!='':
break
n_empty.append(n)
strip_cols = min(n_empty)
cleaned = []
for row in tab:
cleaned.append(row[0:len(row)-strip_cols])
return cleaned
def compare_tables(tab1, tab2):
""" Compare two tables (2dim lists).
"""
info = {'rows_a':len(tab1),
'rows_b':len(tab2),
'rows_match': 1 if len(tab1) == len(tab2) else 0,
}
sizesA = [len(l) for l in tab1]
sizesB = [len(l) for l in tab2]
info['dim_match'] = 1 if sizesA == sizesB else 0
info['size_a'] = sum(sizesA)
info['size_b'] = sum(sizesA)
if len(sizesA)>0 and len(sizesB)>0:
info['cols_match'] = 1 if min(sizesA) == max(sizesA) and \
min(sizesB) == max(sizesB) and min(sizesA) == min(sizesB) else 0
# 'flatten' tables
cellsA = []
cellsB = []
for r in tab1: cellsA += [c for c in r]
for r in tab2: cellsB += [c for c in r]
info['p'], info['r'] = _pr_list(cellsA, cellsB)
info['F1'] = F1(info['p'], info['r'])
return info
def compare_files_pr(file1, file2):
""" Calculate simple P/R .
Compare lists of cells, left to right , top to bottom.
"""
cells = [[], []]
for i, fname in enumerate([file1, file2]):
with file(fname) as csvfile:
rd = csv.reader(csvfile, delimiter=',', quotechar='"')
for r in rd:
cells[i] += [c for c in r]
return _pr_list(*cells)
def compare_files(file1, file2):
""" Compare two csv files.
"""
groundtruth = read_tables_from_file(file1)
try:
compare = read_tables_from_file(file2)
except:
compare = []
tbs = [groundtruth, compare]
finfo = {'tabcount_a': len(tbs[0]),
'tabcount_b': len(tbs[1]),
'tabcount_match': len(tbs[0]) == len(tbs[1]),
}
finfo['tables']=[]
for n in range(0, len(tbs[0])):
if finfo['tabcount_match']:
comp_info = compare_tables(tbs[0][n], tbs[1][n])
else:
if n < len(tbs[1]):
comp_info = compare_tables(tbs[0][n], tbs[1][n])
else:
comp_info = compare_tables(tbs[0][n], [[]])
comp_info['n']=n
finfo['tables'].append(comp_info)
return finfo
def output_compareinfo_csv(file, info, fields=['p', 'r', 'F1']):
""" Pre-format a row that holds measures about similarity of a table
to the ground truth.
"""
lines = []
tabmatch = 1 if info['tabcount_match'] else 0
for tinfo in info['tables']:
lines.append([file, str(tabmatch)] + [str(tinfo[k]) for k in fields])
return lines
def F1(p, r):
""" Calculate F1 score from precision and recall.
Returns zero if one of p, r is zero.
"""
return (2*p*r/(p+r)) if p != 0 and r != 0 else 0
def read_tables_from_file(csvfile):
""" Opens csvfile, returns all tables found.
Guesses csv format (delimiter, etc.)
Splits data into different tables at newline (or empty row).
Returns list of tables.
"""
tables=[]
table_id = 0
with file(csvfile) as f:
sniffer = csv.Sniffer()
dialect = sniffer.sniff(f.next())
rd = csv.reader(f, delimiter=dialect.delimiter,
quotechar=dialect.quotechar)
for r in rd:
if len(tables) <= table_id:
tables.append([])
# Begin next table if there is an empty line
if r == [] or sum([len(v) for v in r]) == 0:
if len(tables[table_id])>0:
table_id+=1
else:
tables[table_id].append(r)
return [clean_table(t) for t in tables if t!=[]]
if __name__ == '__main__':
""" Script usage.
"""
fields = [
#'rows_a', 'rows_b',
#'size_a', 'size_b',
'n',
'rows_match', 'cols_match', 'dim_match',
'p', 'r', 'F1',]
limitchar = ' & '
if len(sys.argv) < 3:
print "Specify two (csv-)files or directories"
quit(-1)
# Params 1 + 2 are files or directories
file1 = sys.argv[1]
file2 = sys.argv[2]
srcinfo = [os.path.basename(file1), os.path.basename(file2)]
# 3rd parameter becomes 'tooldef' (text cols to name rows),
# and 4th parameter tells whether to print headers
tooldef = sys.argv[3].split('-') if len(sys.argv) > 3 else ['na', 'na']
print_headers = len(sys.argv) > 4 and sys.argv[4] in ["1", "y", "yes"]
if print_headers:
print ','.join(['name', 'tool', 'src1', 'src2',
'filename', 'tabsmatch',] + fields)
if os.path.isfile(file1) and os.path.isfile(file2):
inf = compare_files(file1, file2)
lines = output_compareinfo_csv(file1, inf, fields)
for l in lines:
print ','.join(tooldef + srcinfo + l)
elif os.path.isdir(file1) and os.path.isdir(file2):
for f in [path for path in os.listdir(file1) if path[-4:]=='.csv']:
if os.path.isfile(file2 + '/' + f):
inf = compare_files(file1 + '/' + f, file2 + '/' + f)
lines = output_compareinfo_csv(f, inf, fields)
for l in lines:
print ','.join(tooldef + srcinfo + l)
else:
print ','.join(['','',] + srcinfo + ['', "Missing {} for {} {}".format(f, *tooldef)]) | apache-2.0 | -7,229,538,163,487,513,000 | 29.044776 | 101 | 0.527264 | false |
3dfxmadscientist/odoo-infrastructure | addons/infrastructure/hostname.py | 1 | 1468 | # -*- coding: utf-8 -*-
##############################################################################
#
# Infrastructure
# Copyright (C) 2014 Ingenieria ADHOC
# No email
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import re
from openerp import netsvc
from openerp.osv import osv, fields
class hostname(osv.osv):
""""""
_name = 'infrastructure.hostname'
_description = 'hostname'
_columns = {
'name': fields.char(string='name', required=True),
'server_id': fields.many2one('infrastructure.server', string='Server', ondelete='cascade', required=True),
}
_defaults = {
}
_constraints = [
]
hostname()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 2,010,866,711,856,777,500 | 27.784314 | 115 | 0.608311 | false |
locke105/mclib | examples/wsgi.py | 1 | 1781 |
import cgi
import json
from wsgiref import simple_server
import falcon
from mclib import mc_info
class MCInfo(object):
def on_get(self, req, resp):
host = req.get_param('host', required=True)
        port = req.get_param_as_int('port', min=1024,
                                    max=65535)
try:
if port is not None:
info = mc_info.get_info(host=host,
port=port)
else:
info = mc_info.get_info(host=host)
except Exception:
raise Exception('Couldn\'t retrieve info.')
if '.json' in req.uri:
resp.body = self.get_json(info)
return
preferred = req.client_prefers(['application/json', 'text/html'])
if 'html' in preferred:
resp.content_type = 'text/html'
resp.body = self.get_html(info)
else:
resp.body = self.get_json(info)
def get_html(self, info):
html = """<body>
<style>
table,th,td
{
border:1px solid black;
border-collapse:collapse
}
th,td
{
padding: 5px
}
</style>
<table>
"""
for k,v in info.iteritems():
items = {'key': cgi.escape(k)}
if isinstance(v, basestring):
items['val'] = cgi.escape(v)
else:
items['val'] = v
html = html + '<tr><td>%(key)s</td><td>%(val)s</td></tr>' % items
html = html + '</table></body>'
return html
def get_json(self, info):
return json.dumps(info)
app = falcon.API()
mcinfo = MCInfo()
app.add_route('/mcinfo', mcinfo)
app.add_route('/mcinfo.json', mcinfo)
if __name__ == '__main__':
httpd = simple_server.make_server('0.0.0.0', 3000, app)
httpd.serve_forever()
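# Illustrative query against the running server (hostname and port of the
# Minecraft server are hypothetical):
#
#   curl 'http://localhost:3000/mcinfo.json?host=mc.example.com&port=25565'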
| apache-2.0 | 4,936,456,139,620,774,000 | 21.2625 | 77 | 0.521617 | false |
Meertecha/LearnPythonTheGame | pyGameEngine.py | 1 | 3565 | ### Imports
import pickle, os, platform, random
### Functions
def main():
curPlayer = loadPlayer( 'Tory' )
curGame = loadGame( 'Python_Tutorial' )
startGame(curPlayer, curGame)
def banner():
'''
if platform.system() == "Windows":
clearCmd = "cls"
elif platform.system() == "Linux":
clearCmd = "clear"
else:
print ("Unknown operating system detected. Some operations may not perform correctly!\n")
os.system(clearCmd)
'''
version = 0.1
banner = (" **Welcome to the Python Learning Environment\n\
**Written by Tory Clasen - Version: " + str(version) + " \n\
**For help at any time please type '?' or 'help' \n\
**To exit the program type 'exit' or 'quit' \n\n")
print banner
def startGame(curPlayer, curGame):
try:
curScore = curPlayer['score'][curGame['gameName']]
except:
curScore = 0
while True:
#banner()
print '----------------------------------------\n' + curGame['gameName'] + ' has been loaded'
print curGame['banner'] + '\n----------------------------------------'
try:
pickle.dump( curPlayer, open( ( str(curPlayer['Name']) + ".plep"), "wb" ) )
except:
print "Error! Unable to save player profile at current location!"
print 'Your current score is: ' + str(curScore) + ' out of a total possible score of: ' + str(len(curGame['gameData']))
print "Question " + str(curScore) + ": \n" + str(curGame['gameData'][curScore]["Q"]) + "\n"
temp = curGame['gameData'][curScore]["D"]
data = eval(str(curGame['gameData'][curScore]["D"]))
print "Data " + str(curScore) + ": \n" + data
print '----------------------------------------\n'
try:
myAnswer = eval(str(getInput('What command do you want to submit? ')))
if myAnswer == (eval(str(curGame['gameData'][curScore]["A"]))):
print "Correct!"
curScore = curScore + 1
else:
print "Incorrect!"
except:
print 'The answer you submitted crashed the program, so it was probably wrong'
#break
def getInput(prompt):
theInput = raw_input( str(prompt) + "\n" )
if theInput == '?' or theInput.lower() == 'help':
print "HELP! HELP!"
elif theInput.lower() == 'exit' or theInput.lower() == 'quit':
raise SystemExit
else:
return theInput
def loadPlayer(playerName = ''):
#banner()
curPlayer = {}
if playerName == '':
playerName = getInput("I would like to load your profile. \nWhat is your name? ")
try:
# Attempt to load the player file.
curPlayer = pickle.load( open( ( str(playerName) + ".plep"), "rb" ) )
print "Player profile found... loading player data..."
except:
# Ask the player if they want to try to create a new profile file.
createNew = getInput( "Player profile not found for '" + str(playerName) + "'\nWould you like to create a new one? [Y/N]").lower()
curPlayer = {'Name':playerName}
if createNew == "y":
try:
pickle.dump( curPlayer, open( ( str(playerName) + ".plep"), "wb" ) )
print "Player profile successfully created!"
except:
print "Error! Unable to create player profile at current location!"
else:
print "Progress will not be saved for you..."
return curPlayer
def loadGame(gameName = ''):
banner()
curGame = {}
while True:
if gameName == '':
gameName = getInput("What game would you like to load? ")
try:
# Attempt to load the player file.
curGame = pickle.load( open( ( str(gameName) + ".pleg"), "rb" ) )
print "Game module found... loading game data..."
gameName = ''
break
except:
gameName = ''
print "Game module not found... please try again..."
return curGame
main()
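# Illustrative sketch (not part of the original module) of how a game module
# (*.pleg) could be created, based on the fields read above; all content here
# is hypothetical:
#
#   demo = {'gameName': 'Python_Tutorial',
#           'banner': 'Learn Python, one question at a time!',
#           'gameData': [{'Q': 'Return the length of the data string.',
#                         'D': "'hello'",
#                         'A': 'len(data)'}]}
#   pickle.dump(demo, open('Python_Tutorial.pleg', 'wb'))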
| mit | 4,891,151,655,040,956,000 | 31.409091 | 133 | 0.615708 | false |