repo_name stringlengths 5-92 | path stringlengths 4-232 | copies stringclasses 19 values | size stringlengths 4-7 | content stringlengths 721-1.04M | license stringclasses 15 values | hash int64 -9,223,277,421,539,062,000-9,223,102,107B | line_mean float64 6.51-99.9 | line_max int64 15-997 | alpha_frac float64 0.25-0.97 | autogenerated bool 1 class |
---|---|---|---|---|---|---|---|---|---|---|
YeoLab/gscripts | tests/test_sailfish_index.py | 1 | 1318 | __author__ = 'olga'
import unittest
from gscripts.rnaseq.sailfish_index import SailfishIndex
import tests
import os
import shutil
import sys
class Test(unittest.TestCase):
out_dir = 'test_output'
def setUp(self):
os.mkdir(self.out_dir)
def tearDown(self):
shutil.rmtree(self.out_dir)
def test_sailfish_index(self):
job_name = 'sailfish_index'
out_sh = '{}/{}.sh'.format(self.out_dir, job_name)
SailfishIndex(fasta='data/test.fasta', kmer_size=31,
job_name=job_name, out_sh=out_sh, submit=False)
true_result = """#!/bin/bash
#PBS -N sailfish_index
#PBS -o test_output/sailfish_index.sh.out
#PBS -e test_output/sailfish_index.sh.err
#PBS -V
#PBS -l walltime=0:30:00
#PBS -l nodes=1:ppn=8
#PBS -A yeo-group
#PBS -q home
# Go to the directory from which the script was called
cd $PBS_O_WORKDIR
sailfish index --kmerSize 31 --threads 8 --transcripts data/test.fasta --out data/test.fasta_sailfish_index_k31
"""
true_result = true_result.split('\n')
# with open(out_sh) as f:
# for line in f:
# print line,
for true, test in zip(true_result, open(out_sh)):
self.assertEqual(true.strip().split(), test.strip().split())
if __name__ == "__main__":
unittest.main() | mit | 2,624,321,952,649,796,600 | 25.918367 | 111 | 0.628225 | false |
kronicz/ecommerce-2 | bin/player.py | 1 | 2211 | #!/Users/michelkrohn-dale/Desktop/ecommerce-2/bin/python
#
# The Python Imaging Library
# $Id$
#
from __future__ import print_function
try:
from tkinter import *
except ImportError:
from Tkinter import *
from PIL import Image, ImageTk
import sys
# --------------------------------------------------------------------
# an image animation player
class UI(Label):
def __init__(self, master, im):
if isinstance(im, list):
# list of images
self.im = im[1:]
im = self.im[0]
else:
# sequence
self.im = im
if im.mode == "1":
self.image = ImageTk.BitmapImage(im, foreground="white")
else:
self.image = ImageTk.PhotoImage(im)
Label.__init__(self, master, image=self.image, bg="black", bd=0)
self.update()
try:
duration = im.info["duration"]
except KeyError:
duration = 100
self.after(duration, self.next)
def next(self):
if isinstance(self.im, list):
try:
im = self.im[0]
del self.im[0]
self.image.paste(im)
except IndexError:
return # end of list
else:
try:
im = self.im
im.seek(im.tell() + 1)
self.image.paste(im)
except EOFError:
return # end of file
try:
duration = im.info["duration"]
except KeyError:
duration = 100
self.after(duration, self.next)
self.update_idletasks()
# --------------------------------------------------------------------
# script interface
if __name__ == "__main__":
if not sys.argv[1:]:
print("Syntax: python player.py imagefile(s)")
sys.exit(1)
filename = sys.argv[1]
root = Tk()
root.title(filename)
if len(sys.argv) > 2:
# list of images
print("loading...")
im = []
for filename in sys.argv[1:]:
im.append(Image.open(filename))
else:
# sequence
im = Image.open(filename)
UI(root, im).pack()
root.mainloop()
| mit | -4,403,060,269,132,047,400 | 20.676471 | 72 | 0.475351 | false |
yaukwankiu/armor | dataStreamTools/cwbFileNames.py | 1 | 2022 | """
to fix the file names for the data from cwb
201403111800f006_M15.txt -> WRF15.20140312.0000.txt
"""
#from armor import pattern
#import shutil
import os
import datetime, time
from armor import defaultParameters as dp
root = dp.defaultRootFolder
obsFolder = root + "data/march2014/QPESUMS/"
wrfsFolder1 = root + "data/march2014/WEPS/20140311/"
wrfsFolder2 = root + "data/march2014/WEPS/20140312/"
wrfsFolder3 = root + "data/march2014/WEPS/20140313/"
kongreywrf = root + "data/KONG-REY/WRFEPS/"
may19 = root + "data/may14/WRFEPS19[regridded]/"
may20 = root + "data/may14/WRFEPS20[regridded]/"
may21 = root + "data/may14/WRFEPS21[regridded]/"
may22 = root + "data/may14/WRFEPS22[regridded]/"
may23 = root + "data/may14/WRFEPS23[regridded]/"
folderList = [ may21, may22, may23] #<-- change here
#folderList=[may20]
count = 0
#for folder in [wrfsFolder1, wrfsFolder2, wrfsFolder3]:
for folder in folderList:
print "Folder", folder
os.makedirs(folder+"a/")
#time.sleep(2)
L = os.listdir(folder)
L = [v for v in L if v.endswith(".txt") and not v.startswith("WRF")]
L.sort()
for f1 in L:
count +=1
print count, f1,
path1 = folder + f1
year = int(f1[0:4])
month = int(f1[4:6])
day = int(f1[6:8])
hour = int(f1[8:10])
minute = int(f1[10:12])
hourDiff= int(f1[13:16])
modelNo = f1[18:20]
suffix = f1[20:]
T = datetime.datetime(year, month, day, hour, minute) + datetime.timedelta(hourDiff*1./24)
year2 = str(T.year)
month2 = ("0"+str(T.month))[-2:]
day2 = ("0"+str(T.day))[-2:]
hour2 = ("0"+str(T.hour))[-2:]
minute2 = ("0"+str(T.minute))[-2:]
f2 = "WRF" + modelNo + "." + year2 + month2 + day2 + "." + hour2 + minute2 + suffix
print "->", f2
try:
os.rename(folder+f1, folder+"a/"+f2)
except:
print f1, "not found!!"
| cc0-1.0 | 550,889,382,923,040,960 | 33.271186 | 104 | 0.577151 | false |
eSmelser/SnookR | SnookR/api/views.py | 1 | 3749 | import hashlib
from rest_framework.generics import ListCreateAPIView, UpdateAPIView, ListAPIView, RetrieveAPIView, CreateAPIView
from django.core.cache import caches
from rest_framework.response import Response
from substitutes.models import Session, SessionEvent, Sub
from accounts.models import CustomUser
from teams.models import Team, NonUserPlayer
from invites.models import SessionEventInvite, TeamInvite
from api.serializers import (
TeamInviteSerializer,
TeamInviteUpdateSerializer,
TeamSerializer,
CustomUserSerializer,
NonUserPlayerSerializer,
SessionSerializer,
SessionEventSerializer,
SubSerializer,
SessionEventInviteSerializer,
)
from api.permissions import TeamPermission, TeamInvitePermission
from api.filters import TeamFilter, TeamInviteFilter, UserFilter, SessionFilter, SessionEventFilter, SubFilter, \
SessionEventInviteFilter
class UserView(RetrieveAPIView):
serializer_class = CustomUserSerializer
queryset = CustomUser.objects.all()
def get_object(self):
return CustomUser.objects.get(username=self.request.user.username)
class UserSearchView(ListAPIView):
serializer_class = CustomUserSerializer
queryset = CustomUser.objects.all()
    def list(self, request, *args, **kwargs):
        # The pdb breakpoint left here was debugging residue and never
        # returned a response; fall back to the stock ListAPIView flow.
        return super(UserSearchView, self).list(request, *args, **kwargs)
class UserListView(ListAPIView):
serializer_class = CustomUserSerializer
queryset = CustomUser.objects.all()
filter_class = UserFilter
filter_fields = ('username', 'id', 'first_name', 'last_name')
class TeamView(ListCreateAPIView):
queryset = Team.objects.all()
serializer_class = TeamSerializer
permission_classes = (TeamPermission,)
filter_class = TeamFilter
filter_fields = ('id', 'name')
class TeamInviteListView(ListCreateAPIView):
queryset = TeamInvite.objects.all()
serializer_class = TeamInviteSerializer
permission_classes = (TeamInvitePermission,)
filter_class = TeamInviteFilter
filter_fields = ('invitee', 'team', 'status', 'id')
class TeamInviteUpdateView(UpdateAPIView):
queryset = TeamInvite.objects.all()
serializer_class = TeamInviteUpdateSerializer
class NonUserPlayerListCreateView(ListCreateAPIView):
queryset = NonUserPlayer.objects.all()
serializer_class = NonUserPlayerSerializer
class SessionListView(ListAPIView):
serializer_class = SessionSerializer
queryset = Session.objects.all()
filter_class = SessionFilter
filter_fields = tuple(['division'] + list(SessionFilter.Meta.fields.keys()))
class SubListView(ListCreateAPIView):
serializer_class = SubSerializer
queryset = Sub.objects.all()
filter_class = SubFilter
class SessionEventListView(ListAPIView):
queryset = SessionEvent.objects.all()
serializer_class = SessionEventSerializer
filter_class = SessionEventFilter
class SearchUserView(ListAPIView):
def list(self, request, *args, **kwargs):
cache = caches['default']
query = self.request.GET.get('query', '')
key = 'search_user_view:%s' % hashlib.md5(query.encode('ascii', 'ignore')).hexdigest()
objs = cache.get(key)
if objs is None:
objs = CustomUser.objects.search(query)
cache.set(key, objs, 60 * 3)
serializer = CustomUserSerializer(objs, many=True)
return Response(serializer.data)
class SessionEventInviteListView(ListCreateAPIView):
queryset = SessionEventInvite.objects.all()
serializer_class = SessionEventInviteSerializer
filter_class = SessionEventInviteFilter
class SessionEventInviteView(RetrieveAPIView):
queryset = SessionEventInvite.objects.all()
serializer_class = SessionEventInviteSerializer
filter_class = SessionEventInviteFilter
| mit | 78,620,710,748,288,240 | 30.504202 | 113 | 0.746599 | false |
alvare/python-funcional | parser-monad.py | 1 | 1535 | import sys
from pymonad.Reader import curry
from functools import reduce  # reduce is not a builtin on Python 3; used by word()
from Parser import *
def item():
return Parser(lambda cs: [] if cs == "" else [(cs[0], cs[1:])])
def sat(cond):
return item() >> (lambda c: unit(Parser, c) if cond(c) else mzero(Parser))
def char(c):
return sat(lambda x: c == x)
def space():
return char(' ')
def oneOf(chars):
return sat(lambda x: x in chars)
def many(p):
return many1(p) | unit(Parser, [])
def many1(p):
return p >> (lambda x:
many(p) >> (lambda xs:
unit(Parser, [x] + xs)))
def sepby(p, sep):
return sepby1(p, sep) | unit(Parser, [])
def sepby1(p, sep):
return p >> (lambda x:
many(sep >> p) >> (lambda xs:
unit(Parser, [x] + xs)))
def string(s):
if s:
return char(s[0]) >>\
string(s[1:]) >>\
unit(Parser, s)
else:
return unit(Parser, '')
def chainl(p, op, a):
return chainl1(p, op) | unit(Parser, a)
def chainl1(p, op):
def rest(a):
return (op >> (lambda f: p >> (lambda b: rest(f(a, b))))) | unit(Parser, a)
return p >> rest
# examples
alpha = 'abcdefghijklmnopqrstuvwxyz'
num = '1234567890'
def word():
return many(oneOf(alpha)) >> (lambda w:
unit(Parser, reduce(lambda x, y: x + y, w, '')))
def array_parser():
return string('array') >>\
space() >>\
char('[') >>\
sepby(word(), char(',')) >> (lambda e:
char(']') >>
unit(Parser, e))
print(array_parser().parse(sys.argv[1]))
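# A hedged usage sketch (added for illustration; not part of the original
# script): chainl1 is defined above but never exercised. It parses a
# left-associative chain, for example sums of single digits. The `digit`
# and `plus_op` helpers are names introduced for this example only.
def digit():
    return oneOf(num) >> (lambda d: unit(Parser, int(d)))
def plus_op():
    return char('+') >> unit(Parser, lambda a, b: a + b)
# chainl1(digit(), plus_op()).parse('1+2+3') should yield [(6, '')]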
| mit | 4,481,513,220,722,752,500 | 21.573529 | 83 | 0.522476 | false |
Commonists/bingo | bingo/card_maker.py | 1 | 1930 | # -*- coding: utf-8 -*-
import os
from string import Template, ascii_uppercase
import itertools
class CardMaker(object):
def __init__(self, size):
self.size = size
self.elements = []
def make_card(self):
        raise NotImplementedError()
class MarkdownCardMaker(CardMaker):
def make_card(self):
res = []
for row_index in range(self.size):
            # each row is a consecutive, non-overlapping slice of `size` items
            row_items = self.elements[row_index * self.size:(row_index + 1) * self.size]
res.append(self.make_row(row_items))
return '\n'.join(res)
def make_row(self, elements):
return "|".join(elements)
class LaTeXCardMaker(CardMaker):
def read_template(self):
path = os.path.dirname(os.path.realpath(__file__))
template_file = os.path.join(path, 'tex', 'template.tex')
return open(template_file, 'r').read()
def make_card(self):
contents = self.make_card_contents()
return contents
def make_card_contents(self):
template = Template(self.read_template())
node_definitions = self.make_node_definitions()
values = {
'title': "Wikimania 2015 bingo",
'size': self.size,
'sequence': '1/A, 2/B, 3/C, 4/D, 5/E',
'node_definitions': "\n".join(node_definitions)
}
return template.safe_substitute(values)
def get_node_list(self):
alphabet = ascii_uppercase
letters = alphabet[0:self.size]
cartesian_product = itertools.product(letters, letters)
node_list = ['%s%s' % (x, y) for (x, y) in cartesian_product]
node_list.remove('CC')
return node_list
def make_node_definitions(self):
nodes = self.get_node_list()
return [self.make_element(x, y) for (x, y) in zip(nodes, self.elements)]
def make_element(self, index, contents):
element = r"\newcommand{\Node%s}{%s}" % (index, contents)
return element
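# A hedged usage sketch (added for illustration; not in the original module):
# build a 3x3 Markdown card from nine phrases. MarkdownCardMaker expects
# `elements` to hold size*size entries, one per cell.
if __name__ == '__main__':
    maker = MarkdownCardMaker(3)
    maker.elements = ['cell %d' % i for i in range(9)]
    print(maker.make_card())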
| mit | 8,819,562,063,728,641,000 | 27.80597 | 80 | 0.592746 | false |
mtivadar/qiew | TextDecorators.py | 1 | 13791 | from PyQt5 import QtGui, QtCore, QtWidgets
import re
import string
from time import time
import sys
import TextSelection
class CTextDecorator(object):
redPen = QtGui.QPen(QtGui.QColor(255, 0, 0))
greenPen = QtGui.QPen(QtGui.QColor(255, 255, 0))
whitePen = QtGui.QPen(QtGui.QColor(255, 255, 255))
normalPen = QtGui.QPen(QtGui.QColor(192, 192, 192), 1, QtCore.Qt.SolidLine)
MZbrush = QtGui.QBrush(QtGui.QColor(128, 0, 0))
grayBrush = QtGui.QBrush(QtGui.QColor(128, 128, 128))
def __init__(self):
pass
class TextDecorator(CTextDecorator):
def __init__(self, viewmode):
self.operations = []
self.dataModel = viewmode.getDataModel()
self.viewmode = viewmode
self.penMap = {}
self.brushMap = {}
self.PenInterval = []
self.normalPen = QtGui.QPen(QtGui.QColor(192, 192, 192), 1, QtCore.Qt.SolidLine)
# if we want to generate T/F table
self.Special = string.ascii_letters + string.digits + ' .;\':;=\"?-!()/\\_'
self.Special = [False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False,
False, False, False, False, False, False, False, False, False, False, False, False, True, True, True, False, False, False, False, True, True,
True, False, False, False, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, False, True, False, True,
False, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True,
True, True, True, False, True, False, False, True, False, True, True, True, True, True, True, True, True, True, True, True, True, True, True, True,
True, True, True, True, True, True, True, True, True, True, True, False, False, False, False, False, False, False, False, False, False, False,
False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False,
False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False,
False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False,
False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False,
False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False,
False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False, False]
def reset(self):
self.penMap = {}
self.brushMap = {}
self.PenInterval = []
def getDataModel(self):
return self.dataModel
def isText(self, c):
"""
D = []
for i in range(256):
b = False
if self.isText(chr(i)):
b = True
D.append(b)
        print(D)
sys.exit()
"""
return self.Special[ord(c)]
def getChar(self, idx):
#self.page = self.getDataModel().getDisplayablePage()
if idx < len(self.page):
return self.page[idx]
return 0
def decorate(self, pageOffset=None):
if pageOffset:
self.page = self.viewmode.getDisplayablePage(pageOffset=pageOffset)
else:
self.page = self.viewmode.getDisplayablePage()
return self.page
def addPenInterval(self, a, b, pen, ignoreHighlights=True):
self.PenInterval.append((a, b, pen, ignoreHighlights))
def choosePen(self, idx):
key = self.dataModel.getOffset() + idx
# if we do have a pen with that index, return it if it's different than default pen
# otherwise, return the pen that was set in that interval
# the priority here is de pen from other transformations, than interval pen
for a, b, ignoreHighlights, pen in self.PenInterval:
# in interval
if a <= key <= b:
if ignoreHighlights:
return pen
if key in self.penMap:
if self.penMap[key] == self.normalPen:
return pen
else:
return self.penMap[key]
else:
return pen
if key in self.penMap:
return self.penMap[key]
return self.normalPen
def chooseBrush(self, idx):
off = self.dataModel.getOffset() + idx
if off in self.brushMap:
return self.brushMap[off]
return None
class PageDecorator(TextDecorator):
def __init__(self, decorated):
pass
def reset(self):
self.decorated.reset()
self.penMap = {}
self.brushMap = {}
self.PenInterval = []
def getBrushMap(self):
return self.brushMap
def getPenMap(self):
return self.penMap
def doit(self):
pass
def getDataModel(self):
return self.dataModel
class HighlightASCII(PageDecorator):
def __init__(self, decorated):
self.dataModel = decorated.getDataModel()
self.penMap = decorated.penMap
self.decorated = decorated
super(HighlightASCII, self).__init__(decorated)
self.dataModel = super(HighlightASCII, self).getDataModel()
def decorate(self, pageOffset=None):
page = self.decorated.decorate(pageOffset)
self.PenInterval = self.decorated.PenInterval
self.brushMap = self.decorated.brushMap
self.penMap = self.decorated.penMap
off = self.dataModel.getOffset()
Match = [(m.start(), m.end()) for m in re.finditer(rb'([a-zA-Z0-9\-\\.%*:/? _<>]){4,}', page)]
for s, e in Match:
for i in range(e-s):
idx = off + s + i
if idx not in self.penMap:
self.penMap[off + s + i] = self.redPen
self.page = page
return self.page
class HighlightPrefix(PageDecorator):
def __init__(self, decorated, text, additionalLength=0, brush=None, pen=None):
super(HighlightPrefix, self).__init__(decorated)
self.dataModel = decorated.getDataModel()
self.decorated = decorated
self.additionalLength = additionalLength
self.brush = brush
self.text = text
self.pen = pen
def decorate(self, pageOffset=None):
page = self.decorated.decorate(pageOffset)
self.PenInterval = self.decorated.PenInterval
self.brushMap = self.decorated.brushMap
self.penMap = self.decorated.penMap
self.page = self.highliteWithPrefix(page, self.text, self.additionalLength, self.brush, self.pen)
return self.page
def highliteWithPrefix(self, page, text, additionalLength=0, brush=None, pen=None):
        # todo: I haven't found a more elegant way to select all occurrences of `text`;
        # regexps don't work here ("bad re expression")
lenText = len(text)
M = []
idx = 0
if lenText > 0:
while idx < len(page):
idx = page.find(text.encode('utf-8'), idx, len(page))
if idx == -1:
break
M.append((idx, lenText + additionalLength))
idx += lenText + additionalLength
off = self.dataModel.getOffset()
for start, length in M:
for i in range(length):
self.penMap[off + start + i] = pen
self.brushMap[off + start + i] = brush
return page
class HighlightWideChar(PageDecorator):
def __init__(self, decorated):
super(HighlightWideChar, self).__init__(decorated)
self.dataModel = decorated.getDataModel()
self.decorated = decorated
def decorate(self, pageOffset=None):
self.page = self.decorated.decorate(pageOffset)
self.PenInterval = self.decorated.PenInterval
self.brushMap = self.decorated.brushMap
self.penMap = self.decorated.penMap
self.page = self.highliteWidechar2(self.page)
return self.page
def highliteWidechar2(self, page):
pageStart = self.dataModel.getOffset()
pageEnd = pageStart + len(page)
touched = False
#for s, e in self.Intervals:
# touched = True
if not touched:
# expand
Match = [(m.start(), m.end()) for m in re.finditer(rb'([a-zA-Z0-9\-\\.%*:/? ]\x00){4,}', page)]
for s, e in Match:
for i in range(e-s):
#print i
self.penMap[pageStart + s + i] = QtGui.QPen(QtGui.QColor(255, 255, 0))
# get rid of '\x00'
string = page[s:e:2]
l = len(string)
# copy string that has no zeros
page[s:s + l] = string
# fill with zeros the remaining space
page[s + l: s + 2*l] = b'\x00'*l
return page
### todo: other way to highlight widechar, should test and see which one is faster
"""
def _changeText(self, page, page_start, I):
page_end = page_start + len(page)
for obj in I:
if obj['s'] >= page_start and obj['e'] <= page_end:
page[obj['s']-page_start:obj['e']-page_start] = obj['text']
def _expand(self, page, off, start, end):
I = []
start = start - off
end = end - off
i = start
while i < end:
if i+1 < end:
if page[i+1] == 0 and self.isText(chr(page[i])):
k = 0
for j in xrange(i, end, 2):
if j + 1 < end:
if self.isText(chr(page[j])) and page[j+1] == 0:
k += 1
else:
break
if k > 4:
if i+k*2 <= end:
obj = {}
obj['s'] = off + i + 1
obj['e'] = off + i + k * 2
for idx, j in enumerate(range(i+1, i + k*2)):
if j > i + k:
page[j] = 0
#self.penMap[j] = self.greenPen
elif j+idx+1 < end:
page[j] = page[j + idx + 1]
self.penMap[off + j] = self.greenPen
obj['text'] = page[i+1:i+k*2]
I.append(obj)
self.penMap[off + i] = self.greenPen
i += k*2
i = i + 1
return I
pass
def highliteWidechar(self, page):
off = self.dataModel.getOffset()
page_end = off + len(page)
touched = False
#print '-------'
for idx, iv in enumerate(self.Intervals):
        #print 'now here'
# in interval
s, e, I = iv
#print s ,e
#print page_end
page_start = off
if off >= s:
touched = True
if page_end <= e:
self._changeText(page, off, I)
else:
if off <= e:
I2 = self._expand(page, off, e, page_end)
for obj in I2:
I.append(obj)
e = page_end
self.Intervals[idx] = (s, e, I)
else:
                # we are several pages further down
touched = False
else:
if page_end <= e and page_end >= s:
# scrolled up
I2 = self._expand(page, off, page_start, s)
for obj in I2:
I.append(obj)
s = page_start
self.Intervals[idx] = (s, e, I)
touched = True
else:
# out of this interval
touched = False
if not touched or touched:
            #print 'here'
self.Intervals.append((off, page_end, self._expand(page, off, off, page_end)))
"""
class RangePen(PageDecorator):
def __init__(self, decorated, a, b, pen, ignoreHighlights=True):
super(RangePen, self).__init__(decorated)
self.dataModel = decorated.getDataModel()
self.decorated = decorated
self.a = a
self.b = b
self.pen = pen
self.already = False
self.ignoreHighlights = ignoreHighlights
def decorate(self, pageOffset=None):
self.page = self.decorated.decorate(pageOffset)
self.PenInterval = self.decorated.PenInterval
self.brushMap = self.decorated.brushMap
self.penMap = self.decorated.penMap
if not self.already:
self.addPenInterval(self.a, self.b, self.ignoreHighlights, self.pen)
self.already = True
return self.page
| gpl-2.0 | -3,535,276,325,798,863,400 | 31.992823 | 171 | 0.516279 | false |
google/makani | avionics/firmware/network/generate_net_send.py | 1 | 4158 | #!/usr/bin/python
# Copyright 2020 Makani Technologies LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Generate NetSendAioXXX functions to pack & send messages on the TMS570."""
import sys
import textwrap
import gflags
from makani.avionics.common import pack_avionics_messages
from makani.avionics.network import message_type
from makani.lib.python import c_helpers
gflags.DEFINE_string('source_file', None,
'Full path to output source file.',
short_name='s')
gflags.DEFINE_string('header_file', None,
'Full path to output header file.',
short_name='h')
FLAGS = gflags.FLAGS
message_type_helper = c_helpers.EnumHelper('MessageType', message_type)
def _GenStructName(message_type_name):
"""Generate C message structure for a given message type."""
return message_type_helper.ShortName(message_type_name) + 'Message'
def _GenPackFunctionName(message_type_name):
"""Generate C pack function name for a given message type."""
return 'Pack' + _GenStructName(message_type_name)
def _GenPackedSizeMacroName(message_type_name):
"""Generate packed size macro name for a given message type."""
return 'PACK_' + _GenStructName(message_type_name).upper() + '_SIZE'
def _WriteNetSendAioFunction(message_type_name, f):
"""Write NetSendAio<MessageName>() function."""
struct_name = _GenStructName(message_type_name)
size_macro = _GenPackedSizeMacroName(message_type_name)
pack_func = _GenPackFunctionName(message_type_name)
pack_cast = 'PackAioMessageFunction'
f.write(textwrap.dedent('''
COMPILE_ASSERT({1} <= MAX_AIO_PAYLOAD_SIZE,
{1}_must_fit_within_MAX_AIO_PAYLOAD_SIZE);
bool NetSendAio{0}(const {0} *msg) {{
return NetSendAioPacked({2}, ({3}){4}, msg);
}}
'''.format(struct_name, size_macro, message_type_name, pack_cast,
pack_func)))
def _WriteNetSendAioPrototype(message_type_name, f):
"""Write NetSendAio<MessageName>() prototype."""
struct_name = _GenStructName(message_type_name)
f.write('bool NetSendAio{0}(const {0} *msg);\n'.format(struct_name))
def _WriteSource(messages, f):
"""Write source file."""
f.write(textwrap.dedent('''
#include "avionics/firmware/network/net_send.h"
#include <stdbool.h>
#include "avionics/common/avionics_messages.h"
#include "avionics/common/pack_avionics_messages.h"
#include "avionics/firmware/network/net.h"
#include "avionics/network/message_type.h"
#include "common/macros.h"
''')[1:])
for m in messages:
_WriteNetSendAioFunction(m, f)
def _WriteHeader(messages, f):
"""Write header file."""
guard = 'AVIONICS_FIRMWARE_NETWORK_NET_SEND_H_'
f.write(textwrap.dedent('''
#ifndef {0}
#define {0}
#include <stdbool.h>
#include "avionics/common/avionics_messages.h"
#include "avionics/firmware/network/net.h"
'''.format(guard))[1:])
for m in messages:
_WriteNetSendAioPrototype(m, f)
f.write(textwrap.dedent('''
#endif // {0}'''.format(guard)))
def main(argv):
"""Entry point."""
try:
argv = FLAGS(argv)
except gflags.FlagsError, e:
print '{}\nUsage: {} ARGS\n{}'.format(e, sys.argv[0], FLAGS)
sys.exit(1)
messages = [m for (m, _) in message_type_helper
if _GenPackFunctionName(m) in pack_avionics_messages.__dict__]
if FLAGS.header_file:
with open(FLAGS.header_file, 'w') as f:
_WriteHeader(messages, f)
if FLAGS.source_file:
with open(FLAGS.source_file, 'w') as f:
_WriteSource(messages, f)
if __name__ == '__main__':
main(sys.argv)
| apache-2.0 | -7,501,108,895,089,029,000 | 30.984615 | 77 | 0.672198 | false |
thecotne/smart_less_build | executer.py | 1 | 10324 | import sublime, sublime_plugin
import os, sys
import threading
import subprocess
import functools
import time
class ProcessListener(object):
def on_data(self, proc, data):
pass
def on_finished(self, proc):
pass
# Encapsulates subprocess.Popen, forwarding stdout to a supplied
# ProcessListener (on a separate thread)
class AsyncProcess(object):
def __init__(self, cmd, shell_cmd, env, listener,
# "path" is an option in build systems
path="",
# "shell" is an options in build systems
shell=False):
if not shell_cmd and not cmd:
raise ValueError("shell_cmd or cmd is required")
if shell_cmd and not isinstance(shell_cmd, str):
raise ValueError("shell_cmd must be a string")
self.listener = listener
self.killed = False
self.start_time = time.time()
# Hide the console window on Windows
startupinfo = None
if os.name == "nt":
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
# Set temporary PATH to locate executable in cmd
if path:
old_path = os.environ["PATH"]
# The user decides in the build system whether he wants to append $PATH
# or tuck it at the front: "$PATH;C:\\new\\path", "C:\\new\\path;$PATH"
os.environ["PATH"] = os.path.expandvars(path)
proc_env = os.environ.copy()
proc_env.update(env)
for k, v in proc_env.items():
proc_env[k] = os.path.expandvars(v)
if shell_cmd and sys.platform == "win32":
# Use shell=True on Windows, so shell_cmd is passed through with the correct escaping
self.proc = subprocess.Popen(shell_cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, startupinfo=startupinfo, env=proc_env, shell=True)
elif shell_cmd and sys.platform == "darwin":
# Use a login shell on OSX, otherwise the users expected env vars won't be setup
self.proc = subprocess.Popen(["/bin/bash", "-l", "-c", shell_cmd], stdout=subprocess.PIPE,
stderr=subprocess.PIPE, startupinfo=startupinfo, env=proc_env, shell=False)
elif shell_cmd and sys.platform == "linux":
# Explicitly use /bin/bash on Linux, to keep Linux and OSX as
# similar as possible. A login shell is explicitly not used for
# linux, as it's not required
self.proc = subprocess.Popen(["/bin/bash", "-c", shell_cmd], stdout=subprocess.PIPE,
stderr=subprocess.PIPE, startupinfo=startupinfo, env=proc_env, shell=False)
else:
# Old style build system, just do what it asks
self.proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, startupinfo=startupinfo, env=proc_env, shell=shell)
if path:
os.environ["PATH"] = old_path
if self.proc.stdout:
threading.Thread(target=self.read_stdout).start()
if self.proc.stderr:
threading.Thread(target=self.read_stderr).start()
def kill(self):
if not self.killed:
self.killed = True
if sys.platform == "win32":
# terminate would not kill process opened by the shell cmd.exe, it will only kill
# cmd.exe leaving the child running
startupinfo = subprocess.STARTUPINFO()
startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW
subprocess.Popen("taskkill /PID " + str(self.proc.pid), startupinfo=startupinfo)
else:
self.proc.terminate()
self.listener = None
def poll(self):
return self.proc.poll() == None
def exit_code(self):
return self.proc.poll()
def read_stdout(self):
while True:
data = os.read(self.proc.stdout.fileno(), 2**15)
if len(data) > 0:
if self.listener:
self.listener.on_data(self, data)
else:
self.proc.stdout.close()
if self.listener:
self.listener.on_finished(self)
break
def read_stderr(self):
while True:
data = os.read(self.proc.stderr.fileno(), 2**15)
if len(data) > 0:
if self.listener:
self.listener.on_data(self, data)
else:
self.proc.stderr.close()
break
class ExecuterCommand(sublime_plugin.WindowCommand, ProcessListener):
def run(self, cmd = None, shell_cmd = None, file_regex = "", line_regex = "", working_dir = "",
encoding = "utf-8", env = {}, quiet = False, kill = False,
word_wrap = True, syntax = "Packages/Text/Plain text.tmLanguage",
# Catches "path" and "shell"
**kwargs):
if kill:
if self.proc:
self.proc.kill()
self.proc = None
self.append_string(None, "[Cancelled]")
return
if not hasattr(self, 'output_view'):
# Try not to call get_output_panel until the regexes are assigned
self.output_view = self.window.create_output_panel("exec")
# Default the to the current files directory if no working directory was given
if (working_dir == "" and self.window.active_view()
and self.window.active_view().file_name()):
working_dir = os.path.dirname(self.window.active_view().file_name())
self.output_view.settings().set("result_file_regex", file_regex)
self.output_view.settings().set("result_line_regex", line_regex)
self.output_view.settings().set("result_base_dir", working_dir)
self.output_view.settings().set("word_wrap", word_wrap)
self.output_view.settings().set("line_numbers", False)
self.output_view.settings().set("gutter", False)
self.output_view.settings().set("scroll_past_end", False)
self.output_view.assign_syntax(syntax)
# Call create_output_panel a second time after assigning the above
# settings, so that it'll be picked up as a result buffer
self.window.create_output_panel("exec")
self.encoding = encoding
self.quiet = quiet
self.proc = None
if not self.quiet:
if shell_cmd:
print("Running " + shell_cmd)
else:
print("Running " + " ".join(cmd))
sublime.status_message("Building")
# show_panel_on_build = sublime.load_settings("Preferences.sublime-settings").get("show_panel_on_build", True)
# if show_panel_on_build:
# self.window.run_command("show_panel", {"panel": "output.exec"})
merged_env = env.copy()
if self.window.active_view():
user_env = self.window.active_view().settings().get('build_env')
if user_env:
merged_env.update(user_env)
# Change to the working dir, rather than spawning the process with it,
# so that emitted working dir relative path names make sense
if working_dir != "":
os.chdir(working_dir)
self.debug_text = ""
if shell_cmd:
self.debug_text += "[shell_cmd: " + shell_cmd + "]\n"
else:
self.debug_text += "[cmd: " + str(cmd) + "]\n"
self.debug_text += "[dir: " + str(os.getcwd()) + "]\n"
# if "PATH" in merged_env:
# self.debug_text += "[path: " + str(merged_env["PATH"]) + "]"
# else:
# self.debug_text += "[path: " + str(os.environ["PATH"]) + "]"
try:
# Forward kwargs to AsyncProcess
self.proc = AsyncProcess(cmd, shell_cmd, merged_env, self, **kwargs)
except Exception as e:
self.append_string(None, str(e) + "\n")
self.append_string(None, self.debug_text + "\n")
if not self.quiet:
self.append_string(None, "[Finished]")
def is_enabled(self, kill = False):
if kill:
return hasattr(self, 'proc') and self.proc and self.proc.poll()
else:
return True
def append_data(self, proc, data):
if proc != self.proc:
# a second call to exec has been made before the first one
# finished, ignore it instead of intermingling the output.
if proc:
proc.kill()
return
try:
str = data.decode(self.encoding)
except:
str = "[Decode error - output not " + self.encoding + "]\n"
proc = None
# Normalize newlines, Sublime Text always uses a single \n separator
# in memory.
str = str.replace('\r\n', '\n').replace('\r', '\n')
self.output_view.run_command('append', {'characters': str, 'force': True, 'scroll_to_end': True})
def append_string(self, proc, str):
self.append_data(proc, str.encode(self.encoding))
def finish(self, proc):
if not self.quiet:
elapsed = time.time() - proc.start_time
exit_code = proc.exit_code()
if exit_code == 0 or exit_code == None:
# self.append_string(proc,
# ("[Finished in %.1fs]" % (elapsed)))
pass
else:
self.append_string(proc, ("[Finished in %.1fs with exit code %d]\n"
% (elapsed, exit_code)))
self.append_string(proc, self.debug_text)
self.window.run_command("show_panel", {"panel": "output.exec"})
if proc != self.proc:
return
errs = self.output_view.find_all_results()
if len(errs) == 0:
# sublime.status_message("Build finished")
sublime.status_message(("Build Finished in %.1fs" % (elapsed)))
# else:
# sublime.status_message(("Build finished with %d errors") % len(errs))
def on_data(self, proc, data):
sublime.set_timeout(functools.partial(self.append_data, proc, data), 0)
def on_finished(self, proc):
sublime.set_timeout(functools.partial(self.finish, proc), 0)
| gpl-3.0 | -967,074,813,617,226,400 | 38.106061 | 118 | 0.565866 | false |
google/dnae | lib/utils/utils.py | 1 | 6860 | # Copyright 2018 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility modules - Misc utility methods."""
import calendar
import datetime
import logging
import re
import time
class TextUtils(object):
"""Provides text/string related utility methods."""
def __init__(self):
pass
@classmethod
def toascii(cls, input_string):
temp_string = input_string
return str(re.sub(r"[^\x00-\x7F]+", "?", temp_string))
@classmethod
def removecommas(cls, input_string):
return cls.__replace(input_string, ",", " ")
@classmethod
def removequotes(cls, input_string):
return cls.__replace(input_string, '"', "")
@classmethod
def removenewlines(cls, input_string):
temp_string = cls.__replace(input_string, "\r", "")
temp_string = cls.__replace(temp_string, "\n", "")
return temp_string
@classmethod
def timestamp(cls, mode=None):
if mode == "short":
out = time.strftime("%Y%m%d")
else:
out = time.strftime("%Y%m%d_%H%M%S")
return out
@classmethod
def toidentifier(cls, input_string):
out = cls.toascii(input_string)
out = (out.replace(" ", "_").replace("&", "n").replace("@", "_")
.replace("#", "hash").replace("%", "x100").replace("'", "_")
.replace("+", "plus").replace("-", "minus").replace("*", "x")
.replace("/", "div").replace(".", "point").replace(",", "comma")
.replace("(", "_").replace(")", "_").replace("[", "_")
.replace("]", "_").replace("{", "_").replace("}", "_"))
return out
@classmethod
def strlist2str(cls, string_list, sep):
out = ""
for item in string_list:
out += item
out += sep
return out[:-1]
@classmethod
def __replace(cls, input_string, old, new):
replacefcn = lambda s: s.replace(old, new)
if isinstance(input_string, str):
out = replacefcn(input_string)
elif isinstance(input_string, list):
out = list()
for item in input_string:
if isinstance(item, str):
out.append(replacefcn(item))
else:
out.append(item)
else:
out = None
return out
class DateUtils(object):
"""Provides Date related utility methods."""
today = datetime.datetime.today()
curryear = today.year
one_day = datetime.timedelta(days=1)
quartermap = dict()
quartermap[1] = [
datetime.datetime(curryear, 1, 1),
datetime.datetime(curryear, 3, 31, 23, 59, 59, 999999)
]
quartermap[2] = [
datetime.datetime(curryear, 4, 1),
datetime.datetime(curryear, 6, 30, 23, 59, 59, 999999)
]
quartermap[3] = [
datetime.datetime(curryear, 7, 1),
datetime.datetime(curryear, 9, 30, 23, 59, 59, 999999)
]
quartermap[4] = [
datetime.datetime(curryear, 10, 1),
datetime.datetime(curryear, 12, 31, 23, 59, 59, 999999)
]
def __init__(self):
pass
@classmethod
def usec2date(cls, usec):
d0 = datetime.datetime(year=1970, month=1, day=1) # unix time reference
delta = datetime.timedelta(microseconds=usec)
return d0 + delta
@classmethod
def quarterstartdate(cls):
    curr_quarter = (DateUtils.today.month - 1) // 3 + 1  # floor division keeps an int key on Python 3
quarter_start_date = cls.quartermap[curr_quarter][0]
return quarter_start_date
@classmethod
def quarterenddate(cls):
    curr_quarter = (DateUtils.today.month - 1) // 3 + 1
quarter_end_date = cls.quartermap[curr_quarter][1]
return quarter_end_date
@classmethod
def lastdayofmonth(cls, month):
return calendar.monthrange(cls.curryear, month)[1]
@classmethod
def dbmdate2sfdate(cls, datestr):
return datetime.datetime.strptime(datestr, "%Y/%m/%d").strftime("%Y-%m-%d")
@classmethod
def firstdayofquarter(cls):
t = datetime.datetime.today().toordinal()
if t in [
datetime.datetime(cls.curryear, 1, 1).toordinal(),
datetime.datetime(cls.curryear, 4, 1).toordinal(),
datetime.datetime(cls.curryear, 7, 1).toordinal(),
datetime.datetime(cls.curryear, 10, 1).toordinal()
]:
return True
else:
return False
@classmethod
def firstdayofyear(cls):
t = datetime.datetime.today().toordinal()
if t == datetime.datetime(cls.curryear, 1, 1).toordinal():
return True
else:
return False
@classmethod
def quarterdays(cls):
return cls.quarterenddate().toordinal() - cls.quarterstartdate().toordinal(
) + 1
@classmethod
def dayofquarter(cls, date=None, date_format=None):
if not date:
date = cls.today
else:
date = datetime.datetime.strptime(date, date_format)
q2 = (datetime.datetime.strptime("4/1/{0:4d}".format(date.year),
"%m/%d/%Y")).timetuple().tm_yday
q3 = (datetime.datetime.strptime("7/1/{0:4d}".format(date.year),
"%m/%d/%Y")).timetuple().tm_yday
q4 = (datetime.datetime.strptime("10/1/{0:4d}".format(date.year),
"%m/%d/%Y")).timetuple().tm_yday
cur_day = date.timetuple().tm_yday
if date.month < 4:
return cur_day
elif date.month < 7:
return cur_day - q2 + 1
elif date.month < 10:
return cur_day - q3 + 1
else:
return cur_day - q4 + 1
class SelfIncreasingIndex(object):
"""Provides utility methods to create and use a self-increasing index."""
def __init__(self):
self.__value = 0
def __call__(self, *args, **kwargs):
val = self.__value
self.__value += 1
return val
def start(self, init_value=0):
self.__value = init_value
return self.__value
def nextval(self):
self.__value += 1
return self.__value
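# A hedged usage sketch (added for illustration; not part of the original
# module): the index hands out consecutive integers, either by calling the
# instance or via nextval().
#
#   idx = SelfIncreasingIndex()
#   idx.start(10)   # -> 10
#   idx()           # -> 10 (internal value advances to 11)
#   idx.nextval()   # -> 12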
# Decorators
def retry(some_function, *args, **kwargs):
_MAX_RETRY = 5
def wrapper(*args, **kwargs):
retval = None
retry_attempts = 0
done = False
while not done:
try:
retval = some_function(*args, **kwargs)
done = True
# pylint: disable=broad-except
except Exception as error:
retry_attempts += 1
if retry_attempts <= _MAX_RETRY:
seconds = 2 ** retry_attempts
logging.warning("Encountered an error - %s -, "
"retrying in %d seconds...", str(error), seconds)
time.sleep(seconds)
else:
raise error
# pylint: enable=broad-except
return retval
return wrapper
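# A hedged usage sketch (added for illustration; not part of the original
# module): `retry` is written as a decorator, so a flaky call can be wrapped
# directly. `fetch_report` and its URL are hypothetical names for this
# example only.
#
#   @retry
#   def fetch_report(url):
#     return urllib2.urlopen(url).read()
#
#   fetch_report('http://example.com/report')  # retried up to 5 times, 2**n-second backoff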
| apache-2.0 | 8,431,717,631,091,964,000 | 26.886179 | 79 | 0.61035 | false |
lmregus/Portfolio | python/design_patterns/env/lib/python3.7/site-packages/sphinx/builders/applehelp.py | 1 | 1258 | """
sphinx.builders.applehelp
~~~~~~~~~~~~~~~~~~~~~~~~~
Build Apple help books.
:copyright: Copyright 2007-2019 by the Sphinx team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import warnings
from sphinxcontrib.applehelp import (
AppleHelpCodeSigningFailed,
AppleHelpIndexerFailed,
AppleHelpBuilder,
)
from sphinx.deprecation import RemovedInSphinx40Warning, deprecated_alias
if False:
# For type annotation
from typing import Any, Dict # NOQA
from sphinx.application import Sphinx # NOQA
deprecated_alias('sphinx.builders.applehelp',
{
'AppleHelpCodeSigningFailed': AppleHelpCodeSigningFailed,
'AppleHelpIndexerFailed': AppleHelpIndexerFailed,
'AppleHelpBuilder': AppleHelpBuilder,
},
RemovedInSphinx40Warning)
def setup(app):
# type: (Sphinx) -> Dict[str, Any]
warnings.warn('sphinx.builders.applehelp has been moved to sphinxcontrib-applehelp.',
RemovedInSphinx40Warning)
app.setup_extension('sphinxcontrib.applehelp')
return {
'version': 'builtin',
'parallel_read_safe': True,
'parallel_write_safe': True,
}
| mit | -7,510,677,014,965,285,000 | 26.347826 | 89 | 0.638315 | false |
ikreymer/pywb-ia | tools/iaitemhandler.py | 1 | 3821 | from pywb.webapp.handlers import WBHandler
from pywb.cdx.zipnum import ZipNumCluster
from pywb.cdx.cdxserver import CDXServer
import os
import requests
import shutil
#=============================================================================
class ItemHandler(WBHandler):
def __init__(self, query_handler, config=None):
self.item_cdx_root = config.get('index_paths')
self.download_prefix = config.get('archive_paths')
if not os.path.isdir(self.item_cdx_root):
os.makedirs(self.item_cdx_root)
super(ItemHandler, self).__init__(query_handler, config)
def handle_request(self, wbrequest):
self.load_item_files(wbrequest)
return super(ItemHandler, self).handle_request(wbrequest)
def load_item_files(self, wbrequest):
item = wbrequest.coll
idx_file = os.path.join(self.item_cdx_root, item + '.cdx.idx')
cdx_file = os.path.join(self.item_cdx_root, item + '.cdx.gz')
# first, try to download idx and use remote cdx
if not os.path.isfile(idx_file) and not os.path.isfile(cdx_file):
idx_url = self.download_prefix + item + '/' + item + '.cdx.idx'
try:
self.download_file(idx_url, idx_file)
self.number_idx(idx_file)
idx_found = True
except:
idx_found = False
if idx_found:
return
# try to download cdx file if no idx
cdx_url = self.download_prefix + item + '/' + item + '.cdx.gz'
try:
self.download_file(cdx_url, cdx_file)
except:
raise
def download_file(self, url, filename):
""" Download cdx or idx file locally
"""
r = requests.get(url, stream=True)
r.raise_for_status()
with open(filename, 'wb') as f:
for chunk in r.iter_content():
if chunk:
f.write(chunk)
f.flush()
def number_idx(self, filename):
""" If idx doesn't have the last line number column, add it
to allow for much better search ops
"""
with open(filename) as fh:
firstline = fh.readline()
parts = firstline.split('\t')
# only add if there are 4 parts
if len(parts) != 4:
return
count = 1
def writeline(fho, line, count):
fho.write(line.rstrip() + '\t' + str(count) + '\n')
with open(filename + '.tmp', 'w+b') as fho:
writeline(fho, firstline, count)
count += 1
for line in fh:
writeline(fho, line, count)
count += 1
shutil.move(filename + '.tmp', filename)
#=============================================================================
class ItemCDXServer(CDXServer):
def _create_cdx_sources(self, paths, config):
src = ItemCDXIndex(paths, config)
self.sources = [src]
#=============================================================================
class ItemCDXIndex(ZipNumCluster):
def __init__(self, summary, config):
self.root_path = summary
super(ItemCDXIndex, self).__init__(summary, config)
self.prefix = config.get('archive_paths')
def resolve(part, query):
coll = query.params.get('coll')
local_cdx = os.path.join(self.root_path, coll + '.cdx.gz')
remote_cdx = self.prefix + coll + '/' + part
return [local_cdx, remote_cdx]
self.loc_resolver = resolve
def load_cdx(self, query):
coll = query.params.get('coll')
full = os.path.join(self.root_path, coll + '.cdx.idx')
return self._do_load_cdx(full, query)
| mit | 1,659,556,733,371,929,600 | 33.423423 | 78 | 0.516619 | false |
wegamekinglc/alpha-mind | alphamind/tests/portfolio/test_evolver.py | 1 | 1141 | # -*- coding: utf-8 -*-
"""
Created on 2017-11-23
@author: cheng.li
"""
import unittest
import numpy as np
from alphamind.portfolio.evolver import evolve_positions
class TestEvolver(unittest.TestCase):
def test_evolve_positions_with_all_positive_position(self):
positions = np.array([0.2, 0.2, 0.8])
dx_returns = np.array([0.06, 0.04, -0.10])
simple_return = np.exp(dx_returns)
curr_pos = positions * simple_return
expected_pos = curr_pos / curr_pos.sum() * positions.sum()
calculated_pos = evolve_positions(positions, dx_returns)
np.testing.assert_array_almost_equal(expected_pos, calculated_pos)
def test_evolve_positions_with_negative_position(self):
positions = np.array([0.2, 0.3, -0.8])
dx_returns = np.array([0.06, 0.04, -0.10])
simple_return = np.exp(dx_returns)
curr_pos = positions * simple_return
expected_pos = curr_pos / np.abs(curr_pos).sum() * np.abs(positions).sum()
calculated_pos = evolve_positions(positions, dx_returns)
np.testing.assert_array_almost_equal(expected_pos, calculated_pos)
| mit | 1,129,303,972,060,844,300 | 28.25641 | 82 | 0.648554 | false |
antivanov/chief-lunch-officer | find_cafe.py | 1 | 8229 | # Automatically fetches menus for today, grades predefined cafes and based on
# additional information (weather, cafe of choice yesterday) gives recommendations
# where to go for lunch.
# If there are problems with encoding set Python encoding correctly by executing:
# set PYTHONIOENCODING=utf-8
from chief_lunch_officer import ChiefLunchOfficer, WeatherOpinion, FoodTaste
from constants import TEMPERATURE, PRECIPITATION_CHANCE, PRECIPITATION_AMOUNT, WIND
from constants import NEPALESE, HIMA_SALI, DYLAN_MILK, FACTORY_SALMISAARI, PIHKA, ANTELL, SODEXO_ACQUA, SODEXO_EXPLORER
from preferences import FOOD_PREFERENCES
from cafes import CAFES
from decorators import get_ignore_errors_decorator
from pathlib import Path
from datetime import date, datetime, timedelta
from copy import deepcopy
import urllib.request
import json
import re
EmptyMenuOnError = get_ignore_errors_decorator(default_value='No menu. Data feed format for the cafe changed?')
HIMA_SALI_URL = 'http://www.himasali.com/p/lounaslista.html'
DYLAN_MILK_URL = 'http://dylan.fi/milk/'
PIHKA_URL = 'http://ruoholahti.pihka.fi/en/'
FACTORY_SALMISAARI_URL = 'http://www.ravintolafactory.com/ravintolat/helsinki-salmisaari/'
ANTELL_URL = 'http://www.antell.fi/lounaslistat/lounaslista.html?owner=146'
YLE_WEATHER_FORECAST_URL = 'http://yle.fi/saa/resources/ajax/saa-api/hourly-forecast.action?id=642554'
SODEXO_ACQUA_URL = 'http://www.sodexo.fi/carte/load/html/30/%s/day'
SODEXO_EXPLORER_URL = 'http://www.sodexo.fi/carte/load/html/31/%s/day'
def make_readable(content_with_html_tags, insert_new_lines=True, collapse_whitespace=False):
content_with_html_tags = re.sub('<br.*?>', '\n' if insert_new_lines else '', content_with_html_tags)
content_with_html_tags = re.sub('<.*?>', '', content_with_html_tags)
content_with_html_tags = re.sub('[ \t]+', ' ', content_with_html_tags)
content_with_html_tags = re.sub('\n+', '\n', content_with_html_tags)
if collapse_whitespace:
content_with_html_tags = re.sub('\s+', ' ', content_with_html_tags)
content_with_html_tags = re.sub("(.{80})", "\\1\n", content_with_html_tags, 0, re.DOTALL)
    content_with_html_tags = content_with_html_tags.replace('&amp;', '&').replace('&nbsp;', ' ')
return content_with_html_tags.encode('ascii', 'ignore').decode('ascii')
def get(url):
response = urllib.request.urlopen(url)
charset = response.headers.get_content_charset() if response.headers.get_content_charset() is not None else 'utf-8'
return response.read().decode(charset)
def get_and_find_all(url, regex):
html = get(url)
return re.findall(regex, html, re.MULTILINE | re.DOTALL)
def find_menu(url, date, regex, index=0):
weekday = date.weekday()
if (weekday > 4): #Saturday or Sunday
return 'Weekend: no menu'
found = get_and_find_all(url, regex)
if (len(found) == 0):
return 'No menu'
else:
return found[index]
@EmptyMenuOnError
def get_sodexo_explorer_menu(date):
menu_url = SODEXO_EXPLORER_URL % (date.strftime('%Y-%m-%d'))
menu = find_menu(menu_url, date, '(.*)')
menu = json.loads(menu)['foods']
return menu
@EmptyMenuOnError
def get_sodexo_acqua_menu(date):
menu_url = SODEXO_ACQUA_URL % (date.strftime('%Y-%m-%d'))
menu = find_menu(menu_url, date, '(.*)')
menu = json.loads(menu)['foods']
return menu
@EmptyMenuOnError
def get_antell_menu(date):
weekday = date.weekday()
return find_menu(ANTELL_URL, date, r'<h2[^>]+>(.*?)<img', weekday)
@EmptyMenuOnError
def get_hima_sali_menu(date):
date_label = '%d\\.%d\\.' % (date.day, date.month)
return find_menu(HIMA_SALI_URL, date, r'%s(.*?Wok.*?[\d\.]+)' % (date_label), -1)
@EmptyMenuOnError
def get_dylan_milk_menu(date):
return find_menu(DYLAN_MILK_URL, date, r'<div class="fbf_desc">(.*?)</div>')
@EmptyMenuOnError
def get_pihka_menu(date):
weekday = date.weekday()
found = get_and_find_all(PIHKA_URL, r'<div class="menu\-day.*?<ul>(.*?)</div>')
return found[weekday]
@EmptyMenuOnError
def get_factory_salmisaari_menu(date):
date_label = date.strftime('%d.%m.%Y')
found = get_and_find_all(FACTORY_SALMISAARI_URL, r'%s</h3>(.*?)</p>' % (date_label))
return found[0]
def get_todays_weather():
weather_response = get(YLE_WEATHER_FORECAST_URL)
forecast = json.loads(weather_response)['weatherInfos'][0]
return {
TEMPERATURE: forecast['temperature'],
PRECIPITATION_CHANCE: forecast['probabilityPrecipitation'],
PRECIPITATION_AMOUNT: forecast['precipitation1h'],
WIND: forecast['windSpeedMs']
}
def week_number(date):
return date.isocalendar()[1]
def parse_date(date_str):
return datetime.strptime(date_str, '%d.%m.%Y')
def get_current_week_history(today):
history_path = Path('history.json')
if not history_path.exists():
with history_path.open('w') as f:
f.write('{}')
with history_path.open('r') as f:
history = json.loads(f.read())
current_week = week_number(today)
def is_date_this_week_before_today(d):
return (current_week == week_number(d)
and d.date() < today)
current_week_history = {(k, v) for (k, v) in history.items() if is_date_this_week_before_today(parse_date(k))}
return dict(current_week_history)
def ordered_cafes(history):
sorted_dates = sorted(history)
cafes = []
for cafe_date in sorted_dates:
cafes.append(history[cafe_date])
return cafes
def store_history(history):
history_path = Path('history.json')
with history_path.open('w') as f:
f.write(json.dumps(history, sort_keys=True))
def update_history(history, today, todays_cafe):
history[today.strftime('%d.%m.%Y')] = todays_cafe
store_history(history)
today = date.today()
#today = today + timedelta(days=2)
print('Today %s\n' % today.strftime('%d.%m.%Y'))
sodexo_acqua_menu = get_sodexo_acqua_menu(today)
print('\nSodexo Acqua:\n\n%s' % make_readable(sodexo_acqua_menu, collapse_whitespace=True))
sodexo_explorer_menu = get_sodexo_explorer_menu(today)
print('\nSodexo Explorer:\n\n%s' % make_readable(sodexo_explorer_menu, collapse_whitespace=True))
antell_menu = get_antell_menu(today)
print('\nAntell:\n\n%s' % make_readable(antell_menu, collapse_whitespace=True))
hima_sali_menu = get_hima_sali_menu(today)
print('\nHima & Sali:\n\n%s' % make_readable(hima_sali_menu, insert_new_lines=False))
dylan_milk_menu = get_dylan_milk_menu(today)
print('\nDylan Milk:\n\n%s' % make_readable(dylan_milk_menu))
pihka_menu = get_pihka_menu(today)
print('\nPihka:\n\n%s' % make_readable(pihka_menu, collapse_whitespace=True))
factory_salmisaari_menu = get_factory_salmisaari_menu(today)
print('\nFactory Salmisaari:\n\n%s' % make_readable(factory_salmisaari_menu, insert_new_lines=False))
weather = get_todays_weather()
print('\nWeather:\n\n temperature %s C\n chance of precipitation %s percent\n precipitation amount %s mm\n wind %s m/s' % (weather[TEMPERATURE], weather[PRECIPITATION_CHANCE], weather[PRECIPITATION_AMOUNT], weather[WIND]))
lunch_history = get_current_week_history(today)
current_week_cafes = ordered_cafes(lunch_history)
print('\nLunch history for current week:\n\n %s' % ', '.join(current_week_cafes))
cafes = deepcopy(CAFES)
cafes[SODEXO_EXPLORER]['menu'] = sodexo_explorer_menu
cafes[SODEXO_ACQUA]['menu'] = sodexo_acqua_menu
cafes[ANTELL]['menu'] = antell_menu
cafes[HIMA_SALI]['menu'] = hima_sali_menu
cafes[DYLAN_MILK]['menu'] = dylan_milk_menu
cafes[PIHKA]['menu'] = pihka_menu
cafes[FACTORY_SALMISAARI]['menu'] = factory_salmisaari_menu
food_taste = FoodTaste().preferences(FOOD_PREFERENCES)
weather_opinion = WeatherOpinion().weather(weather)
clo = ChiefLunchOfficer(food_taste=food_taste, weather_opinion=weather_opinion)
clo.lunched(current_week_cafes).weather(weather).cafes(cafes).weekday(today.weekday())
todays_cafes = clo.decide()
todays_cafe = todays_cafes[0]
todays_cafe_address = CAFES[todays_cafe]['address']
update_history(lunch_history, today, todays_cafe)
print('\nRecommendation:\n\n %s, %s' % (todays_cafe, todays_cafe_address))
formatted_cafes = ', '.join(todays_cafes[0:5]) + '\n' + ', '.join(todays_cafes[5:-1])
print('\nAll lunch in preferred order:\n\n %s' % (formatted_cafes)) | apache-2.0 | 6,199,370,958,018,314,000 | 41.205128 | 222 | 0.697898 | false |
unicefuganda/edtrac | edtrac_project/rapidsms_xforms_src/pavement.py | 1 | 2360 | # -*- Import: -*-
from paver.easy import *
from paver.setuputils import setup
from setuptools import find_packages
try:
# Optional tasks, only needed for development
# -*- Optional import: -*-
from github.tools.task import *
import paver.doctools
import paver.virtual
import paver.misctasks
ALL_TASKS_LOADED = True
except ImportError, e:
info("some tasks could not not be imported.")
debug(str(e))
ALL_TASKS_LOADED = False
version = '0.1'
classifiers = [
# Get more strings from http://www.python.org/pypi?%3Aaction=list_classifiers
"Development Status :: 1 - Planning",
]
install_requires = [
# -*- Install requires: -*-
'setuptools',
]
entry_points="""
# -*- Entry points: -*-
"""
# compatible with distutils of python 2.3+ or later
setup(
name='rapidsms-xforms',
version=version,
description='Provides an interactive form builder and xform compatibility for RapidSMS.',
long_description=open('README.rst', 'r').read(),
classifiers=classifiers,
keywords='rapidsms xforms',
author='Nic Pottier',
author_email='[email protected]',
url='',
license='BSD',
packages = find_packages(exclude=['bootstrap', 'pavement',]),
include_package_data=True,
test_suite='nose.collector',
zip_safe=False,
install_requires=install_requires,
entry_points=entry_points,
)
options(
# -*- Paver options: -*-
minilib=Bunch(
extra_files=[
# -*- Minilib extra files: -*-
]
),
sphinx=Bunch(
docroot='docs',
builddir="_build",
sourcedir=""
),
virtualenv=Bunch(
packages_to_install=[
# -*- Virtualenv packages to install: -*-
'github-tools',
"nose",
"Sphinx>=0.6b1",
"pkginfo",
"virtualenv"],
dest_dir='./virtual-env/',
install_paver=True,
script_name='bootstrap.py',
paver_command_line=None
),
)
options.setup.package_data=paver.setuputils.find_package_data(
'rapidsms_xforms', package='rapidsms_xforms', only_in_packages=False)
if ALL_TASKS_LOADED:
@task
@needs('generate_setup', 'minilib', 'setuptools.command.sdist')
def sdist():
"""Overrides sdist to make sure that our setup.py is generated."""
| bsd-3-clause | -8,912,364,466,401,702,000 | 24.934066 | 93 | 0.607203 | false |
RedhawkSDR/rtl-demo-app | server/rtl_app/__init__.py | 1 | 1030 | #
# This file is protected by Copyright. Please refer to the COPYRIGHT file
# distributed with this source distribution.
#
# This file is part of REDHAWK server.
#
# REDHAWK server is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# REDHAWK server is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
from _common import BadDemodException, BadFrequencyException, DeviceUnavailableException
from rtl_app import RTLApp, AsyncRTLApp
from mock_rtl_app import AsyncRTLApp as MockAsyncRTLApp, RTLApp as MockRTLApp
| lgpl-3.0 | -6,564,324,000,110,991,000 | 45.818182 | 88 | 0.78835 | false |
brendangregg/bcc | examples/networking/http_filter/http-parse-complete.py | 1 | 11542 | #!/usr/bin/python
#
# Bertrone Matteo - Polytechnic of Turin
# November 2015
#
# eBPF application that parses HTTP packets
# and extracts (and prints on screen) the URL
# contained in the GET/POST request.
#
# eBPF program http_filter is used as SOCKET_FILTER attached to eth0 interface.
# Only packets of type ip and tcp containing HTTP GET/POST are
# returned to userspace, others dropped
#
# Python script uses bcc BPF Compiler Collection by
# iovisor (https://github.com/iovisor/bcc) and prints on stdout the first
# line of the HTTP GET/POST request containing the url
from __future__ import print_function
from bcc import BPF
from sys import argv
import socket
import os
import binascii
import time
CLEANUP_N_PACKETS = 50 # cleanup every CLEANUP_N_PACKETS packets received
MAX_URL_STRING_LEN = 8192 # max url string len (usually 8K)
MAX_AGE_SECONDS = 30 # max age entry in bpf_sessions map
# convert a bin string into a string of hex char
# helper function to print raw packet in hex
def toHex(s):
lst = ""
for ch in s:
hv = hex(ch).replace('0x', '')
if len(hv) == 1:
hv = '0' + hv
lst = lst + hv
return lst
# print str until CR+LF
def printUntilCRLF(s):
print(s.split(b'\r\n')[0].decode())
# cleanup function
def cleanup():
# get current time in seconds
current_time = int(time.time())
# looking for leaf having:
# timestap == 0 --> update with current timestamp
# AGE > MAX_AGE_SECONDS --> delete item
for key, leaf in bpf_sessions.items():
try:
current_leaf = bpf_sessions[key]
# set timestamp if timestamp == 0
if (current_leaf.timestamp == 0):
bpf_sessions[key] = bpf_sessions.Leaf(current_time)
else:
# delete older entries
if (current_time - current_leaf.timestamp > MAX_AGE_SECONDS):
del bpf_sessions[key]
except:
print("cleanup exception.")
return
# args
def usage():
print("USAGE: %s [-i <if_name>]" % argv[0])
print("")
print("Try '%s -h' for more options." % argv[0])
exit()
# help
def help():
print("USAGE: %s [-i <if_name>]" % argv[0])
print("")
print("optional arguments:")
print(" -h print this help")
print(" -i if_name select interface if_name. Default is eth0")
print("")
print("examples:")
print(" http-parse # bind socket to eth0")
print(" http-parse -i wlan0 # bind socket to wlan0")
exit()
# arguments
interface = "eth0"
if len(argv) == 2:
if str(argv[1]) == '-h':
help()
else:
usage()
if len(argv) == 3:
if str(argv[1]) == '-i':
interface = argv[2]
else:
usage()
if len(argv) > 3:
usage()
print("binding socket to '%s'" % interface)
# initialize BPF - load source code from http-parse-complete.c
bpf = BPF(src_file="http-parse-complete.c", debug=0)
# load eBPF program http_filter of type SOCKET_FILTER into the kernel eBPF vm
# more info about eBPF program types
# http://man7.org/linux/man-pages/man2/bpf.2.html
function_http_filter = bpf.load_func("http_filter", BPF.SOCKET_FILTER)
# create raw socket, bind it to interface
# attach bpf program to socket created
BPF.attach_raw_socket(function_http_filter, interface)
# get file descriptor of the socket previously
# created inside BPF.attach_raw_socket
socket_fd = function_http_filter.sock
# create python socket object, from the file descriptor
sock = socket.fromfd(socket_fd, socket.PF_PACKET,
socket.SOCK_RAW, socket.IPPROTO_IP)
# set it as blocking socket
sock.setblocking(True)
# get pointer to bpf map of type hash
bpf_sessions = bpf.get_table("sessions")
# packets counter
packet_count = 0
# dictionary containing association
# <key(ipsrc,ipdst,portsrc,portdst),payload_string>.
# if url is not entirely contained in only one packet,
# save the firt part of it in this local dict
# when I find \r\n in a next pkt, append and print the whole url
local_dictionary = {}
while 1:
# retrieve raw packet from socket
packet_str = os.read(socket_fd, 4096) # set packet length to max packet length on the interface
packet_count += 1
# DEBUG - print raw packet in hex format
# packet_hex = toHex(packet_str)
# print ("%s" % packet_hex)
# convert packet into bytearray
packet_bytearray = bytearray(packet_str)
# ethernet header length
ETH_HLEN = 14
# IP HEADER
# https://tools.ietf.org/html/rfc791
# 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
# |Version| IHL |Type of Service| Total Length |
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
#
# IHL : Internet Header Length is the length of the internet header
# value to multiply * 4 byte
# e.g. IHL = 5 ; IP Header Length = 5 * 4 byte = 20 byte
#
# Total length: This 16-bit field defines the entire packet size,
# including header and data, in bytes.
# calculate packet total length
total_length = packet_bytearray[ETH_HLEN + 2] # load MSB
total_length = total_length << 8 # shift MSB
total_length = total_length + packet_bytearray[ETH_HLEN + 3] # add LSB
# calculate ip header length
ip_header_length = packet_bytearray[ETH_HLEN] # load Byte
ip_header_length = ip_header_length & 0x0F # mask bits 0..3
ip_header_length = ip_header_length << 2 # shift to obtain length
# retrieve ip source/dest
ip_src_str = packet_str[ETH_HLEN + 12: ETH_HLEN + 16] # ip source offset 12..15
ip_dst_str = packet_str[ETH_HLEN + 16:ETH_HLEN + 20] # ip dest offset 16..19
ip_src = int(toHex(ip_src_str), 16)
ip_dst = int(toHex(ip_dst_str), 16)
# TCP HEADER
# https://www.rfc-editor.org/rfc/rfc793.txt
# 12 13 14 15
# 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
# | Data | |U|A|P|R|S|F| |
# | Offset| Reserved |R|C|S|S|Y|I| Window |
# | | |G|K|H|T|N|N| |
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
#
# Data Offset: This indicates where the data begins.
# The TCP header is an integral number of 32 bits long.
# value to multiply * 4 byte
# e.g. DataOffset = 5 ; TCP Header Length = 5 * 4 byte = 20 byte
# calculate tcp header length
tcp_header_length = packet_bytearray[ETH_HLEN + ip_header_length + 12] # load Byte
tcp_header_length = tcp_header_length & 0xF0 # mask bit 4..7
tcp_header_length = tcp_header_length >> 2 # SHR 4 ; SHL 2 -> SHR 2
# retrieve port source/dest
port_src_str = packet_str[ETH_HLEN + ip_header_length:ETH_HLEN + ip_header_length + 2]
port_dst_str = packet_str[ETH_HLEN + ip_header_length + 2:ETH_HLEN + ip_header_length + 4]
port_src = int(toHex(port_src_str), 16)
port_dst = int(toHex(port_dst_str), 16)
# calculate payload offset
payload_offset = ETH_HLEN + ip_header_length + tcp_header_length
# payload_string contains only packet payload
payload_string = packet_str[(payload_offset):(len(packet_bytearray))]
# CR + LF (substring to find)
crlf = b'\r\n'
# current_Key contains ip source/dest and port source/map
# useful for direct bpf_sessions map access
current_Key = bpf_sessions.Key(ip_src, ip_dst, port_src, port_dst)
# looking for HTTP GET/POST request
if ((payload_string[:3] == b'GET') or (payload_string[:4] == b'POST')
or (payload_string[:4] == b'HTTP') or (payload_string[:3] == b'PUT')
or (payload_string[:6] == b'DELETE') or (payload_string[:4] == b'HEAD')):
# match: HTTP GET/POST packet found
if (crlf in payload_string):
# url entirely contained in first packet -> print it all
printUntilCRLF(payload_string)
# delete current_Key from bpf_sessions, url already printed.
# current session not useful anymore
try:
del bpf_sessions[current_Key]
except:
print("error during delete from bpf map ")
else:
# url NOT entirely contained in first packet
# not found \r\n in payload.
# save current part of the payload_string in dictionary
# <key(ips,ipd,ports,portd),payload_string>
local_dictionary[binascii.hexlify(current_Key)] = payload_string
else:
# NO match: HTTP GET/POST NOT found
# check if the packet belong to a session saved in bpf_sessions
if (current_Key in bpf_sessions):
# check id the packet belong to a session saved in local_dictionary
# (local_dictionary maintains HTTP GET/POST url not
# printed yet because split in N packets)
if (binascii.hexlify(current_Key) in local_dictionary):
# first part of the HTTP GET/POST url is already present in
# local dictionary (prev_payload_string)
prev_payload_string = local_dictionary[binascii.hexlify(current_Key)]
# looking for CR+LF in current packet.
if (crlf in payload_string):
# last packet. containing last part of HTTP GET/POST
# url split in N packets. Append current payload
prev_payload_string += payload_string
# print HTTP GET/POST url
printUntilCRLF(prev_payload_string)
# clean bpf_sessions & local_dictionary
try:
del bpf_sessions[current_Key]
del local_dictionary[binascii.hexlify(current_Key)]
except:
print("error deleting from map or dictionary")
else:
# NOT last packet. Containing part of HTTP GET/POST url
# split in N packets.
# Append current payload
prev_payload_string += payload_string
# check if not size exceeding
# (usually HTTP GET/POST url < 8K )
if (len(prev_payload_string) > MAX_URL_STRING_LEN):
print("url too long")
try:
del bpf_sessions[current_Key]
del local_dictionary[binascii.hexlify(current_Key)]
except:
print("error deleting from map or dict")
# update dictionary
local_dictionary[binascii.hexlify(current_Key)] = prev_payload_string
else:
# first part of the HTTP GET/POST url is
# NOT present in local dictionary
# bpf_sessions contains invalid entry -> delete it
try:
del bpf_sessions[current_Key]
except:
print("error del bpf_session")
# check if dirty entry are present in bpf_sessions
if (((packet_count) % CLEANUP_N_PACKETS) == 0):
cleanup()
| apache-2.0 | 4,341,704,507,636,344,300 | 36.718954 | 100 | 0.574164 | false |
maximencia/python_traning | fixture/application.py | 1 | 1783 | # -*- coding: utf-8 -*-
__author__ = 'Maxim.Rumyantsev'
#from selenium.webdriver.firefox.webdriver import WebDriver
from selenium import webdriver
from fixture.session_f import SessionHelper
from fixture.group_f import GroupHelper
from fixture.contact_f import ContactHelper
class Application:
# проверка валидности фикстуры через возврат url
def fixture_is_valid(self):
try:
self.wd.current_url
return True
except:
return False
def __init__(self,browser, base_url):
# параметр отвечает за запуск браузера бля тестов
if browser=="firefox":
self.wd = webdriver.Firefox()
elif browser=="chrome":
self.wd = webdriver.Chrome()
elif browser=="ie":
self.wd = webdriver.Ie()
else:
raise ValueError("Unrecognized browser %s" %browser)
#self.wd.implicitly_wait(1)
self.session = SessionHelper(self)
self.group = GroupHelper(self)
self.contact = ContactHelper(self)
self.base_url= base_url
# заполнение тестовых полей name- имя поля; send_keys_parameters - текст для заполнения
def fill_text_field(self,name,send_keys_parameters):
wd = self.wd
if send_keys_parameters is not None:
wd.find_element_by_name(name).click()
wd.find_element_by_name(name).clear()
wd.find_element_by_name(name).send_keys(send_keys_parameters)
def open_home_page(self):
wd = self.wd
wd.get(self.base_url)
def destroy(self):
self.wd.quit()
| apache-2.0 | 7,473,063,612,288,819,000 | 27.052632 | 89 | 0.605072 | false |
googleapis/googleapis-gen | google/ads/googleads/v8/googleads-py/google/ads/googleads/v8/enums/types/location_group_radius_units.py | 1 | 1225 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package='google.ads.googleads.v8.enums',
marshal='google.ads.googleads.v8',
manifest={
'LocationGroupRadiusUnitsEnum',
},
)
class LocationGroupRadiusUnitsEnum(proto.Message):
r"""Container for enum describing unit of radius in location
group.
"""
class LocationGroupRadiusUnits(proto.Enum):
r"""The unit of radius distance in location group (e.g. MILES)"""
UNSPECIFIED = 0
UNKNOWN = 1
METERS = 2
MILES = 3
MILLI_MILES = 4
__all__ = tuple(sorted(__protobuf__.manifest))
| apache-2.0 | 5,528,833,102,422,046,000 | 28.878049 | 74 | 0.685714 | false |
ArcherSys/ArcherSys | Lib/test/test_dis.py | 1 | 124487 | <<<<<<< HEAD
<<<<<<< HEAD
# Minimal tests for dis module
from test.support import captured_stdout
from test.bytecode_helper import BytecodeTestCase
import unittest
import sys
import dis
import io
import re
import types
import contextlib
def get_tb():
def _error():
try:
1 / 0
except Exception as e:
tb = e.__traceback__
return tb
tb = _error()
while tb.tb_next:
tb = tb.tb_next
return tb
TRACEBACK_CODE = get_tb().tb_frame.f_code
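# dis.disassemble(code, lasti) marks the instruction at offset lasti with
# "-->"; the dis_traceback template further down relies on that to flag the
# BINARY_TRUE_DIVIDE that raised inside get_tb().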
class _C:
def __init__(self, x):
self.x = x == 1
dis_c_instance_method = """\
%-4d 0 LOAD_FAST 1 (x)
3 LOAD_CONST 1 (1)
6 COMPARE_OP 2 (==)
9 LOAD_FAST 0 (self)
12 STORE_ATTR 0 (x)
15 LOAD_CONST 0 (None)
18 RETURN_VALUE
""" % (_C.__init__.__code__.co_firstlineno + 1,)
dis_c_instance_method_bytes = """\
0 LOAD_FAST 1 (1)
3 LOAD_CONST 1 (1)
6 COMPARE_OP 2 (==)
9 LOAD_FAST 0 (0)
12 STORE_ATTR 0 (0)
15 LOAD_CONST 0 (0)
18 RETURN_VALUE
"""
def _f(a):
print(a)
return 1
dis_f = """\
%-4d 0 LOAD_GLOBAL 0 (print)
3 LOAD_FAST 0 (a)
6 CALL_FUNCTION 1 (1 positional, 0 keyword pair)
9 POP_TOP
%-4d 10 LOAD_CONST 1 (1)
13 RETURN_VALUE
""" % (_f.__code__.co_firstlineno + 1,
_f.__code__.co_firstlineno + 2)
dis_f_co_code = """\
0 LOAD_GLOBAL 0 (0)
3 LOAD_FAST 0 (0)
6 CALL_FUNCTION 1 (1 positional, 0 keyword pair)
9 POP_TOP
10 LOAD_CONST 1 (1)
13 RETURN_VALUE
"""
def bug708901():
for res in range(1,
10):
pass
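# Regression fodder: the range() call above spans two source lines, and the
# expected disassembly below checks that bytecode is attributed to each line.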
dis_bug708901 = """\
%-4d 0 SETUP_LOOP 23 (to 26)
3 LOAD_GLOBAL 0 (range)
6 LOAD_CONST 1 (1)
%-4d 9 LOAD_CONST 2 (10)
12 CALL_FUNCTION 2 (2 positional, 0 keyword pair)
15 GET_ITER
>> 16 FOR_ITER 6 (to 25)
19 STORE_FAST 0 (res)
%-4d 22 JUMP_ABSOLUTE 16
>> 25 POP_BLOCK
>> 26 LOAD_CONST 0 (None)
29 RETURN_VALUE
""" % (bug708901.__code__.co_firstlineno + 1,
bug708901.__code__.co_firstlineno + 2,
bug708901.__code__.co_firstlineno + 3)
def bug1333982(x=[]):
assert 0, ([s for s in x] +
1)
pass
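# Regression fodder: the assert message mixes an inline list comprehension
# (compiled to the nested code object loaded below) with a constant, so the
# expected output checks line attribution across the multi-line expression.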
dis_bug1333982 = """\
%3d 0 LOAD_CONST 1 (0)
3 POP_JUMP_IF_TRUE 35
6 LOAD_GLOBAL 0 (AssertionError)
9 LOAD_CONST 2 (<code object <listcomp> at 0x..., file "%s", line %d>)
12 LOAD_CONST 3 ('bug1333982.<locals>.<listcomp>')
15 MAKE_FUNCTION 0
18 LOAD_FAST 0 (x)
21 GET_ITER
22 CALL_FUNCTION 1 (1 positional, 0 keyword pair)
%3d 25 LOAD_CONST 4 (1)
28 BINARY_ADD
29 CALL_FUNCTION 1 (1 positional, 0 keyword pair)
32 RAISE_VARARGS 1
%3d >> 35 LOAD_CONST 0 (None)
38 RETURN_VALUE
""" % (bug1333982.__code__.co_firstlineno + 1,
__file__,
bug1333982.__code__.co_firstlineno + 1,
bug1333982.__code__.co_firstlineno + 2,
bug1333982.__code__.co_firstlineno + 3)
_BIG_LINENO_FORMAT = """\
%3d 0 LOAD_GLOBAL 0 (spam)
3 POP_TOP
4 LOAD_CONST 0 (None)
7 RETURN_VALUE
"""
dis_module_expected_results = """\
Disassembly of f:
4 0 LOAD_CONST 0 (None)
3 RETURN_VALUE
Disassembly of g:
5 0 LOAD_CONST 0 (None)
3 RETURN_VALUE
"""
expr_str = "x + 1"
dis_expr_str = """\
1 0 LOAD_NAME 0 (x)
3 LOAD_CONST 0 (1)
6 BINARY_ADD
7 RETURN_VALUE
"""
simple_stmt_str = "x = x + 1"
dis_simple_stmt_str = """\
1 0 LOAD_NAME 0 (x)
3 LOAD_CONST 0 (1)
6 BINARY_ADD
7 STORE_NAME 0 (x)
10 LOAD_CONST 1 (None)
13 RETURN_VALUE
"""
compound_stmt_str = """\
x = 0
while 1:
x += 1"""
# Trailing newline has been deliberately omitted
dis_compound_stmt_str = """\
1 0 LOAD_CONST 0 (0)
3 STORE_NAME 0 (x)
2 6 SETUP_LOOP 14 (to 23)
3 >> 9 LOAD_NAME 0 (x)
12 LOAD_CONST 1 (1)
15 INPLACE_ADD
16 STORE_NAME 0 (x)
19 JUMP_ABSOLUTE 9
22 POP_BLOCK
>> 23 LOAD_CONST 2 (None)
26 RETURN_VALUE
"""
dis_traceback = """\
%-4d 0 SETUP_EXCEPT 12 (to 15)
%-4d 3 LOAD_CONST 1 (1)
6 LOAD_CONST 2 (0)
--> 9 BINARY_TRUE_DIVIDE
10 POP_TOP
11 POP_BLOCK
12 JUMP_FORWARD 46 (to 61)
%-4d >> 15 DUP_TOP
16 LOAD_GLOBAL 0 (Exception)
19 COMPARE_OP 10 (exception match)
22 POP_JUMP_IF_FALSE 60
25 POP_TOP
26 STORE_FAST 0 (e)
29 POP_TOP
30 SETUP_FINALLY 14 (to 47)
%-4d 33 LOAD_FAST 0 (e)
36 LOAD_ATTR 1 (__traceback__)
39 STORE_FAST 1 (tb)
42 POP_BLOCK
43 POP_EXCEPT
44 LOAD_CONST 0 (None)
>> 47 LOAD_CONST 0 (None)
50 STORE_FAST 0 (e)
53 DELETE_FAST 0 (e)
56 END_FINALLY
57 JUMP_FORWARD 1 (to 61)
>> 60 END_FINALLY
%-4d >> 61 LOAD_FAST 1 (tb)
64 RETURN_VALUE
""" % (TRACEBACK_CODE.co_firstlineno + 1,
TRACEBACK_CODE.co_firstlineno + 2,
TRACEBACK_CODE.co_firstlineno + 3,
TRACEBACK_CODE.co_firstlineno + 4,
TRACEBACK_CODE.co_firstlineno + 5)
class DisTests(unittest.TestCase):
def get_disassembly(self, func, lasti=-1, wrapper=True):
# We want to test the default printing behaviour, not the file arg
output = io.StringIO()
with contextlib.redirect_stdout(output):
if wrapper:
dis.dis(func)
else:
dis.disassemble(func, lasti)
return output.getvalue()
def get_disassemble_as_string(self, func, lasti=-1):
return self.get_disassembly(func, lasti, False)
def strip_addresses(self, text):
return re.sub(r'\b0x[0-9A-Fa-f]+\b', '0x...', text)
def do_disassembly_test(self, func, expected):
got = self.get_disassembly(func)
if got != expected:
got = self.strip_addresses(got)
self.assertEqual(got, expected)
def test_opmap(self):
self.assertEqual(dis.opmap["NOP"], 9)
self.assertIn(dis.opmap["LOAD_CONST"], dis.hasconst)
self.assertIn(dis.opmap["STORE_NAME"], dis.hasname)
def test_opname(self):
self.assertEqual(dis.opname[dis.opmap["LOAD_FAST"]], "LOAD_FAST")
def test_boundaries(self):
self.assertEqual(dis.opmap["EXTENDED_ARG"], dis.EXTENDED_ARG)
self.assertEqual(dis.opmap["STORE_NAME"], dis.HAVE_ARGUMENT)
def test_dis(self):
self.do_disassembly_test(_f, dis_f)
def test_bug_708901(self):
self.do_disassembly_test(bug708901, dis_bug708901)
def test_bug_1333982(self):
# This one is checking bytecodes generated for an `assert` statement,
# so fails if the tests are run with -O. Skip this test then.
if not __debug__:
self.skipTest('need asserts, run without -O')
self.do_disassembly_test(bug1333982, dis_bug1333982)
def test_big_linenos(self):
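        # Large line offsets need multiple co_lnotab entries (each increment
        # fits in one byte), which is what these generated functions exercise.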
def func(count):
namespace = {}
func = "def foo():\n " + "".join(["\n "] * count + ["spam\n"])
exec(func, namespace)
return namespace['foo']
# Test all small ranges
for i in range(1, 300):
expected = _BIG_LINENO_FORMAT % (i + 2)
self.do_disassembly_test(func(i), expected)
# Test some larger ranges too
for i in range(300, 5000, 10):
expected = _BIG_LINENO_FORMAT % (i + 2)
self.do_disassembly_test(func(i), expected)
from test import dis_module
self.do_disassembly_test(dis_module, dis_module_expected_results)
def test_disassemble_str(self):
self.do_disassembly_test(expr_str, dis_expr_str)
self.do_disassembly_test(simple_stmt_str, dis_simple_stmt_str)
self.do_disassembly_test(compound_stmt_str, dis_compound_stmt_str)
def test_disassemble_bytes(self):
self.do_disassembly_test(_f.__code__.co_code, dis_f_co_code)
def test_disassemble_method(self):
self.do_disassembly_test(_C(1).__init__, dis_c_instance_method)
def test_disassemble_method_bytes(self):
method_bytecode = _C(1).__init__.__code__.co_code
self.do_disassembly_test(method_bytecode, dis_c_instance_method_bytes)
def test_dis_none(self):
try:
del sys.last_traceback
except AttributeError:
pass
self.assertRaises(RuntimeError, dis.dis, None)
def test_dis_traceback(self):
try:
del sys.last_traceback
except AttributeError:
pass
try:
1/0
except Exception as e:
tb = e.__traceback__
sys.last_traceback = tb
tb_dis = self.get_disassemble_as_string(tb.tb_frame.f_code, tb.tb_lasti)
self.do_disassembly_test(None, tb_dis)
def test_dis_object(self):
self.assertRaises(TypeError, dis.dis, object())
class DisWithFileTests(DisTests):
# Run the tests again, using the file arg instead of print
def get_disassembly(self, func, lasti=-1, wrapper=True):
output = io.StringIO()
if wrapper:
dis.dis(func, file=output)
else:
dis.disassemble(func, lasti, file=output)
return output.getvalue()
code_info_code_info = """\
Name: code_info
Filename: (.*)
Argument count: 1
Kw-only arguments: 0
Number of locals: 1
Stack size: 3
Flags: OPTIMIZED, NEWLOCALS, NOFREE
Constants:
0: %r
Names:
0: _format_code_info
1: _get_code_object
Variable names:
0: x""" % (('Formatted details of methods, functions, or code.',)
if sys.flags.optimize < 2 else (None,))
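# Under -OO (sys.flags.optimize >= 2) docstrings are stripped, hence the
# expected constant above degrades to None.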
@staticmethod
def tricky(x, y, z=True, *args, c, d, e=[], **kwds):
def f(c=c):
print(x, y, z, c, d, e, f)
yield x, y, z, c, d, e, f
code_info_tricky = """\
Name: tricky
Filename: (.*)
Argument count: 3
Kw-only arguments: 3
Number of locals: 8
Stack size: 7
Flags: OPTIMIZED, NEWLOCALS, VARARGS, VARKEYWORDS, GENERATOR
Constants:
0: None
1: <code object f at (.*), file "(.*)", line (.*)>
2: 'tricky.<locals>.f'
Variable names:
0: x
1: y
2: z
3: c
4: d
5: e
6: args
7: kwds
Cell variables:
0: [edfxyz]
1: [edfxyz]
2: [edfxyz]
3: [edfxyz]
4: [edfxyz]
5: [edfxyz]"""
# NOTE: the order of the cell variables above depends on dictionary order!
co_tricky_nested_f = tricky.__func__.__code__.co_consts[1]
code_info_tricky_nested_f = """\
Name: f
Filename: (.*)
Argument count: 1
Kw-only arguments: 0
Number of locals: 1
Stack size: 8
Flags: OPTIMIZED, NEWLOCALS, NESTED
Constants:
0: None
Names:
0: print
Variable names:
0: c
Free variables:
0: [edfxyz]
1: [edfxyz]
2: [edfxyz]
3: [edfxyz]
4: [edfxyz]
5: [edfxyz]"""
code_info_expr_str = """\
Name: <module>
Filename: <disassembly>
Argument count: 0
Kw-only arguments: 0
Number of locals: 0
Stack size: 2
Flags: NOFREE
Constants:
0: 1
Names:
0: x"""
code_info_simple_stmt_str = """\
Name: <module>
Filename: <disassembly>
Argument count: 0
Kw-only arguments: 0
Number of locals: 0
Stack size: 2
Flags: NOFREE
Constants:
0: 1
1: None
Names:
0: x"""
code_info_compound_stmt_str = """\
Name: <module>
Filename: <disassembly>
Argument count: 0
Kw-only arguments: 0
Number of locals: 0
Stack size: 2
Flags: NOFREE
Constants:
0: 0
1: 1
2: None
Names:
0: x"""
class CodeInfoTests(unittest.TestCase):
test_pairs = [
(dis.code_info, code_info_code_info),
(tricky, code_info_tricky),
(co_tricky_nested_f, code_info_tricky_nested_f),
(expr_str, code_info_expr_str),
(simple_stmt_str, code_info_simple_stmt_str),
(compound_stmt_str, code_info_compound_stmt_str),
]
def test_code_info(self):
self.maxDiff = 1000
for x, expected in self.test_pairs:
self.assertRegex(dis.code_info(x), expected)
def test_show_code(self):
self.maxDiff = 1000
for x, expected in self.test_pairs:
with captured_stdout() as output:
dis.show_code(x)
self.assertRegex(output.getvalue(), expected+"\n")
output = io.StringIO()
dis.show_code(x, file=output)
self.assertRegex(output.getvalue(), expected)
def test_code_info_object(self):
self.assertRaises(TypeError, dis.code_info, object())
def test_pretty_flags_no_flags(self):
self.assertEqual(dis.pretty_flags(0), '0x0')
# Fodder for instruction introspection tests
# Editing any of these may require recalculating the expected output
def outer(a=1, b=2):
def f(c=3, d=4):
def inner(e=5, f=6):
print(a, b, c, d, e, f)
print(a, b, c, d)
return inner
print(a, b, '', 1, [], {}, "Hello world!")
return f
def jumpy():
# This won't actually run (but that's OK, we only disassemble it)
for i in range(10):
print(i)
if i < 4:
continue
if i > 6:
break
else:
print("I can haz else clause?")
while i:
print(i)
i -= 1
if i > 6:
continue
if i < 4:
break
else:
print("Who let lolcatz into this test suite?")
try:
1 / 0
except ZeroDivisionError:
print("Here we go, here we go, here we go...")
else:
with i as dodgy:
print("Never reach this")
finally:
print("OK, now we're done")
# End fodder for opinfo generation tests
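# first_line=1 is passed when disassembling the fodder, so the expected line
# numbers rebase each code object's real first line as if outer started at 1.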
expected_outer_line = 1
_line_offset = outer.__code__.co_firstlineno - 1
code_object_f = outer.__code__.co_consts[3]
expected_f_line = code_object_f.co_firstlineno - _line_offset
code_object_inner = code_object_f.co_consts[3]
expected_inner_line = code_object_inner.co_firstlineno - _line_offset
expected_jumpy_line = 1
# The following lines are useful to regenerate the expected results after
# either the fodder is modified or the bytecode generation changes
# After regeneration, update the references to code_object_f and
# code_object_inner before rerunning the tests
#_instructions = dis.get_instructions(outer, first_line=expected_outer_line)
#print('expected_opinfo_outer = [\n ',
#',\n '.join(map(str, _instructions)), ',\n]', sep='')
#_instructions = dis.get_instructions(outer(), first_line=expected_outer_line)
#print('expected_opinfo_f = [\n ',
#',\n '.join(map(str, _instructions)), ',\n]', sep='')
#_instructions = dis.get_instructions(outer()(), first_line=expected_outer_line)
#print('expected_opinfo_inner = [\n ',
#',\n '.join(map(str, _instructions)), ',\n]', sep='')
#_instructions = dis.get_instructions(jumpy, first_line=expected_jumpy_line)
#print('expected_opinfo_jumpy = [\n ',
#',\n '.join(map(str, _instructions)), ',\n]', sep='')
Instruction = dis.Instruction
expected_opinfo_outer = [
Instruction(opname='LOAD_CONST', opcode=100, arg=1, argval=3, argrepr='3', offset=0, starts_line=2, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=2, argval=4, argrepr='4', offset=3, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CLOSURE', opcode=135, arg=0, argval='a', argrepr='a', offset=6, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CLOSURE', opcode=135, arg=1, argval='b', argrepr='b', offset=9, starts_line=None, is_jump_target=False),
Instruction(opname='BUILD_TUPLE', opcode=102, arg=2, argval=2, argrepr='', offset=12, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=3, argval=code_object_f, argrepr=repr(code_object_f), offset=15, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=4, argval='outer.<locals>.f', argrepr="'outer.<locals>.f'", offset=18, starts_line=None, is_jump_target=False),
Instruction(opname='MAKE_CLOSURE', opcode=134, arg=2, argval=2, argrepr='', offset=21, starts_line=None, is_jump_target=False),
Instruction(opname='STORE_FAST', opcode=125, arg=2, argval='f', argrepr='f', offset=24, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=0, argval='print', argrepr='print', offset=27, starts_line=7, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=0, argval='a', argrepr='a', offset=30, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=1, argval='b', argrepr='b', offset=33, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=5, argval='', argrepr="''", offset=36, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=6, argval=1, argrepr='1', offset=39, starts_line=None, is_jump_target=False),
Instruction(opname='BUILD_LIST', opcode=103, arg=0, argval=0, argrepr='', offset=42, starts_line=None, is_jump_target=False),
Instruction(opname='BUILD_MAP', opcode=105, arg=0, argval=0, argrepr='', offset=45, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=7, argval='Hello world!', argrepr="'Hello world!'", offset=48, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=7, argval=7, argrepr='7 positional, 0 keyword pair', offset=51, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=54, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=2, argval='f', argrepr='f', offset=55, starts_line=8, is_jump_target=False),
Instruction(opname='RETURN_VALUE', opcode=83, arg=None, argval=None, argrepr='', offset=58, starts_line=None, is_jump_target=False),
]
expected_opinfo_f = [
Instruction(opname='LOAD_CONST', opcode=100, arg=1, argval=5, argrepr='5', offset=0, starts_line=3, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=2, argval=6, argrepr='6', offset=3, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CLOSURE', opcode=135, arg=2, argval='a', argrepr='a', offset=6, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CLOSURE', opcode=135, arg=3, argval='b', argrepr='b', offset=9, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CLOSURE', opcode=135, arg=0, argval='c', argrepr='c', offset=12, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CLOSURE', opcode=135, arg=1, argval='d', argrepr='d', offset=15, starts_line=None, is_jump_target=False),
Instruction(opname='BUILD_TUPLE', opcode=102, arg=4, argval=4, argrepr='', offset=18, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=3, argval=code_object_inner, argrepr=repr(code_object_inner), offset=21, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=4, argval='outer.<locals>.f.<locals>.inner', argrepr="'outer.<locals>.f.<locals>.inner'", offset=24, starts_line=None, is_jump_target=False),
Instruction(opname='MAKE_CLOSURE', opcode=134, arg=2, argval=2, argrepr='', offset=27, starts_line=None, is_jump_target=False),
Instruction(opname='STORE_FAST', opcode=125, arg=2, argval='inner', argrepr='inner', offset=30, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=0, argval='print', argrepr='print', offset=33, starts_line=5, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=2, argval='a', argrepr='a', offset=36, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=3, argval='b', argrepr='b', offset=39, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=0, argval='c', argrepr='c', offset=42, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=1, argval='d', argrepr='d', offset=45, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=4, argval=4, argrepr='4 positional, 0 keyword pair', offset=48, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=51, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=2, argval='inner', argrepr='inner', offset=52, starts_line=6, is_jump_target=False),
Instruction(opname='RETURN_VALUE', opcode=83, arg=None, argval=None, argrepr='', offset=55, starts_line=None, is_jump_target=False),
]
expected_opinfo_inner = [
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=0, argval='print', argrepr='print', offset=0, starts_line=4, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=0, argval='a', argrepr='a', offset=3, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=1, argval='b', argrepr='b', offset=6, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=2, argval='c', argrepr='c', offset=9, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=3, argval='d', argrepr='d', offset=12, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='e', argrepr='e', offset=15, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=1, argval='f', argrepr='f', offset=18, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=6, argval=6, argrepr='6 positional, 0 keyword pair', offset=21, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=24, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=25, starts_line=None, is_jump_target=False),
Instruction(opname='RETURN_VALUE', opcode=83, arg=None, argval=None, argrepr='', offset=28, starts_line=None, is_jump_target=False),
]
expected_opinfo_jumpy = [
Instruction(opname='SETUP_LOOP', opcode=120, arg=74, argval=77, argrepr='to 77', offset=0, starts_line=3, is_jump_target=False),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=0, argval='range', argrepr='range', offset=3, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=1, argval=10, argrepr='10', offset=6, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=1, argval=1, argrepr='1 positional, 0 keyword pair', offset=9, starts_line=None, is_jump_target=False),
Instruction(opname='GET_ITER', opcode=68, arg=None, argval=None, argrepr='', offset=12, starts_line=None, is_jump_target=False),
Instruction(opname='FOR_ITER', opcode=93, arg=50, argval=66, argrepr='to 66', offset=13, starts_line=None, is_jump_target=True),
Instruction(opname='STORE_FAST', opcode=125, arg=0, argval='i', argrepr='i', offset=16, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', argrepr='print', offset=19, starts_line=4, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=22, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=1, argval=1, argrepr='1 positional, 0 keyword pair', offset=25, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=28, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=29, starts_line=5, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=2, argval=4, argrepr='4', offset=32, starts_line=None, is_jump_target=False),
Instruction(opname='COMPARE_OP', opcode=107, arg=0, argval='<', argrepr='<', offset=35, starts_line=None, is_jump_target=False),
Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=47, argval=47, argrepr='', offset=38, starts_line=None, is_jump_target=False),
Instruction(opname='JUMP_ABSOLUTE', opcode=113, arg=13, argval=13, argrepr='', offset=41, starts_line=6, is_jump_target=False),
Instruction(opname='JUMP_FORWARD', opcode=110, arg=0, argval=47, argrepr='to 47', offset=44, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=47, starts_line=7, is_jump_target=True),
Instruction(opname='LOAD_CONST', opcode=100, arg=3, argval=6, argrepr='6', offset=50, starts_line=None, is_jump_target=False),
Instruction(opname='COMPARE_OP', opcode=107, arg=4, argval='>', argrepr='>', offset=53, starts_line=None, is_jump_target=False),
Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=13, argval=13, argrepr='', offset=56, starts_line=None, is_jump_target=False),
Instruction(opname='BREAK_LOOP', opcode=80, arg=None, argval=None, argrepr='', offset=59, starts_line=8, is_jump_target=False),
Instruction(opname='JUMP_ABSOLUTE', opcode=113, arg=13, argval=13, argrepr='', offset=60, starts_line=None, is_jump_target=False),
Instruction(opname='JUMP_ABSOLUTE', opcode=113, arg=13, argval=13, argrepr='', offset=63, starts_line=None, is_jump_target=False),
Instruction(opname='POP_BLOCK', opcode=87, arg=None, argval=None, argrepr='', offset=66, starts_line=None, is_jump_target=True),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', argrepr='print', offset=67, starts_line=10, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=4, argval='I can haz else clause?', argrepr="'I can haz else clause?'", offset=70, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=1, argval=1, argrepr='1 positional, 0 keyword pair', offset=73, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=76, starts_line=None, is_jump_target=False),
Instruction(opname='SETUP_LOOP', opcode=120, arg=74, argval=154, argrepr='to 154', offset=77, starts_line=11, is_jump_target=True),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=80, starts_line=None, is_jump_target=True),
Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=143, argval=143, argrepr='', offset=83, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', argrepr='print', offset=86, starts_line=12, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=89, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=1, argval=1, argrepr='1 positional, 0 keyword pair', offset=92, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=95, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=96, starts_line=13, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=5, argval=1, argrepr='1', offset=99, starts_line=None, is_jump_target=False),
Instruction(opname='INPLACE_SUBTRACT', opcode=56, arg=None, argval=None, argrepr='', offset=102, starts_line=None, is_jump_target=False),
Instruction(opname='STORE_FAST', opcode=125, arg=0, argval='i', argrepr='i', offset=103, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=106, starts_line=14, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=3, argval=6, argrepr='6', offset=109, starts_line=None, is_jump_target=False),
Instruction(opname='COMPARE_OP', opcode=107, arg=4, argval='>', argrepr='>', offset=112, starts_line=None, is_jump_target=False),
Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=124, argval=124, argrepr='', offset=115, starts_line=None, is_jump_target=False),
Instruction(opname='JUMP_ABSOLUTE', opcode=113, arg=80, argval=80, argrepr='', offset=118, starts_line=15, is_jump_target=False),
Instruction(opname='JUMP_FORWARD', opcode=110, arg=0, argval=124, argrepr='to 124', offset=121, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=124, starts_line=16, is_jump_target=True),
Instruction(opname='LOAD_CONST', opcode=100, arg=2, argval=4, argrepr='4', offset=127, starts_line=None, is_jump_target=False),
Instruction(opname='COMPARE_OP', opcode=107, arg=0, argval='<', argrepr='<', offset=130, starts_line=None, is_jump_target=False),
Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=80, argval=80, argrepr='', offset=133, starts_line=None, is_jump_target=False),
Instruction(opname='BREAK_LOOP', opcode=80, arg=None, argval=None, argrepr='', offset=136, starts_line=17, is_jump_target=False),
Instruction(opname='JUMP_ABSOLUTE', opcode=113, arg=80, argval=80, argrepr='', offset=137, starts_line=None, is_jump_target=False),
Instruction(opname='JUMP_ABSOLUTE', opcode=113, arg=80, argval=80, argrepr='', offset=140, starts_line=None, is_jump_target=False),
Instruction(opname='POP_BLOCK', opcode=87, arg=None, argval=None, argrepr='', offset=143, starts_line=None, is_jump_target=True),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', argrepr='print', offset=144, starts_line=19, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=6, argval='Who let lolcatz into this test suite?', argrepr="'Who let lolcatz into this test suite?'", offset=147, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=1, argval=1, argrepr='1 positional, 0 keyword pair', offset=150, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=153, starts_line=None, is_jump_target=False),
Instruction(opname='SETUP_FINALLY', opcode=122, arg=72, argval=229, argrepr='to 229', offset=154, starts_line=20, is_jump_target=True),
Instruction(opname='SETUP_EXCEPT', opcode=121, arg=12, argval=172, argrepr='to 172', offset=157, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=5, argval=1, argrepr='1', offset=160, starts_line=21, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=7, argval=0, argrepr='0', offset=163, starts_line=None, is_jump_target=False),
Instruction(opname='BINARY_TRUE_DIVIDE', opcode=27, arg=None, argval=None, argrepr='', offset=166, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=167, starts_line=None, is_jump_target=False),
Instruction(opname='POP_BLOCK', opcode=87, arg=None, argval=None, argrepr='', offset=168, starts_line=None, is_jump_target=False),
Instruction(opname='JUMP_FORWARD', opcode=110, arg=28, argval=200, argrepr='to 200', offset=169, starts_line=None, is_jump_target=False),
Instruction(opname='DUP_TOP', opcode=4, arg=None, argval=None, argrepr='', offset=172, starts_line=22, is_jump_target=True),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=2, argval='ZeroDivisionError', argrepr='ZeroDivisionError', offset=173, starts_line=None, is_jump_target=False),
Instruction(opname='COMPARE_OP', opcode=107, arg=10, argval='exception match', argrepr='exception match', offset=176, starts_line=None, is_jump_target=False),
Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=199, argval=199, argrepr='', offset=179, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=182, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=183, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=184, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', argrepr='print', offset=185, starts_line=23, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=8, argval='Here we go, here we go, here we go...', argrepr="'Here we go, here we go, here we go...'", offset=188, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=1, argval=1, argrepr='1 positional, 0 keyword pair', offset=191, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=194, starts_line=None, is_jump_target=False),
Instruction(opname='POP_EXCEPT', opcode=89, arg=None, argval=None, argrepr='', offset=195, starts_line=None, is_jump_target=False),
Instruction(opname='JUMP_FORWARD', opcode=110, arg=26, argval=225, argrepr='to 225', offset=196, starts_line=None, is_jump_target=False),
Instruction(opname='END_FINALLY', opcode=88, arg=None, argval=None, argrepr='', offset=199, starts_line=None, is_jump_target=True),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=200, starts_line=25, is_jump_target=True),
Instruction(opname='SETUP_WITH', opcode=143, arg=17, argval=223, argrepr='to 223', offset=203, starts_line=None, is_jump_target=False),
Instruction(opname='STORE_FAST', opcode=125, arg=1, argval='dodgy', argrepr='dodgy', offset=206, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', argrepr='print', offset=209, starts_line=26, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=9, argval='Never reach this', argrepr="'Never reach this'", offset=212, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=1, argval=1, argrepr='1 positional, 0 keyword pair', offset=215, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=218, starts_line=None, is_jump_target=False),
Instruction(opname='POP_BLOCK', opcode=87, arg=None, argval=None, argrepr='', offset=219, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=220, starts_line=None, is_jump_target=False),
Instruction(opname='WITH_CLEANUP', opcode=81, arg=None, argval=None, argrepr='', offset=223, starts_line=None, is_jump_target=True),
Instruction(opname='END_FINALLY', opcode=88, arg=None, argval=None, argrepr='', offset=224, starts_line=None, is_jump_target=False),
Instruction(opname='POP_BLOCK', opcode=87, arg=None, argval=None, argrepr='', offset=225, starts_line=None, is_jump_target=True),
Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=226, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', argrepr='print', offset=229, starts_line=28, is_jump_target=True),
Instruction(opname='LOAD_CONST', opcode=100, arg=10, argval="OK, now we're done", argrepr='"OK, now we\'re done"', offset=232, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=1, argval=1, argrepr='1 positional, 0 keyword pair', offset=235, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=238, starts_line=None, is_jump_target=False),
Instruction(opname='END_FINALLY', opcode=88, arg=None, argval=None, argrepr='', offset=239, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=240, starts_line=None, is_jump_target=False),
Instruction(opname='RETURN_VALUE', opcode=83, arg=None, argval=None, argrepr='', offset=243, starts_line=None, is_jump_target=False),
]
# One last piece of inspect fodder to check the default line number handling
def simple(): pass
expected_opinfo_simple = [
Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=0, starts_line=simple.__code__.co_firstlineno, is_jump_target=False),
Instruction(opname='RETURN_VALUE', opcode=83, arg=None, argval=None, argrepr='', offset=3, starts_line=None, is_jump_target=False)
]
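# Illustrative sketch only (the _demo_* name is an assumption, not part of
# the suite): how the iterator API validated above is typically consumed.
def _demo_instruction_summary(func=simple):
    # dis.get_instructions yields dis.Instruction namedtuples whose fields
    # (offset, opname, argrepr, ...) are exactly what the lists above assert.
    return [(ins.offset, ins.opname, ins.argrepr)
            for ins in dis.get_instructions(func)]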
class InstructionTests(BytecodeTestCase):
def test_default_first_line(self):
actual = dis.get_instructions(simple)
self.assertEqual(list(actual), expected_opinfo_simple)
def test_first_line_set_to_None(self):
actual = dis.get_instructions(simple, first_line=None)
self.assertEqual(list(actual), expected_opinfo_simple)
def test_outer(self):
actual = dis.get_instructions(outer, first_line=expected_outer_line)
self.assertEqual(list(actual), expected_opinfo_outer)
def test_nested(self):
with captured_stdout():
f = outer()
actual = dis.get_instructions(f, first_line=expected_f_line)
self.assertEqual(list(actual), expected_opinfo_f)
def test_doubly_nested(self):
with captured_stdout():
inner = outer()()
actual = dis.get_instructions(inner, first_line=expected_inner_line)
self.assertEqual(list(actual), expected_opinfo_inner)
def test_jumpy(self):
actual = dis.get_instructions(jumpy, first_line=expected_jumpy_line)
self.assertEqual(list(actual), expected_opinfo_jumpy)
# get_instructions has its own tests above, so can rely on it to validate
# the object oriented API
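# Sketch of the wrapper being tested: dis.Bytecode(_f).dis() returns the same
# text that dis.dis(_f) prints, which test_disassembled compares to dis_f.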
class BytecodeTests(unittest.TestCase):
def test_instantiation(self):
# Test with function, method, code string and code object
for obj in [_f, _C(1).__init__, "a=1", _f.__code__]:
with self.subTest(obj=obj):
b = dis.Bytecode(obj)
self.assertIsInstance(b.codeobj, types.CodeType)
self.assertRaises(TypeError, dis.Bytecode, object())
def test_iteration(self):
for obj in [_f, _C(1).__init__, "a=1", _f.__code__]:
with self.subTest(obj=obj):
via_object = list(dis.Bytecode(obj))
via_generator = list(dis.get_instructions(obj))
self.assertEqual(via_object, via_generator)
def test_explicit_first_line(self):
actual = dis.Bytecode(outer, first_line=expected_outer_line)
self.assertEqual(list(actual), expected_opinfo_outer)
def test_source_line_in_disassembly(self):
# Use the line in the source code
actual = dis.Bytecode(simple).dis()[:3]
expected = "{:>3}".format(simple.__code__.co_firstlineno)
self.assertEqual(actual, expected)
# Use an explicit first line number
actual = dis.Bytecode(simple, first_line=350).dis()[:3]
self.assertEqual(actual, "350")
def test_info(self):
self.maxDiff = 1000
for x, expected in CodeInfoTests.test_pairs:
b = dis.Bytecode(x)
self.assertRegex(b.info(), expected)
def test_disassembled(self):
actual = dis.Bytecode(_f).dis()
self.assertEqual(actual, dis_f)
def test_from_traceback(self):
tb = get_tb()
b = dis.Bytecode.from_traceback(tb)
        while tb.tb_next:
            tb = tb.tb_next
self.assertEqual(b.current_offset, tb.tb_lasti)
def test_from_traceback_dis(self):
tb = get_tb()
b = dis.Bytecode.from_traceback(tb)
self.assertEqual(b.dis(), dis_traceback)
if __name__ == "__main__":
unittest.main()
=======
# Minimal tests for dis module
from test.support import run_unittest, captured_stdout
from test.bytecode_helper import BytecodeTestCase
import difflib
import unittest
import sys
import dis
import io
import re
import types
import contextlib
def get_tb():
def _error():
try:
1 / 0
except Exception as e:
tb = e.__traceback__
return tb
tb = _error()
while tb.tb_next:
tb = tb.tb_next
return tb
TRACEBACK_CODE = get_tb().tb_frame.f_code
class _C:
def __init__(self, x):
self.x = x == 1
dis_c_instance_method = """\
%-4d 0 LOAD_FAST 1 (x)
3 LOAD_CONST 1 (1)
6 COMPARE_OP 2 (==)
9 LOAD_FAST 0 (self)
12 STORE_ATTR 0 (x)
15 LOAD_CONST 0 (None)
18 RETURN_VALUE
""" % (_C.__init__.__code__.co_firstlineno + 1,)
dis_c_instance_method_bytes = """\
0 LOAD_FAST 1 (1)
3 LOAD_CONST 1 (1)
6 COMPARE_OP 2 (==)
9 LOAD_FAST 0 (0)
12 STORE_ATTR 0 (0)
15 LOAD_CONST 0 (0)
18 RETURN_VALUE
"""
def _f(a):
print(a)
return 1
dis_f = """\
%-4d 0 LOAD_GLOBAL 0 (print)
3 LOAD_FAST 0 (a)
6 CALL_FUNCTION 1 (1 positional, 0 keyword pair)
9 POP_TOP
%-4d 10 LOAD_CONST 1 (1)
13 RETURN_VALUE
""" % (_f.__code__.co_firstlineno + 1,
_f.__code__.co_firstlineno + 2)
dis_f_co_code = """\
0 LOAD_GLOBAL 0 (0)
3 LOAD_FAST 0 (0)
6 CALL_FUNCTION 1 (1 positional, 0 keyword pair)
9 POP_TOP
10 LOAD_CONST 1 (1)
13 RETURN_VALUE
"""
def bug708901():
for res in range(1,
10):
pass
dis_bug708901 = """\
%-4d 0 SETUP_LOOP 23 (to 26)
3 LOAD_GLOBAL 0 (range)
6 LOAD_CONST 1 (1)
%-4d 9 LOAD_CONST 2 (10)
12 CALL_FUNCTION 2 (2 positional, 0 keyword pair)
15 GET_ITER
>> 16 FOR_ITER 6 (to 25)
19 STORE_FAST 0 (res)
%-4d 22 JUMP_ABSOLUTE 16
>> 25 POP_BLOCK
>> 26 LOAD_CONST 0 (None)
29 RETURN_VALUE
""" % (bug708901.__code__.co_firstlineno + 1,
bug708901.__code__.co_firstlineno + 2,
bug708901.__code__.co_firstlineno + 3)
def bug1333982(x=[]):
assert 0, ([s for s in x] +
1)
pass
dis_bug1333982 = """\
%3d 0 LOAD_CONST 1 (0)
3 POP_JUMP_IF_TRUE 35
6 LOAD_GLOBAL 0 (AssertionError)
9 LOAD_CONST 2 (<code object <listcomp> at 0x..., file "%s", line %d>)
12 LOAD_CONST 3 ('bug1333982.<locals>.<listcomp>')
15 MAKE_FUNCTION 0
18 LOAD_FAST 0 (x)
21 GET_ITER
22 CALL_FUNCTION 1 (1 positional, 0 keyword pair)
%3d 25 LOAD_CONST 4 (1)
28 BINARY_ADD
29 CALL_FUNCTION 1 (1 positional, 0 keyword pair)
32 RAISE_VARARGS 1
%3d >> 35 LOAD_CONST 0 (None)
38 RETURN_VALUE
""" % (bug1333982.__code__.co_firstlineno + 1,
__file__,
bug1333982.__code__.co_firstlineno + 1,
bug1333982.__code__.co_firstlineno + 2,
bug1333982.__code__.co_firstlineno + 3)
_BIG_LINENO_FORMAT = """\
%3d 0 LOAD_GLOBAL 0 (spam)
3 POP_TOP
4 LOAD_CONST 0 (None)
7 RETURN_VALUE
"""
dis_module_expected_results = """\
Disassembly of f:
4 0 LOAD_CONST 0 (None)
3 RETURN_VALUE
Disassembly of g:
5 0 LOAD_CONST 0 (None)
3 RETURN_VALUE
"""
expr_str = "x + 1"
dis_expr_str = """\
1 0 LOAD_NAME 0 (x)
3 LOAD_CONST 0 (1)
6 BINARY_ADD
7 RETURN_VALUE
"""
simple_stmt_str = "x = x + 1"
dis_simple_stmt_str = """\
1 0 LOAD_NAME 0 (x)
3 LOAD_CONST 0 (1)
6 BINARY_ADD
7 STORE_NAME 0 (x)
10 LOAD_CONST 1 (None)
13 RETURN_VALUE
"""
compound_stmt_str = """\
x = 0
while 1:
x += 1"""
# Trailing newline has been deliberately omitted
dis_compound_stmt_str = """\
1 0 LOAD_CONST 0 (0)
3 STORE_NAME 0 (x)
2 6 SETUP_LOOP 14 (to 23)
3 >> 9 LOAD_NAME 0 (x)
12 LOAD_CONST 1 (1)
15 INPLACE_ADD
16 STORE_NAME 0 (x)
19 JUMP_ABSOLUTE 9
22 POP_BLOCK
>> 23 LOAD_CONST 2 (None)
26 RETURN_VALUE
"""
dis_traceback = """\
%-4d 0 SETUP_EXCEPT 12 (to 15)
%-4d 3 LOAD_CONST 1 (1)
6 LOAD_CONST 2 (0)
--> 9 BINARY_TRUE_DIVIDE
10 POP_TOP
11 POP_BLOCK
12 JUMP_FORWARD 46 (to 61)
%-4d >> 15 DUP_TOP
16 LOAD_GLOBAL 0 (Exception)
19 COMPARE_OP 10 (exception match)
22 POP_JUMP_IF_FALSE 60
25 POP_TOP
26 STORE_FAST 0 (e)
29 POP_TOP
30 SETUP_FINALLY 14 (to 47)
%-4d 33 LOAD_FAST 0 (e)
36 LOAD_ATTR 1 (__traceback__)
39 STORE_FAST 1 (tb)
42 POP_BLOCK
43 POP_EXCEPT
44 LOAD_CONST 0 (None)
>> 47 LOAD_CONST 0 (None)
50 STORE_FAST 0 (e)
53 DELETE_FAST 0 (e)
56 END_FINALLY
57 JUMP_FORWARD 1 (to 61)
>> 60 END_FINALLY
%-4d >> 61 LOAD_FAST 1 (tb)
64 RETURN_VALUE
""" % (TRACEBACK_CODE.co_firstlineno + 1,
TRACEBACK_CODE.co_firstlineno + 2,
TRACEBACK_CODE.co_firstlineno + 3,
TRACEBACK_CODE.co_firstlineno + 4,
TRACEBACK_CODE.co_firstlineno + 5)
class DisTests(unittest.TestCase):
def get_disassembly(self, func, lasti=-1, wrapper=True):
# We want to test the default printing behaviour, not the file arg
output = io.StringIO()
with contextlib.redirect_stdout(output):
if wrapper:
dis.dis(func)
else:
dis.disassemble(func, lasti)
return output.getvalue()
def get_disassemble_as_string(self, func, lasti=-1):
return self.get_disassembly(func, lasti, False)
def strip_addresses(self, text):
return re.sub(r'\b0x[0-9A-Fa-f]+\b', '0x...', text)
def do_disassembly_test(self, func, expected):
got = self.get_disassembly(func)
if got != expected:
got = self.strip_addresses(got)
self.assertEqual(got, expected)
def test_opmap(self):
self.assertEqual(dis.opmap["NOP"], 9)
self.assertIn(dis.opmap["LOAD_CONST"], dis.hasconst)
self.assertIn(dis.opmap["STORE_NAME"], dis.hasname)
def test_opname(self):
self.assertEqual(dis.opname[dis.opmap["LOAD_FAST"]], "LOAD_FAST")
def test_boundaries(self):
self.assertEqual(dis.opmap["EXTENDED_ARG"], dis.EXTENDED_ARG)
self.assertEqual(dis.opmap["STORE_NAME"], dis.HAVE_ARGUMENT)
def test_dis(self):
self.do_disassembly_test(_f, dis_f)
def test_bug_708901(self):
self.do_disassembly_test(bug708901, dis_bug708901)
def test_bug_1333982(self):
# This one is checking bytecodes generated for an `assert` statement,
# so fails if the tests are run with -O. Skip this test then.
if not __debug__:
self.skipTest('need asserts, run without -O')
self.do_disassembly_test(bug1333982, dis_bug1333982)
def test_big_linenos(self):
def func(count):
namespace = {}
func = "def foo():\n " + "".join(["\n "] * count + ["spam\n"])
exec(func, namespace)
return namespace['foo']
# Test all small ranges
for i in range(1, 300):
expected = _BIG_LINENO_FORMAT % (i + 2)
self.do_disassembly_test(func(i), expected)
# Test some larger ranges too
for i in range(300, 5000, 10):
expected = _BIG_LINENO_FORMAT % (i + 2)
self.do_disassembly_test(func(i), expected)
from test import dis_module
self.do_disassembly_test(dis_module, dis_module_expected_results)
def test_disassemble_str(self):
self.do_disassembly_test(expr_str, dis_expr_str)
self.do_disassembly_test(simple_stmt_str, dis_simple_stmt_str)
self.do_disassembly_test(compound_stmt_str, dis_compound_stmt_str)
def test_disassemble_bytes(self):
self.do_disassembly_test(_f.__code__.co_code, dis_f_co_code)
def test_disassemble_method(self):
self.do_disassembly_test(_C(1).__init__, dis_c_instance_method)
def test_disassemble_method_bytes(self):
method_bytecode = _C(1).__init__.__code__.co_code
self.do_disassembly_test(method_bytecode, dis_c_instance_method_bytes)
def test_dis_none(self):
try:
del sys.last_traceback
except AttributeError:
pass
self.assertRaises(RuntimeError, dis.dis, None)
def test_dis_traceback(self):
try:
del sys.last_traceback
except AttributeError:
pass
try:
1/0
except Exception as e:
tb = e.__traceback__
sys.last_traceback = tb
tb_dis = self.get_disassemble_as_string(tb.tb_frame.f_code, tb.tb_lasti)
self.do_disassembly_test(None, tb_dis)
def test_dis_object(self):
self.assertRaises(TypeError, dis.dis, object())
class DisWithFileTests(DisTests):
# Run the tests again, using the file arg instead of print
def get_disassembly(self, func, lasti=-1, wrapper=True):
output = io.StringIO()
if wrapper:
dis.dis(func, file=output)
else:
dis.disassemble(func, lasti, file=output)
return output.getvalue()
code_info_code_info = """\
Name: code_info
Filename: (.*)
Argument count: 1
Kw-only arguments: 0
Number of locals: 1
Stack size: 3
Flags: OPTIMIZED, NEWLOCALS, NOFREE
Constants:
0: %r
Names:
0: _format_code_info
1: _get_code_object
Variable names:
0: x""" % (('Formatted details of methods, functions, or code.',)
if sys.flags.optimize < 2 else (None,))
@staticmethod
def tricky(x, y, z=True, *args, c, d, e=[], **kwds):
def f(c=c):
print(x, y, z, c, d, e, f)
yield x, y, z, c, d, e, f
code_info_tricky = """\
Name: tricky
Filename: (.*)
Argument count: 3
Kw-only arguments: 3
Number of locals: 8
Stack size: 7
Flags: OPTIMIZED, NEWLOCALS, VARARGS, VARKEYWORDS, GENERATOR
Constants:
0: None
1: <code object f at (.*), file "(.*)", line (.*)>
2: 'tricky.<locals>.f'
Variable names:
0: x
1: y
2: z
3: c
4: d
5: e
6: args
7: kwds
Cell variables:
0: [edfxyz]
1: [edfxyz]
2: [edfxyz]
3: [edfxyz]
4: [edfxyz]
5: [edfxyz]"""
# NOTE: the order of the cell variables above depends on dictionary order!
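# The [edfxyz] character classes let the regexes match the names in any order.
# tricky is a module-level staticmethod, so __func__ is needed to reach the underlying function.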
co_tricky_nested_f = tricky.__func__.__code__.co_consts[1]
code_info_tricky_nested_f = """\
Name: f
Filename: (.*)
Argument count: 1
Kw-only arguments: 0
Number of locals: 1
Stack size: 8
Flags: OPTIMIZED, NEWLOCALS, NESTED
Constants:
0: None
Names:
0: print
Variable names:
0: c
Free variables:
0: [edfxyz]
1: [edfxyz]
2: [edfxyz]
3: [edfxyz]
4: [edfxyz]
5: [edfxyz]"""
code_info_expr_str = """\
Name: <module>
Filename: <disassembly>
Argument count: 0
Kw-only arguments: 0
Number of locals: 0
Stack size: 2
Flags: NOFREE
Constants:
0: 1
Names:
0: x"""
code_info_simple_stmt_str = """\
Name: <module>
Filename: <disassembly>
Argument count: 0
Kw-only arguments: 0
Number of locals: 0
Stack size: 2
Flags: NOFREE
Constants:
0: 1
1: None
Names:
0: x"""
code_info_compound_stmt_str = """\
Name: <module>
Filename: <disassembly>
Argument count: 0
Kw-only arguments: 0
Number of locals: 0
Stack size: 2
Flags: NOFREE
Constants:
0: 0
1: 1
2: None
Names:
0: x"""
class CodeInfoTests(unittest.TestCase):
test_pairs = [
(dis.code_info, code_info_code_info),
(tricky, code_info_tricky),
(co_tricky_nested_f, code_info_tricky_nested_f),
(expr_str, code_info_expr_str),
(simple_stmt_str, code_info_simple_stmt_str),
(compound_stmt_str, code_info_compound_stmt_str),
]
def test_code_info(self):
self.maxDiff = 1000
for x, expected in self.test_pairs:
self.assertRegex(dis.code_info(x), expected)
def test_show_code(self):
self.maxDiff = 1000
for x, expected in self.test_pairs:
with captured_stdout() as output:
dis.show_code(x)
self.assertRegex(output.getvalue(), expected+"\n")
output = io.StringIO()
dis.show_code(x, file=output)
self.assertRegex(output.getvalue(), expected)
def test_code_info_object(self):
self.assertRaises(TypeError, dis.code_info, object())
def test_pretty_flags_no_flags(self):
self.assertEqual(dis.pretty_flags(0), '0x0')
# Fodder for instruction introspection tests
# Editing any of these may require recalculating the expected output
def outer(a=1, b=2):
def f(c=3, d=4):
def inner(e=5, f=6):
print(a, b, c, d, e, f)
print(a, b, c, d)
return inner
print(a, b, '', 1, [], {}, "Hello world!")
return f
def jumpy():
# This won't actually run (but that's OK, we only disassemble it)
for i in range(10):
print(i)
if i < 4:
continue
if i > 6:
break
else:
print("I can haz else clause?")
while i:
print(i)
i -= 1
if i > 6:
continue
if i < 4:
break
else:
print("Who let lolcatz into this test suite?")
try:
1 / 0
except ZeroDivisionError:
print("Here we go, here we go, here we go...")
else:
with i as dodgy:
print("Never reach this")
finally:
print("OK, now we're done")
# End fodder for opinfo generation tests
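# Rebase the expected first-line numbers so they are independent of where the fodder sits in this file.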
expected_outer_line = 1
_line_offset = outer.__code__.co_firstlineno - 1
code_object_f = outer.__code__.co_consts[3]
expected_f_line = code_object_f.co_firstlineno - _line_offset
code_object_inner = code_object_f.co_consts[3]
expected_inner_line = code_object_inner.co_firstlineno - _line_offset
expected_jumpy_line = 1
# The following lines are useful to regenerate the expected results after
# either the fodder is modified or the bytecode generation changes
# After regeneration, update the references to code_object_f and
# code_object_inner before rerunning the tests
#_instructions = dis.get_instructions(outer, first_line=expected_outer_line)
#print('expected_opinfo_outer = [\n ',
#',\n '.join(map(str, _instructions)), ',\n]', sep='')
#_instructions = dis.get_instructions(outer(), first_line=expected_outer_line)
#print('expected_opinfo_f = [\n ',
#',\n '.join(map(str, _instructions)), ',\n]', sep='')
#_instructions = dis.get_instructions(outer()(), first_line=expected_outer_line)
#print('expected_opinfo_inner = [\n ',
#',\n '.join(map(str, _instructions)), ',\n]', sep='')
#_instructions = dis.get_instructions(jumpy, first_line=expected_jumpy_line)
#print('expected_opinfo_jumpy = [\n ',
#',\n '.join(map(str, _instructions)), ',\n]', sep='')
Instruction = dis.Instruction
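# Instruction is a namedtuple: (opname, opcode, arg, argval, argrepr, offset, starts_line, is_jump_target).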
expected_opinfo_outer = [
Instruction(opname='LOAD_CONST', opcode=100, arg=1, argval=3, argrepr='3', offset=0, starts_line=2, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=2, argval=4, argrepr='4', offset=3, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CLOSURE', opcode=135, arg=0, argval='a', argrepr='a', offset=6, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CLOSURE', opcode=135, arg=1, argval='b', argrepr='b', offset=9, starts_line=None, is_jump_target=False),
Instruction(opname='BUILD_TUPLE', opcode=102, arg=2, argval=2, argrepr='', offset=12, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=3, argval=code_object_f, argrepr=repr(code_object_f), offset=15, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=4, argval='outer.<locals>.f', argrepr="'outer.<locals>.f'", offset=18, starts_line=None, is_jump_target=False),
Instruction(opname='MAKE_CLOSURE', opcode=134, arg=2, argval=2, argrepr='', offset=21, starts_line=None, is_jump_target=False),
Instruction(opname='STORE_FAST', opcode=125, arg=2, argval='f', argrepr='f', offset=24, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=0, argval='print', argrepr='print', offset=27, starts_line=7, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=0, argval='a', argrepr='a', offset=30, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=1, argval='b', argrepr='b', offset=33, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=5, argval='', argrepr="''", offset=36, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=6, argval=1, argrepr='1', offset=39, starts_line=None, is_jump_target=False),
Instruction(opname='BUILD_LIST', opcode=103, arg=0, argval=0, argrepr='', offset=42, starts_line=None, is_jump_target=False),
Instruction(opname='BUILD_MAP', opcode=105, arg=0, argval=0, argrepr='', offset=45, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=7, argval='Hello world!', argrepr="'Hello world!'", offset=48, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=7, argval=7, argrepr='7 positional, 0 keyword pair', offset=51, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=54, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=2, argval='f', argrepr='f', offset=55, starts_line=8, is_jump_target=False),
Instruction(opname='RETURN_VALUE', opcode=83, arg=None, argval=None, argrepr='', offset=58, starts_line=None, is_jump_target=False),
]
expected_opinfo_f = [
Instruction(opname='LOAD_CONST', opcode=100, arg=1, argval=5, argrepr='5', offset=0, starts_line=3, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=2, argval=6, argrepr='6', offset=3, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CLOSURE', opcode=135, arg=2, argval='a', argrepr='a', offset=6, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CLOSURE', opcode=135, arg=3, argval='b', argrepr='b', offset=9, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CLOSURE', opcode=135, arg=0, argval='c', argrepr='c', offset=12, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CLOSURE', opcode=135, arg=1, argval='d', argrepr='d', offset=15, starts_line=None, is_jump_target=False),
Instruction(opname='BUILD_TUPLE', opcode=102, arg=4, argval=4, argrepr='', offset=18, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=3, argval=code_object_inner, argrepr=repr(code_object_inner), offset=21, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=4, argval='outer.<locals>.f.<locals>.inner', argrepr="'outer.<locals>.f.<locals>.inner'", offset=24, starts_line=None, is_jump_target=False),
Instruction(opname='MAKE_CLOSURE', opcode=134, arg=2, argval=2, argrepr='', offset=27, starts_line=None, is_jump_target=False),
Instruction(opname='STORE_FAST', opcode=125, arg=2, argval='inner', argrepr='inner', offset=30, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=0, argval='print', argrepr='print', offset=33, starts_line=5, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=2, argval='a', argrepr='a', offset=36, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=3, argval='b', argrepr='b', offset=39, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=0, argval='c', argrepr='c', offset=42, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=1, argval='d', argrepr='d', offset=45, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=4, argval=4, argrepr='4 positional, 0 keyword pair', offset=48, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=51, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=2, argval='inner', argrepr='inner', offset=52, starts_line=6, is_jump_target=False),
Instruction(opname='RETURN_VALUE', opcode=83, arg=None, argval=None, argrepr='', offset=55, starts_line=None, is_jump_target=False),
]
expected_opinfo_inner = [
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=0, argval='print', argrepr='print', offset=0, starts_line=4, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=0, argval='a', argrepr='a', offset=3, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=1, argval='b', argrepr='b', offset=6, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=2, argval='c', argrepr='c', offset=9, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_DEREF', opcode=136, arg=3, argval='d', argrepr='d', offset=12, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='e', argrepr='e', offset=15, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=1, argval='f', argrepr='f', offset=18, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=6, argval=6, argrepr='6 positional, 0 keyword pair', offset=21, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=24, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=25, starts_line=None, is_jump_target=False),
Instruction(opname='RETURN_VALUE', opcode=83, arg=None, argval=None, argrepr='', offset=28, starts_line=None, is_jump_target=False),
]
expected_opinfo_jumpy = [
Instruction(opname='SETUP_LOOP', opcode=120, arg=74, argval=77, argrepr='to 77', offset=0, starts_line=3, is_jump_target=False),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=0, argval='range', argrepr='range', offset=3, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=1, argval=10, argrepr='10', offset=6, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=1, argval=1, argrepr='1 positional, 0 keyword pair', offset=9, starts_line=None, is_jump_target=False),
Instruction(opname='GET_ITER', opcode=68, arg=None, argval=None, argrepr='', offset=12, starts_line=None, is_jump_target=False),
Instruction(opname='FOR_ITER', opcode=93, arg=50, argval=66, argrepr='to 66', offset=13, starts_line=None, is_jump_target=True),
Instruction(opname='STORE_FAST', opcode=125, arg=0, argval='i', argrepr='i', offset=16, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', argrepr='print', offset=19, starts_line=4, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=22, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=1, argval=1, argrepr='1 positional, 0 keyword pair', offset=25, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=28, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=29, starts_line=5, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=2, argval=4, argrepr='4', offset=32, starts_line=None, is_jump_target=False),
Instruction(opname='COMPARE_OP', opcode=107, arg=0, argval='<', argrepr='<', offset=35, starts_line=None, is_jump_target=False),
Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=47, argval=47, argrepr='', offset=38, starts_line=None, is_jump_target=False),
Instruction(opname='JUMP_ABSOLUTE', opcode=113, arg=13, argval=13, argrepr='', offset=41, starts_line=6, is_jump_target=False),
Instruction(opname='JUMP_FORWARD', opcode=110, arg=0, argval=47, argrepr='to 47', offset=44, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=47, starts_line=7, is_jump_target=True),
Instruction(opname='LOAD_CONST', opcode=100, arg=3, argval=6, argrepr='6', offset=50, starts_line=None, is_jump_target=False),
Instruction(opname='COMPARE_OP', opcode=107, arg=4, argval='>', argrepr='>', offset=53, starts_line=None, is_jump_target=False),
Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=13, argval=13, argrepr='', offset=56, starts_line=None, is_jump_target=False),
Instruction(opname='BREAK_LOOP', opcode=80, arg=None, argval=None, argrepr='', offset=59, starts_line=8, is_jump_target=False),
Instruction(opname='JUMP_ABSOLUTE', opcode=113, arg=13, argval=13, argrepr='', offset=60, starts_line=None, is_jump_target=False),
Instruction(opname='JUMP_ABSOLUTE', opcode=113, arg=13, argval=13, argrepr='', offset=63, starts_line=None, is_jump_target=False),
Instruction(opname='POP_BLOCK', opcode=87, arg=None, argval=None, argrepr='', offset=66, starts_line=None, is_jump_target=True),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', argrepr='print', offset=67, starts_line=10, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=4, argval='I can haz else clause?', argrepr="'I can haz else clause?'", offset=70, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=1, argval=1, argrepr='1 positional, 0 keyword pair', offset=73, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=76, starts_line=None, is_jump_target=False),
Instruction(opname='SETUP_LOOP', opcode=120, arg=74, argval=154, argrepr='to 154', offset=77, starts_line=11, is_jump_target=True),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=80, starts_line=None, is_jump_target=True),
Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=143, argval=143, argrepr='', offset=83, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', argrepr='print', offset=86, starts_line=12, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=89, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=1, argval=1, argrepr='1 positional, 0 keyword pair', offset=92, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=95, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=96, starts_line=13, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=5, argval=1, argrepr='1', offset=99, starts_line=None, is_jump_target=False),
Instruction(opname='INPLACE_SUBTRACT', opcode=56, arg=None, argval=None, argrepr='', offset=102, starts_line=None, is_jump_target=False),
Instruction(opname='STORE_FAST', opcode=125, arg=0, argval='i', argrepr='i', offset=103, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=106, starts_line=14, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=3, argval=6, argrepr='6', offset=109, starts_line=None, is_jump_target=False),
Instruction(opname='COMPARE_OP', opcode=107, arg=4, argval='>', argrepr='>', offset=112, starts_line=None, is_jump_target=False),
Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=124, argval=124, argrepr='', offset=115, starts_line=None, is_jump_target=False),
Instruction(opname='JUMP_ABSOLUTE', opcode=113, arg=80, argval=80, argrepr='', offset=118, starts_line=15, is_jump_target=False),
Instruction(opname='JUMP_FORWARD', opcode=110, arg=0, argval=124, argrepr='to 124', offset=121, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=124, starts_line=16, is_jump_target=True),
Instruction(opname='LOAD_CONST', opcode=100, arg=2, argval=4, argrepr='4', offset=127, starts_line=None, is_jump_target=False),
Instruction(opname='COMPARE_OP', opcode=107, arg=0, argval='<', argrepr='<', offset=130, starts_line=None, is_jump_target=False),
Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=80, argval=80, argrepr='', offset=133, starts_line=None, is_jump_target=False),
Instruction(opname='BREAK_LOOP', opcode=80, arg=None, argval=None, argrepr='', offset=136, starts_line=17, is_jump_target=False),
Instruction(opname='JUMP_ABSOLUTE', opcode=113, arg=80, argval=80, argrepr='', offset=137, starts_line=None, is_jump_target=False),
Instruction(opname='JUMP_ABSOLUTE', opcode=113, arg=80, argval=80, argrepr='', offset=140, starts_line=None, is_jump_target=False),
Instruction(opname='POP_BLOCK', opcode=87, arg=None, argval=None, argrepr='', offset=143, starts_line=None, is_jump_target=True),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', argrepr='print', offset=144, starts_line=19, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=6, argval='Who let lolcatz into this test suite?', argrepr="'Who let lolcatz into this test suite?'", offset=147, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=1, argval=1, argrepr='1 positional, 0 keyword pair', offset=150, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=153, starts_line=None, is_jump_target=False),
Instruction(opname='SETUP_FINALLY', opcode=122, arg=72, argval=229, argrepr='to 229', offset=154, starts_line=20, is_jump_target=True),
Instruction(opname='SETUP_EXCEPT', opcode=121, arg=12, argval=172, argrepr='to 172', offset=157, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=5, argval=1, argrepr='1', offset=160, starts_line=21, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=7, argval=0, argrepr='0', offset=163, starts_line=None, is_jump_target=False),
Instruction(opname='BINARY_TRUE_DIVIDE', opcode=27, arg=None, argval=None, argrepr='', offset=166, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=167, starts_line=None, is_jump_target=False),
Instruction(opname='POP_BLOCK', opcode=87, arg=None, argval=None, argrepr='', offset=168, starts_line=None, is_jump_target=False),
Instruction(opname='JUMP_FORWARD', opcode=110, arg=28, argval=200, argrepr='to 200', offset=169, starts_line=None, is_jump_target=False),
Instruction(opname='DUP_TOP', opcode=4, arg=None, argval=None, argrepr='', offset=172, starts_line=22, is_jump_target=True),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=2, argval='ZeroDivisionError', argrepr='ZeroDivisionError', offset=173, starts_line=None, is_jump_target=False),
Instruction(opname='COMPARE_OP', opcode=107, arg=10, argval='exception match', argrepr='exception match', offset=176, starts_line=None, is_jump_target=False),
Instruction(opname='POP_JUMP_IF_FALSE', opcode=114, arg=199, argval=199, argrepr='', offset=179, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=182, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=183, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=184, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', argrepr='print', offset=185, starts_line=23, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=8, argval='Here we go, here we go, here we go...', argrepr="'Here we go, here we go, here we go...'", offset=188, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=1, argval=1, argrepr='1 positional, 0 keyword pair', offset=191, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=194, starts_line=None, is_jump_target=False),
Instruction(opname='POP_EXCEPT', opcode=89, arg=None, argval=None, argrepr='', offset=195, starts_line=None, is_jump_target=False),
Instruction(opname='JUMP_FORWARD', opcode=110, arg=26, argval=225, argrepr='to 225', offset=196, starts_line=None, is_jump_target=False),
Instruction(opname='END_FINALLY', opcode=88, arg=None, argval=None, argrepr='', offset=199, starts_line=None, is_jump_target=True),
Instruction(opname='LOAD_FAST', opcode=124, arg=0, argval='i', argrepr='i', offset=200, starts_line=25, is_jump_target=True),
Instruction(opname='SETUP_WITH', opcode=143, arg=17, argval=223, argrepr='to 223', offset=203, starts_line=None, is_jump_target=False),
Instruction(opname='STORE_FAST', opcode=125, arg=1, argval='dodgy', argrepr='dodgy', offset=206, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', argrepr='print', offset=209, starts_line=26, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=9, argval='Never reach this', argrepr="'Never reach this'", offset=212, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=1, argval=1, argrepr='1 positional, 0 keyword pair', offset=215, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=218, starts_line=None, is_jump_target=False),
Instruction(opname='POP_BLOCK', opcode=87, arg=None, argval=None, argrepr='', offset=219, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=220, starts_line=None, is_jump_target=False),
Instruction(opname='WITH_CLEANUP', opcode=81, arg=None, argval=None, argrepr='', offset=223, starts_line=None, is_jump_target=True),
Instruction(opname='END_FINALLY', opcode=88, arg=None, argval=None, argrepr='', offset=224, starts_line=None, is_jump_target=False),
Instruction(opname='POP_BLOCK', opcode=87, arg=None, argval=None, argrepr='', offset=225, starts_line=None, is_jump_target=True),
Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=226, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_GLOBAL', opcode=116, arg=1, argval='print', argrepr='print', offset=229, starts_line=28, is_jump_target=True),
Instruction(opname='LOAD_CONST', opcode=100, arg=10, argval="OK, now we're done", argrepr='"OK, now we\'re done"', offset=232, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=1, argval=1, argrepr='1 positional, 0 keyword pair', offset=235, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=238, starts_line=None, is_jump_target=False),
Instruction(opname='END_FINALLY', opcode=88, arg=None, argval=None, argrepr='', offset=239, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=240, starts_line=None, is_jump_target=False),
Instruction(opname='RETURN_VALUE', opcode=83, arg=None, argval=None, argrepr='', offset=243, starts_line=None, is_jump_target=False),
]
# One last piece of inspect fodder to check the default line number handling
def simple(): pass
expected_opinfo_simple = [
Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=0, starts_line=simple.__code__.co_firstlineno, is_jump_target=False),
Instruction(opname='RETURN_VALUE', opcode=83, arg=None, argval=None, argrepr='', offset=3, starts_line=None, is_jump_target=False)
]
class InstructionTests(BytecodeTestCase):
def test_default_first_line(self):
actual = dis.get_instructions(simple)
self.assertEqual(list(actual), expected_opinfo_simple)
def test_first_line_set_to_None(self):
actual = dis.get_instructions(simple, first_line=None)
self.assertEqual(list(actual), expected_opinfo_simple)
def test_outer(self):
actual = dis.get_instructions(outer, first_line=expected_outer_line)
self.assertEqual(list(actual), expected_opinfo_outer)
def test_nested(self):
with captured_stdout():
f = outer()
actual = dis.get_instructions(f, first_line=expected_f_line)
self.assertEqual(list(actual), expected_opinfo_f)
def test_doubly_nested(self):
with captured_stdout():
inner = outer()()
actual = dis.get_instructions(inner, first_line=expected_inner_line)
self.assertEqual(list(actual), expected_opinfo_inner)
def test_jumpy(self):
actual = dis.get_instructions(jumpy, first_line=expected_jumpy_line)
self.assertEqual(list(actual), expected_opinfo_jumpy)
# get_instructions has its own tests above, so we can rely on it to validate
# the object-oriented API
class BytecodeTests(unittest.TestCase):
def test_instantiation(self):
# Test with function, method, code string and code object
for obj in [_f, _C(1).__init__, "a=1", _f.__code__]:
with self.subTest(obj=obj):
b = dis.Bytecode(obj)
self.assertIsInstance(b.codeobj, types.CodeType)
self.assertRaises(TypeError, dis.Bytecode, object())
def test_iteration(self):
for obj in [_f, _C(1).__init__, "a=1", _f.__code__]:
with self.subTest(obj=obj):
via_object = list(dis.Bytecode(obj))
via_generator = list(dis.get_instructions(obj))
self.assertEqual(via_object, via_generator)
def test_explicit_first_line(self):
actual = dis.Bytecode(outer, first_line=expected_outer_line)
self.assertEqual(list(actual), expected_opinfo_outer)
def test_source_line_in_disassembly(self):
# Use the line in the source code
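        # dis() right-aligns the source line number in the first three columns of output.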
actual = dis.Bytecode(simple).dis()[:3]
expected = "{:>3}".format(simple.__code__.co_firstlineno)
self.assertEqual(actual, expected)
# Use an explicit first line number
actual = dis.Bytecode(simple, first_line=350).dis()[:3]
self.assertEqual(actual, "350")
def test_info(self):
self.maxDiff = 1000
for x, expected in CodeInfoTests.test_pairs:
b = dis.Bytecode(x)
self.assertRegex(b.info(), expected)
def test_disassembled(self):
actual = dis.Bytecode(_f).dis()
self.assertEqual(actual, dis_f)
def test_from_traceback(self):
tb = get_tb()
b = dis.Bytecode.from_traceback(tb)
        # Walk to the innermost frame; its tb_lasti marks the instruction that raised.
        while tb.tb_next:
            tb = tb.tb_next
self.assertEqual(b.current_offset, tb.tb_lasti)
def test_from_traceback_dis(self):
tb = get_tb()
b = dis.Bytecode.from_traceback(tb)
self.assertEqual(b.dis(), dis_traceback)
if __name__ == "__main__":
unittest.main()
Instruction(opname='LOAD_CONST', opcode=100, arg=10, argval="OK, now we're done", argrepr='"OK, now we\'re done"', offset=232, starts_line=None, is_jump_target=False),
Instruction(opname='CALL_FUNCTION', opcode=131, arg=1, argval=1, argrepr='1 positional, 0 keyword pair', offset=235, starts_line=None, is_jump_target=False),
Instruction(opname='POP_TOP', opcode=1, arg=None, argval=None, argrepr='', offset=238, starts_line=None, is_jump_target=False),
Instruction(opname='END_FINALLY', opcode=88, arg=None, argval=None, argrepr='', offset=239, starts_line=None, is_jump_target=False),
Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=240, starts_line=None, is_jump_target=False),
Instruction(opname='RETURN_VALUE', opcode=83, arg=None, argval=None, argrepr='', offset=243, starts_line=None, is_jump_target=False),
]
# One last piece of inspect fodder to check the default line number handling
def simple(): pass
expected_opinfo_simple = [
Instruction(opname='LOAD_CONST', opcode=100, arg=0, argval=None, argrepr='None', offset=0, starts_line=simple.__code__.co_firstlineno, is_jump_target=False),
Instruction(opname='RETURN_VALUE', opcode=83, arg=None, argval=None, argrepr='', offset=3, starts_line=None, is_jump_target=False)
]
class InstructionTests(BytecodeTestCase):
def test_default_first_line(self):
actual = dis.get_instructions(simple)
self.assertEqual(list(actual), expected_opinfo_simple)
def test_first_line_set_to_None(self):
actual = dis.get_instructions(simple, first_line=None)
self.assertEqual(list(actual), expected_opinfo_simple)
def test_outer(self):
actual = dis.get_instructions(outer, first_line=expected_outer_line)
self.assertEqual(list(actual), expected_opinfo_outer)
def test_nested(self):
with captured_stdout():
f = outer()
actual = dis.get_instructions(f, first_line=expected_f_line)
self.assertEqual(list(actual), expected_opinfo_f)
def test_doubly_nested(self):
with captured_stdout():
inner = outer()()
actual = dis.get_instructions(inner, first_line=expected_inner_line)
self.assertEqual(list(actual), expected_opinfo_inner)
def test_jumpy(self):
actual = dis.get_instructions(jumpy, first_line=expected_jumpy_line)
self.assertEqual(list(actual), expected_opinfo_jumpy)
# get_instructions has its own tests above, so can rely on it to validate
# the object oriented API
class BytecodeTests(unittest.TestCase):
def test_instantiation(self):
# Test with function, method, code string and code object
for obj in [_f, _C(1).__init__, "a=1", _f.__code__]:
with self.subTest(obj=obj):
b = dis.Bytecode(obj)
self.assertIsInstance(b.codeobj, types.CodeType)
self.assertRaises(TypeError, dis.Bytecode, object())
def test_iteration(self):
for obj in [_f, _C(1).__init__, "a=1", _f.__code__]:
with self.subTest(obj=obj):
via_object = list(dis.Bytecode(obj))
via_generator = list(dis.get_instructions(obj))
self.assertEqual(via_object, via_generator)
def test_explicit_first_line(self):
actual = dis.Bytecode(outer, first_line=expected_outer_line)
self.assertEqual(list(actual), expected_opinfo_outer)
def test_source_line_in_disassembly(self):
# Use the line in the source code
actual = dis.Bytecode(simple).dis()[:3]
expected = "{:>3}".format(simple.__code__.co_firstlineno)
self.assertEqual(actual, expected)
# Use an explicit first line number
actual = dis.Bytecode(simple, first_line=350).dis()[:3]
self.assertEqual(actual, "350")
def test_info(self):
self.maxDiff = 1000
for x, expected in CodeInfoTests.test_pairs:
b = dis.Bytecode(x)
self.assertRegex(b.info(), expected)
def test_disassembled(self):
actual = dis.Bytecode(_f).dis()
self.assertEqual(actual, dis_f)
def test_from_traceback(self):
tb = get_tb()
b = dis.Bytecode.from_traceback(tb)
while tb.tb_next: tb = tb.tb_next
self.assertEqual(b.current_offset, tb.tb_lasti)
def test_from_traceback_dis(self):
tb = get_tb()
b = dis.Bytecode.from_traceback(tb)
self.assertEqual(b.dis(), dis_traceback)
if __name__ == "__main__":
unittest.main()
>>>>>>> b875702c9c06ab5012e52ff4337439b03918f453
| mit | 9,050,017,357,533,923,000 | 48.695409 | 205 | 0.633255 | false |
lsst-ts/ts_wep | tests/bsc/test_nbrStar.py | 1 | 2710 | # This file is part of ts_wep.
#
# Developed for the LSST Telescope and Site Systems.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import numpy as np
import unittest
from lsst.ts.wep.bsc.StarData import StarData
from lsst.ts.wep.bsc.NbrStar import NbrStar
from lsst.ts.wep.Utility import FilterType
class TestNbrStar(unittest.TestCase):
"""Test the NbrStar class."""
def setUp(self):
stars = StarData(
[123, 456, 789],
[0.1, 0.2, 0.3],
[2.1, 2.2, 2.3],
[2.0, 3.0, 4.0],
[2.1, 2.1, 4.1],
[2.2, 3.2, 4.2],
[2.3, 3.3, 4.3],
[2.4, 3.4, 4.4],
[2.5, 3.5, 4.5],
)
stars.setRaInPixel(stars.getRA() * 10)
stars.setDeclInPixel(stars.getDecl() * 10)
self.stars = stars
self.neighboringStar = NbrStar()
def testGetId(self):
self._addStar()
self.assertTrue(123 in self.neighboringStar.getId())
self.assertTrue(self.neighboringStar.getId()[123], [456])
def _addStar(self):
self.neighboringStar.addStar(self.stars, 0, np.array([1]), FilterType.R)
def testGetRaDecl(self):
self._addStar()
self.assertEqual(
self.neighboringStar.getRaDecl(), {456: (0.2, 2.2), 123: (0.1, 2.1)}
)
def testGetRaDeclInPixel(self):
self._addStar()
self.assertEqual(
self.neighboringStar.getRaDeclInPixel(),
{456: (2.0, 22.0), 123: (1.0, 21.0)},
)
def testGetMag(self):
self._addStar()
self.assertEqual(len(self.neighboringStar.getMag(FilterType.R)), 2)
self.assertEqual(self.neighboringStar.getMag(FilterType.U), {})
def testAddStarAndGetData(self):
self._addStar()
self.assertNotEqual(len(self.neighboringStar.getId()), 0)
if __name__ == "__main__":
# Do the unit test
unittest.main()
| gpl-3.0 | 5,397,523,749,416,159,000 | 28.139785 | 80 | 0.628782 | false |
naggie/dsblog | dsblog/environment.py | 1 | 1659 | import yaml
from os import makedirs
from os.path import join,dirname,realpath,isdir
script_dir = dirname(realpath(__file__))
default_yml_filepath = join(script_dir,'defaults.yml')
defaults = {
"output_dir": 'output',
"header_img_dir": 'imgs/headers/',
"scaled_img_dir": 'imgs/scaled/',
"original_img_dir": 'imgs/original/',
"header_img_url": 'imgs/headers/',
"scaled_img_url": 'imgs/scaled/',
"original_img_url": 'imgs/original/',
"template_dir": join(script_dir,'templates'),
"max_article_img_width": 710,
"max_avatar_width": 710,
"database_file": "database.yml",
"static_dir": join(script_dir,'static'),
"copyright_msg": None,
"extra_links": [],
"import_to_discourse": False,
"strapline": None,
}
config = dict()
def getConfig():
if not config:
raise RuntimeError('config not loaded yet')
return config
def loadConfig(yml_filepath):
config.update(defaults)
with open(yml_filepath) as f:
patch = yaml.load(f.read())
config.update(patch)
# make paths absolute
config['header_img_dir'] = join(config['output_dir'],config['header_img_dir'])
config['scaled_img_dir'] = join(config['output_dir'],config['scaled_img_dir'])
config['original_img_dir'] = join(config['output_dir'],config['original_img_dir'])
config['database_file'] = join(config['output_dir'],config['database_file'])
def makeDirs():
if not config:
raise RuntimeError('config not loaded yet')
for key in ['header_img_dir','scaled_img_dir','original_img_dir']:
path = config[key]
if not isdir(path):
makedirs(path)
| mit | 978,721,617,976,014,700 | 24.136364 | 86 | 0.634117 | false |
praekelt/vumi-go | go/apps/jsbox/metrics.py | 1 | 3058 | # -*- test-case-name: go.apps.jsbox.tests.test_metrics -*-
# -*- coding: utf-8 -*-
"""Metrics for JS Box sandboxes"""
import re
from vxsandbox import SandboxResource
from vumi.blinkenlights.metrics import SUM, AVG, MIN, MAX, LAST
class MetricEventError(Exception):
"""Raised when a command cannot be converted to a metric event."""
class MetricEvent(object):
AGGREGATORS = {
'sum': SUM,
'avg': AVG,
'min': MIN,
'max': MAX,
'last': LAST
}
NAME_REGEX = re.compile(r"^[a-zA-Z][a-zA-Z0-9._-]{,100}$")
def __init__(self, store, metric, value, agg):
self.store = store
self.metric = metric
self.value = value
self.agg = agg
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
return all((self.store == other.store, self.metric == other.metric,
self.value == other.value, self.agg is other.agg))
@classmethod
def _parse_name(cls, name, kind):
if name is None:
raise MetricEventError("Missing %s name." % (kind,))
if not isinstance(name, basestring):
raise MetricEventError("Invalid type for %s name: %r"
% (kind, name))
if not cls.NAME_REGEX.match(name):
raise MetricEventError("Invalid %s name: %r." % (kind, name))
return name
@classmethod
def _parse_value(cls, value):
try:
value = float(value)
except (ValueError, TypeError):
raise MetricEventError("Invalid metric value %r." % (value,))
return value
@classmethod
def _parse_agg(cls, agg):
if not isinstance(agg, basestring):
raise MetricEventError("Invalid metric aggregator %r" % (agg,))
if agg not in cls.AGGREGATORS:
raise MetricEventError("Invalid metric aggregator %r." % (agg,))
return cls.AGGREGATORS[agg]
@classmethod
def from_command(cls, command):
store = cls._parse_name(command.get('store', 'default'), 'store')
metric = cls._parse_name(command.get('metric'), 'metric')
value = cls._parse_value(command.get('value'))
agg = cls._parse_agg(command.get('agg'))
return cls(store, metric, value, agg)
class MetricsResource(SandboxResource):
"""Resource that provides metric storing."""
def _publish_event(self, api, ev):
conversation = self.app_worker.conversation_for_api(api)
self.app_worker.publish_account_metric(conversation.user_account.key,
ev.store, ev.metric, ev.value,
ev.agg)
def handle_fire(self, api, command):
"""Fire a metric value."""
try:
ev = MetricEvent.from_command(command)
except MetricEventError, e:
return self.reply(command, success=False, reason=unicode(e))
self._publish_event(api, ev)
return self.reply(command, success=True)
| bsd-3-clause | 1,665,057,652,388,163,000 | 31.88172 | 77 | 0.577502 | false |
calexil/FightstickDisplay | pyglet/image/buffer.py | 1 | 9407 | # ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# Copyright (c) 2008-2021 pyglet contributors
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
from pyglet.gl import *
def get_max_color_attachments():
"""Get the maximum allow Framebuffer Color attachements"""
number = GLint()
glGetIntegerv(GL_MAX_COLOR_ATTACHMENTS, number)
return number.value
class Renderbuffer:
"""OpenGL Renderbuffer Object"""
def __init__(self, width, height, internal_format, samples=1):
"""Create an instance of a Renderbuffer object."""
self._id = GLuint()
self._width = width
self._height = height
self._internal_format = internal_format
glGenRenderbuffers(1, self._id)
glBindRenderbuffer(GL_RENDERBUFFER, self._id)
if samples > 1:
glRenderbufferStorageMultisample(GL_RENDERBUFFER, samples, internal_format, width, height)
else:
glRenderbufferStorage(GL_RENDERBUFFER, internal_format, width, height)
glBindRenderbuffer(GL_RENDERBUFFER, 0)
@property
def id(self):
return self._id.value
@property
def width(self):
return self._width
@property
def height(self):
return self._height
def bind(self):
glBindRenderbuffer(GL_RENDERBUFFER, self._id)
@staticmethod
def unbind():
glBindRenderbuffer(GL_RENDERBUFFER, 0)
def delete(self):
glDeleteRenderbuffers(1, self._id)
def __del__(self):
try:
glDeleteRenderbuffers(1, self._id)
# Python interpreter is shutting down:
except ImportError:
pass
def __repr__(self):
return "{}(id={})".format(self.__class__.__name__, self._id.value)
class Framebuffer:
"""OpenGL Framebuffer Object"""
def __init__(self, target=GL_FRAMEBUFFER):
"""Create an OpenGL Framebuffer object.
:rtype: :py:class:`~pyglet.image.Framebuffer`
.. versionadded:: 2.0
"""
self._id = GLuint()
glGenFramebuffers(1, self._id)
self._attachment_types = 0
self._width = 0
self._height = 0
self.target = target
@property
def id(self):
return self._id.value
@property
def width(self):
"""The width of the widest attachment."""
return self._width
@property
def height(self):
"""The width of the widest attachment."""
return self._height
def bind(self):
glBindFramebuffer(self.target, self._id)
def unbind(self):
glBindFramebuffer(self.target, 0)
def clear(self):
if self._attachment_types:
self.bind()
glClear(self._attachment_types)
self.unbind()
def delete(self):
glDeleteFramebuffers(1, self._id)
@property
def is_complete(self):
return glCheckFramebufferStatus(GL_FRAMEBUFFER) == GL_FRAMEBUFFER_COMPLETE
@staticmethod
def get_status():
states = {GL_FRAMEBUFFER_UNSUPPORTED: "Framebuffer unsupported. Try another format.",
GL_FRAMEBUFFER_INCOMPLETE_ATTACHMENT: "Framebuffer incomplete attachment.",
GL_FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT: "Framebuffer missing attachment.",
GL_FRAMEBUFFER_INCOMPLETE_DIMENSIONS_EXT: "Framebuffer unsupported dimension.",
GL_FRAMEBUFFER_INCOMPLETE_FORMATS_EXT: "Framebuffer incomplete formats.",
GL_FRAMEBUFFER_INCOMPLETE_DRAW_BUFFER: "Framebuffer incomplete draw buffer.",
GL_FRAMEBUFFER_INCOMPLETE_READ_BUFFER: "Framebuffer incomplete read buffer.",
GL_FRAMEBUFFER_COMPLETE: "Framebuffer is complete."}
gl_status = glCheckFramebufferStatus(GL_FRAMEBUFFER)
return states.get(gl_status, "Unknown error")
def attach_texture(self, texture, target=GL_FRAMEBUFFER, attachment=GL_COLOR_ATTACHMENT0):
"""Attach a Texture to the Framebuffer
:Parameters:
`texture` : pyglet.image.Texture
Specifies the texture object to attach to the framebuffer attachment
point named by attachment.
`target` : int
Specifies the framebuffer target. target must be GL_DRAW_FRAMEBUFFER,
GL_READ_FRAMEBUFFER, or GL_FRAMEBUFFER. GL_FRAMEBUFFER is equivalent
to GL_DRAW_FRAMEBUFFER.
`attachment` : int
Specifies the attachment point of the framebuffer. attachment must be
GL_COLOR_ATTACHMENTi, GL_DEPTH_ATTACHMENT, GL_STENCIL_ATTACHMENT or
GL_DEPTH_STENCIL_ATTACHMENT.
"""
self.bind()
glFramebufferTexture(target, attachment, texture.id, texture.level)
# glFramebufferTexture2D(target, attachment, texture.target, texture.id, texture.level)
self._attachment_types |= attachment
self._width = max(texture.width, self._width)
self._height = max(texture.height, self._height)
self.unbind()
def attach_texture_layer(self, texture, layer, level, target=GL_FRAMEBUFFER, attachment=GL_COLOR_ATTACHMENT0):
"""Attach a Texture layer to the Framebuffer
:Parameters:
`texture` : pyglet.image.TextureArray
Specifies the texture object to attach to the framebuffer attachment
point named by attachment.
`layer` : int
Specifies the layer of texture to attach.
`level` : int
Specifies the mipmap level of texture to attach.
`target` : int
Specifies the framebuffer target. target must be GL_DRAW_FRAMEBUFFER,
GL_READ_FRAMEBUFFER, or GL_FRAMEBUFFER. GL_FRAMEBUFFER is equivalent
to GL_DRAW_FRAMEBUFFER.
`attachment` : int
Specifies the attachment point of the framebuffer. attachment must be
GL_COLOR_ATTACHMENTi, GL_DEPTH_ATTACHMENT, GL_STENCIL_ATTACHMENT or
GL_DEPTH_STENCIL_ATTACHMENT.
"""
self.bind()
glFramebufferTextureLayer(target, attachment, texture.id, level, layer)
self._attachment_types |= attachment
self._width = max(texture.width, self._width)
self._height = max(texture.height, self._height)
self.unbind()
def attach_renderbuffer(self, renderbuffer, target=GL_FRAMEBUFFER, attachment=GL_COLOR_ATTACHMENT0):
""""Attach a Renderbuffer to the Framebuffer
:Parameters:
`renderbuffer` : pyglet.image.Renderbuffer
Specifies the Renderbuffer to attach to the framebuffer attachment
point named by attachment.
`target` : int
Specifies the framebuffer target. target must be GL_DRAW_FRAMEBUFFER,
GL_READ_FRAMEBUFFER, or GL_FRAMEBUFFER. GL_FRAMEBUFFER is equivalent
to GL_DRAW_FRAMEBUFFER.
`attachment` : int
Specifies the attachment point of the framebuffer. attachment must be
GL_COLOR_ATTACHMENTi, GL_DEPTH_ATTACHMENT, GL_STENCIL_ATTACHMENT or
GL_DEPTH_STENCIL_ATTACHMENT.
"""
self.bind()
glFramebufferRenderbuffer(target, attachment, GL_RENDERBUFFER, renderbuffer.id)
self._attachment_types |= attachment
self._width = max(renderbuffer.width, self._width)
self._height = max(renderbuffer.height, self._height)
self.unbind()
def __del__(self):
try:
glDeleteFramebuffers(1, self._id)
# Python interpreter is shutting down:
except ImportError:
pass
def __repr__(self):
return "{}(id={})".format(self.__class__.__name__, self._id.value)
| gpl-3.0 | 933,900,144,839,290,800 | 37.395918 | 114 | 0.635059 | false |
njncalub/animakyoukai-san | animakyoukaisan/urls.py | 1 | 1035 | from django.conf import settings
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.http import HttpResponseRedirect
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/', include(admin.site.urls)),
url(r'^account/', include('django.contrib.auth.urls')),
url(r'^update/$', 'profiling.views.index_page', name='index_page'),
url(r'^register/$', 'profiling.views.registration_page', name='registration_page'),
url(r'^login/$', 'profiling.views.login_page', name='login_page'),
url(r'^logout/$', 'profiling.views.logout_page', name='logout_page'),
url(r'^forgot/$', 'profiling.views.reset_password_page', name='reset_password_page'),
url(r'^tinymce/', include('tinymce.urls')),
) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += patterns('',
url(r'^$', lambda x: HttpResponseRedirect('/update/'), name='home_page'),
)
| gpl-2.0 | -5,553,957,187,897,016,000 | 42.125 | 92 | 0.6657 | false |
glogiotatidis/mopidy | mopidy/backend.py | 1 | 12306 | from __future__ import absolute_import, unicode_literals
from mopidy import listener, models
class Backend(object):
"""Backend API
If the backend has problems during initialization it should raise
:exc:`mopidy.exceptions.BackendError` with a descriptive error message.
This will make Mopidy print the error message and exit so that the user can
fix the issue.
:param config: the entire Mopidy configuration
:type config: dict
:param audio: actor proxy for the audio subsystem
:type audio: :class:`pykka.ActorProxy` for :class:`mopidy.audio.Audio`
"""
#: Actor proxy to an instance of :class:`mopidy.audio.Audio`.
#:
#: Should be passed to the backend constructor as the kwarg ``audio``,
#: which will then set this field.
audio = None
#: The library provider. An instance of
#: :class:`~mopidy.backend.LibraryProvider`, or :class:`None` if
#: the backend doesn't provide a library.
library = None
#: The playback provider. An instance of
#: :class:`~mopidy.backend.PlaybackProvider`, or :class:`None` if
#: the backend doesn't provide playback.
playback = None
#: The playlists provider. An instance of
#: :class:`~mopidy.backend.PlaylistsProvider`, or class:`None` if
#: the backend doesn't provide playlists.
playlists = None
#: List of URI schemes this backend can handle.
uri_schemes = []
# Because the providers is marked as pykka_traversible, we can't get() them
# from another actor, and need helper methods to check if the providers are
# set or None.
def has_library(self):
return self.library is not None
def has_library_browse(self):
return self.has_library() and self.library.root_directory is not None
def has_playback(self):
return self.playback is not None
def has_playlists(self):
return self.playlists is not None
def ping(self):
"""Called to check if the actor is still alive."""
return True
class LibraryProvider(object):
"""
:param backend: backend the controller is a part of
:type backend: :class:`mopidy.backend.Backend`
"""
pykka_traversable = True
root_directory = None
"""
:class:`mopidy.models.Ref.directory` instance with a URI and name set
representing the root of this library's browse tree. URIs must
use one of the schemes supported by the backend, and name should
be set to a human friendly value.
*MUST be set by any class that implements* :meth:`LibraryProvider.browse`.
"""
def __init__(self, backend):
self.backend = backend
def browse(self, uri):
"""
See :meth:`mopidy.core.LibraryController.browse`.
If you implement this method, make sure to also set
:attr:`root_directory`.
*MAY be implemented by subclass.*
"""
return []
def get_distinct(self, field, query=None):
"""
See :meth:`mopidy.core.LibraryController.get_distinct`.
*MAY be implemented by subclass.*
Default implementation will simply return an empty set.
"""
return set()
def get_images(self, uris):
"""
See :meth:`mopidy.core.LibraryController.get_images`.
*MAY be implemented by subclass.*
Default implementation will simply call lookup and try and use the
album art for any tracks returned. Most extensions should replace this
with something smarter or simply return an empty dictionary.
"""
result = {}
for uri in uris:
image_uris = set()
for track in self.lookup(uri):
if track.album and track.album.images:
image_uris.update(track.album.images)
result[uri] = [models.Image(uri=u) for u in image_uris]
return result
def lookup(self, uri):
"""
See :meth:`mopidy.core.LibraryController.lookup`.
*MUST be implemented by subclass.*
"""
raise NotImplementedError
def refresh(self, uri=None):
"""
See :meth:`mopidy.core.LibraryController.refresh`.
*MAY be implemented by subclass.*
"""
pass
def search(self, query=None, uris=None, exact=False):
"""
See :meth:`mopidy.core.LibraryController.search`.
*MAY be implemented by subclass.*
.. versionadded:: 1.0
The ``exact`` param which replaces the old ``find_exact``.
"""
pass
class PlaybackProvider(object):
"""
:param audio: the audio actor
:type audio: actor proxy to an instance of :class:`mopidy.audio.Audio`
:param backend: the backend
:type backend: :class:`mopidy.backend.Backend`
"""
pykka_traversable = True
def __init__(self, audio, backend):
self.audio = audio
self.backend = backend
def pause(self):
"""
Pause playback.
*MAY be reimplemented by subclass.*
:rtype: :class:`True` if successful, else :class:`False`
"""
return self.audio.pause_playback().get()
def play(self):
"""
Start playback.
*MAY be reimplemented by subclass.*
:rtype: :class:`True` if successful, else :class:`False`
"""
return self.audio.start_playback().get()
def prepare_change(self):
"""
Indicate that an URI change is about to happen.
*MAY be reimplemented by subclass.*
It is extremely unlikely it makes sense for any backends to override
this. For most practical purposes it should be considered an internal
call between backends and core that backend authors should not touch.
"""
self.audio.prepare_change().get()
def translate_uri(self, uri):
"""
Convert custom URI scheme to real playable URI.
*MAY be reimplemented by subclass.*
This is very likely the *only* thing you need to override as a backend
author. Typically this is where you convert any Mopidy specific URI
to a real URI and then return it. If you can't convert the URI just
return :class:`None`.
:param uri: the URI to translate
:type uri: string
:rtype: string or :class:`None` if the URI could not be translated
"""
return uri
def change_track(self, track):
"""
Swith to provided track.
*MAY be reimplemented by subclass.*
It is unlikely it makes sense for any backends to override
this. For most practical purposes it should be considered an internal
call between backends and core that backend authors should not touch.
The default implementation will call :meth:`translate_uri` which
is what you want to implement.
:param track: the track to play
:type track: :class:`mopidy.models.Track`
:rtype: :class:`True` if successful, else :class:`False`
"""
uri = self.translate_uri(track.uri)
if not uri:
return False
self.audio.set_uri(uri).get()
return True
def resume(self):
"""
Resume playback at the same time position playback was paused.
*MAY be reimplemented by subclass.*
:rtype: :class:`True` if successful, else :class:`False`
"""
return self.audio.start_playback().get()
def seek(self, time_position):
"""
Seek to a given time position.
*MAY be reimplemented by subclass.*
:param time_position: time position in milliseconds
:type time_position: int
:rtype: :class:`True` if successful, else :class:`False`
"""
return self.audio.set_position(time_position).get()
def stop(self):
"""
Stop playback.
*MAY be reimplemented by subclass.*
Should not be used for tracking if tracks have been played or when we
are done playing them.
:rtype: :class:`True` if successful, else :class:`False`
"""
return self.audio.stop_playback().get()
def get_time_position(self):
"""
Get the current time position in milliseconds.
*MAY be reimplemented by subclass.*
:rtype: int
"""
return self.audio.get_position().get()
class PlaylistsProvider(object):
"""
A playlist provider exposes a collection of playlists, methods to
create/change/delete playlists in this collection, and lookup of any
playlist the backend knows about.
:param backend: backend the controller is a part of
:type backend: :class:`mopidy.backend.Backend` instance
"""
pykka_traversable = True
def __init__(self, backend):
self.backend = backend
def as_list(self):
"""
Get a list of the currently available playlists.
Returns a list of :class:`~mopidy.models.Ref` objects referring to the
playlists. In other words, no information about the playlists' content
is given.
:rtype: list of :class:`mopidy.models.Ref`
.. versionadded:: 1.0
"""
raise NotImplementedError
def get_items(self, uri):
"""
Get the items in a playlist specified by ``uri``.
Returns a list of :class:`~mopidy.models.Ref` objects referring to the
playlist's items.
If a playlist with the given ``uri`` doesn't exist, it returns
:class:`None`.
:rtype: list of :class:`mopidy.models.Ref`, or :class:`None`
.. versionadded:: 1.0
"""
raise NotImplementedError
def create(self, name):
"""
Create a new empty playlist with the given name.
Returns a new playlist with the given name and an URI.
*MUST be implemented by subclass.*
:param name: name of the new playlist
:type name: string
:rtype: :class:`mopidy.models.Playlist`
"""
raise NotImplementedError
def delete(self, uri):
"""
Delete playlist identified by the URI.
*MUST be implemented by subclass.*
:param uri: URI of the playlist to delete
:type uri: string
"""
raise NotImplementedError
def lookup(self, uri):
"""
Lookup playlist with given URI in both the set of playlists and in any
other playlist source.
Returns the playlists or :class:`None` if not found.
*MUST be implemented by subclass.*
:param uri: playlist URI
:type uri: string
:rtype: :class:`mopidy.models.Playlist` or :class:`None`
"""
raise NotImplementedError
def refresh(self):
"""
Refresh the playlists in :attr:`playlists`.
*MUST be implemented by subclass.*
"""
raise NotImplementedError
def save(self, playlist):
"""
Save the given playlist.
The playlist must have an ``uri`` attribute set. To create a new
playlist with an URI, use :meth:`create`.
Returns the saved playlist or :class:`None` on failure.
*MUST be implemented by subclass.*
:param playlist: the playlist to save
:type playlist: :class:`mopidy.models.Playlist`
:rtype: :class:`mopidy.models.Playlist` or :class:`None`
"""
raise NotImplementedError
class BackendListener(listener.Listener):
"""
Marker interface for recipients of events sent by the backend actors.
Any Pykka actor that mixes in this class will receive calls to the methods
defined here when the corresponding events happen in a backend actor. This
interface is used both for looking up what actors to notify of the events,
and for providing default implementations for those listeners that are not
interested in all events.
Normally, only the Core actor should mix in this class.
"""
@staticmethod
def send(event, **kwargs):
"""Helper to allow calling of backend listener events"""
listener.send_async(BackendListener, event, **kwargs)
def playlists_loaded(self):
"""
Called when playlists are loaded or refreshed.
*MAY* be implemented by actor.
"""
pass
| apache-2.0 | -5,506,081,038,595,665,000 | 27.887324 | 79 | 0.622461 | false |
CMPUT410W15/cmput410-project | socialdistribution/urls.py | 1 | 1265 | from django.conf.urls import patterns, include, url
from django.contrib import admin
from login.views import *
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.conf import settings
from django.conf.urls.static import static
from posts.views import *
from images.views import *
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'socialdistribution.views.home', name='home'),
url(r'^admin/', include(admin.site.urls)),
url(r'^friends/', include('author.urls')),
url(r'^api/', include('api.urls')),
url(r'^post/', include('posts.urls')),
url(r'^images/', include('images.urls')),
url(r'^$', home),
url(r'^logout/$', logout_page),
url(r'^accounts/login/$', 'django.contrib.auth.views.login'), # If user is not login it will redirect to login page
url(r'^register/$', register),
url(r'^register/success/$', register_success),
url(r'^home/$', home),
url(r'^home/([^/]+)/$', authorhome),
url(r'^home/author/posts/$',personal_stream),
url(r'^home/author/posts/friends/$',personal_stream_friends),
url(r'^post/(?P<post_id>[\w-]+)/$', comment, name="add_comment"),
)+ static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
urlpatterns += staticfiles_urlpatterns()
| apache-2.0 | 8,455,670,157,464,625,000 | 36.205882 | 119 | 0.670356 | false |
hb9kns/PyBitmessage | src/bitmessageqt/dialogs.py | 1 | 2513 | from PyQt4 import QtGui
from tr import _translate
from retranslateui import RetranslateMixin
import widgets
from newchandialog import NewChanDialog
from address_dialogs import (
AddAddressDialog, NewAddressDialog, NewSubscriptionDialog,
RegenerateAddressesDialog, SpecialAddressBehaviorDialog, EmailGatewayDialog
)
import paths
from version import softwareVersion
__all__ = [
"NewChanDialog", "AddAddressDialog", "NewAddressDialog",
"NewSubscriptionDialog", "RegenerateAddressesDialog",
"SpecialAddressBehaviorDialog", "EmailGatewayDialog"
]
class AboutDialog(QtGui.QDialog, RetranslateMixin):
def __init__(self, parent=None):
super(AboutDialog, self).__init__(parent)
widgets.load('about.ui', self)
last_commit = paths.lastCommit()
version = softwareVersion
commit = last_commit.get('commit')
if commit:
version += '-' + commit[:7]
self.labelVersion.setText(
self.labelVersion.text().replace(
':version:', version
).replace(':branch:', commit or 'v%s' % version)
)
self.labelVersion.setOpenExternalLinks(True)
try:
self.label_2.setText(
self.label_2.text().replace(
'2017', str(last_commit.get('time').year)
))
except AttributeError:
pass
self.setFixedSize(QtGui.QWidget.sizeHint(self))
class IconGlossaryDialog(QtGui.QDialog, RetranslateMixin):
def __init__(self, parent=None, config=None):
super(IconGlossaryDialog, self).__init__(parent)
widgets.load('iconglossary.ui', self)
# FIXME: check the window title visibility here
self.groupBox.setTitle('')
self.labelPortNumber.setText(_translate(
"iconGlossaryDialog",
"You are using TCP port %1. (This can be changed in the settings)."
).arg(config.getint('bitmessagesettings', 'port')))
self.setFixedSize(QtGui.QWidget.sizeHint(self))
class HelpDialog(QtGui.QDialog, RetranslateMixin):
def __init__(self, parent=None):
super(HelpDialog, self).__init__(parent)
widgets.load('help.ui', self)
self.setFixedSize(QtGui.QWidget.sizeHint(self))
class ConnectDialog(QtGui.QDialog, RetranslateMixin):
def __init__(self, parent=None):
super(ConnectDialog, self).__init__(parent)
widgets.load('connect.ui', self)
self.setFixedSize(QtGui.QWidget.sizeHint(self))
| mit | 1,662,048,093,800,792,800 | 32.065789 | 79 | 0.652209 | false |
desihub/desisim | py/desisim/test/test_templates.py | 1 | 13874 | from __future__ import division
import os
import unittest
import numpy as np
from astropy.table import Table, Column
from desisim.templates import ELG, LRG, QSO, BGS, STAR, STD, MWS_STAR, WD, SIMQSO
from desisim import lya_mock_p1d as lyamock
desimodel_data_available = 'DESIMODEL' in os.environ
desi_templates_available = 'DESI_ROOT' in os.environ
desi_basis_templates_available = 'DESI_BASIS_TEMPLATES' in os.environ
class TestTemplates(unittest.TestCase):
def setUp(self):
self.wavemin = 5000
self.wavemax = 8000
self.dwave = 2.0
self.wave = np.arange(self.wavemin, self.wavemax+self.dwave/2, self.dwave)
self.nspec = 5
self.seed = np.random.randint(2**32)
self.rand = np.random.RandomState(self.seed)
def _check_output_size(self, flux, wave, meta):
self.assertEqual(len(meta), self.nspec)
self.assertEqual(len(wave), len(self.wave))
self.assertEqual(flux.shape, (self.nspec, len(self.wave)))
@unittest.skipUnless(desi_basis_templates_available, '$DESI_BASIS_TEMPLATES was not detected.')
def test_simple_south(self):
'''Confirm that creating templates works at all'''
print('In function test_simple_south, seed = {}'.format(self.seed))
for T in [ELG, LRG, QSO, BGS, STAR, STD, MWS_STAR, WD, SIMQSO]:
template_factory = T(wave=self.wave)
flux, wave, meta, _ = template_factory.make_templates(self.nspec, seed=self.seed, south=True)
self._check_output_size(flux, wave, meta)
@unittest.skipUnless(desi_basis_templates_available, '$DESI_BASIS_TEMPLATES was not detected.')
def test_simple_north(self):
'''Confirm that creating templates works at all'''
print('In function test_simple_north, seed = {}'.format(self.seed))
for T in [ELG, LRG, QSO, BGS, STAR, STD, MWS_STAR, WD, SIMQSO]:
template_factory = T(wave=self.wave)
flux, wave, meta, _ = template_factory.make_templates(self.nspec, seed=self.seed, south=False)
self._check_output_size(flux, wave, meta)
@unittest.skipUnless(desi_basis_templates_available, '$DESI_BASIS_TEMPLATES was not detected.')
def test_restframe(self):
'''Confirm restframe template creation for a galaxy and a star'''
print('In function test_simple, seed = {}'.format(self.seed))
for T in [ELG, MWS_STAR]:
template_factory = T(wave=self.wave)
flux, wave, meta, _ = template_factory.make_templates(self.nspec, seed=self.seed, restframe=True)
self.assertEqual(len(wave), len(template_factory.basewave))
def test_input_wave(self):
'''Confirm that we can specify the wavelength array.'''
print('In function test_input_wave, seed = {}'.format(self.seed))
lrg = LRG(minwave=self.wavemin, maxwave=self.wavemax, cdelt=self.dwave)
flux, wave, meta, _ = lrg.make_templates(self.nspec, seed=self.seed)
self._check_output_size(flux, wave, meta)
@unittest.skipUnless(desi_basis_templates_available, '$DESI_BASIS_TEMPLATES was not detected.')
def test_random_seed(self):
'''Test that random seed works to get the same results back'''
print('In function test_input_random_seed, seed = {}'.format(self.seed))
for T in [ELG, QSO, MWS_STAR, SIMQSO]:
Tx = T(wave=self.wave)
flux1, wave1, meta1, _ = Tx.make_templates(self.nspec, seed=1)
flux2, wave2, meta2, _ = Tx.make_templates(self.nspec, seed=1)
flux3, wave3, meta3, _ = Tx.make_templates(self.nspec, seed=2)
self.assertTrue(np.all(flux1==flux2))
self.assertTrue(np.any(flux1!=flux3))
self.assertTrue(np.all(wave1==wave2))
@unittest.skipUnless(desi_basis_templates_available, '$DESI_BASIS_TEMPLATES was not detected.')
def test_OII(self):
'''Confirm that ELG [OII] flux matches meta table description'''
print('In function test_OII, seed = {}'.format(self.seed))
wave = np.arange(5000, 9800.1, 0.2)
flux, ww, meta, objmeta = ELG(wave=wave).make_templates(seed=self.seed,
nmodel=10, zrange=(0.6, 1.6),
logvdisp_meansig = [np.log10(75), 0.0],
nocolorcuts=True, nocontinuum=True)
for i in range(len(meta)):
z = meta['REDSHIFT'][i]
ii = (3722*(1+z) < wave) & (wave < 3736*(1+z))
OIIflux = 1e-17 * np.sum(flux[i,ii] * np.gradient(wave[ii]))
self.assertAlmostEqual(OIIflux, objmeta['OIIFLUX'][i], 2)
@unittest.skipUnless(desi_basis_templates_available, '$DESI_BASIS_TEMPLATES was not detected.')
def test_HBETA(self):
'''Confirm that BGS H-beta flux matches meta table description'''
print('In function test_HBETA, seed = {}'.format(self.seed))
wave = np.arange(5000, 7000.1, 0.2)
# Need to choose just the star-forming galaxies.
from desisim.io import read_basis_templates
baseflux, basewave, basemeta = read_basis_templates(objtype='BGS')
keep = np.where(basemeta['HBETA_LIMIT'] == 0)[0]
bgs = BGS(wave=wave, basewave=basewave, baseflux=baseflux[keep, :],
basemeta=basemeta[keep])
flux, ww, meta, objmeta = bgs.make_templates(seed=self.seed,
nmodel=10, zrange=(0.05, 0.4),
logvdisp_meansig=[np.log10(75),0.0],
nocolorcuts=True, nocontinuum=True)
for i in range(len(meta)):
z = meta['REDSHIFT'][i]
ii = (4854*(1+z) < wave) & (wave < 4868*(1+z))
hbetaflux = 1e-17 * np.sum(flux[i,ii] * np.gradient(wave[ii]))
self.assertAlmostEqual(hbetaflux, objmeta['HBETAFLUX'][i], 2)
@unittest.skipUnless(desi_basis_templates_available, '$DESI_BASIS_TEMPLATES was not detected.')
def test_input_redshift(self):
'''Test that we can input the redshift for a representative galaxy and star class.'''
print('In function test_input_redshift, seed = {}'.format(self.seed))
zrange = np.array([(0.5, 1.0), (0.5, 4.0), (-0.003, 0.003)])
for zminmax, T in zip(zrange, [LRG, QSO, STAR, SIMQSO]):
redshift = np.random.uniform(zminmax[0], zminmax[1], self.nspec)
Tx = T(wave=self.wave)
flux, wave, meta, _ = Tx.make_templates(self.nspec, redshift=redshift, seed=self.seed)
self.assertTrue(np.allclose(redshift, meta['REDSHIFT']))
@unittest.skipUnless(desi_basis_templates_available, '$DESI_BASIS_TEMPLATES was not detected.')
def test_wd_subtype(self):
'''Test option of specifying the white dwarf subtype.'''
print('In function test_wd_subtype, seed = {}'.format(self.seed))
wd = WD(wave=self.wave, subtype='DA')
flux, wave, meta, _ = wd.make_templates(self.nspec, seed=self.seed, nocolorcuts=True)
self._check_output_size(flux, wave, meta)
np.all(meta['SUBTYPE'] == 'DA')
wd = WD(wave=self.wave, subtype='DB')
flux, wave, meta, _ = wd.make_templates(self.nspec, seed=self.seed, nocolorcuts=True)
np.all(meta['SUBTYPE'] == 'DB')
@unittest.skipUnless(desi_basis_templates_available, '$DESI_BASIS_TEMPLATES was not detected.')
@unittest.expectedFailure
def test_wd_subtype_failure(self):
'''Test a known failure of specifying the white dwarf subtype.'''
print('In function test_wd_subtype_failure, seed = {}'.format(self.seed))
wd = WD(wave=self.wave, subtype='DA')
flux1, wave1, meta1, _ = wd.make_templates(self.nspec, seed=self.seed, nocolorcuts=True)
meta1['SUBTYPE'][0] = 'DB'
flux2, wave2, meta2, _ = wd.make_templates(input_meta=meta1)
@unittest.skipUnless(desi_basis_templates_available, '$DESI_BASIS_TEMPLATES was not detected.')
def test_input_meta(self):
'''Test that input meta table option works.'''
print('In function test_input_meta, seed = {}'.format(self.seed))
for T in [ELG, LRG, BGS, QSO, STAR, MWS_STAR, WD]:
Tx = T(wave=self.wave)
flux1, wave1, meta1, objmeta1 = Tx.make_templates(self.nspec, seed=self.seed)
if 'VDISP' in objmeta1.colnames:
vdisp = objmeta1['VDISP'].data
flux2, wave2, meta2, objmeta2 = Tx.make_templates(input_meta=meta1, vdisp=vdisp)
else:
flux2, wave2, meta2, objmeta2 = Tx.make_templates(input_meta=meta1)
badkeys = list()
for key in meta1.colnames:
if key in ('REDSHIFT', 'MAG', 'SEED', 'FLUX_G',
'FLUX_R', 'FLUX_Z', 'FLUX_W1', 'FLUX_W2'):
#- not sure why the tolerances aren't closer
if not np.allclose(meta1[key], meta2[key], rtol=1e-4):
print(meta1['OBJTYPE'][0], key, meta1[key], meta2[key])
badkeys.append(key)
else:
if not np.all(meta1[key] == meta2[key]):
badkeys.append(key)
self.assertEqual(len(badkeys), 0, 'mismatch for spectral type {} in keys {}'.format(meta1['OBJTYPE'][0], badkeys))
# if np.all(np.allclose(flux1, flux2, rtol=1e-3)) is False:
# import pdb ; pdb.set_trace()
self.assertTrue(np.all(np.isclose(flux1, flux2, atol=1e-3)))
#self.assertTrue(np.allclose(flux1, flux2, rtol=1e-4))
self.assertTrue(np.all(wave1 == wave2))
@unittest.skipUnless(desi_basis_templates_available, '$DESI_BASIS_TEMPLATES was not detected.')
def test_star_properties(self):
'''Test that input data table option works.'''
print('In function test_star_properties, seed = {}'.format(self.seed))
star_properties = Table()
star_properties.add_column(Column(name='REDSHIFT', length=self.nspec, dtype='f4'))
star_properties.add_column(Column(name='MAG', length=self.nspec, dtype='f4'))
star_properties.add_column(Column(name='MAGFILTER', length=self.nspec, dtype='U15'))
star_properties.add_column(Column(name='TEFF', length=self.nspec, dtype='f4'))
star_properties.add_column(Column(name='LOGG', length=self.nspec, dtype='f4'))
star_properties.add_column(Column(name='FEH', length=self.nspec, dtype='f4'))
star_properties['REDSHIFT'] = self.rand.uniform(-5E-4, 5E-4, self.nspec)
star_properties['MAG'] = self.rand.uniform(16, 19, self.nspec)
star_properties['MAGFILTER'][:] = 'decam2014-r'
star_properties['TEFF'] = self.rand.uniform(4000, 10000, self.nspec)
star_properties['LOGG'] = self.rand.uniform(0.5, 5.0, self.nspec)
star_properties['FEH'] = self.rand.uniform(-2.0, 0.0, self.nspec)
for T in [STAR]:
Tx = T(wave=self.wave)
flux, wave, meta, objmeta = Tx.make_templates(star_properties=star_properties, seed=self.seed)
badkeys = list()
for key in ('REDSHIFT', 'MAG'):
if not np.allclose(meta[key], star_properties[key]):
badkeys.append(key)
for key in ('TEFF', 'LOGG', 'FEH'):
if not np.allclose(objmeta[key], star_properties[key]):
badkeys.append(key)
self.assertEqual(len(badkeys), 0, 'mismatch for spectral type {} in keys {}'.format(meta['OBJTYPE'][0], badkeys))
def test_lyamock_seed(self):
'''Test that random seed works to get the same results back'''
print('In function test_lyamock_seed, seed = {}'.format(self.seed))
mock = lyamock.MockMaker()
wave1, flux1 = mock.get_lya_skewers(self.nspec, new_seed=1)
wave2, flux2 = mock.get_lya_skewers(self.nspec, new_seed=1)
wave3, flux3 = mock.get_lya_skewers(self.nspec, new_seed=2)
self.assertTrue(np.all(flux1==flux2))
self.assertTrue(np.any(flux1!=flux3))
self.assertTrue(np.all(wave1==wave2))
@unittest.skipUnless(desi_basis_templates_available, '$DESI_BASIS_TEMPLATES was not detected.')
def test_meta(self):
'''Test the metadata tables have the columns we expect'''
print('In function test_meta, seed = {}'.format(self.seed))
for T in [ELG, LRG, BGS, STAR, STD, MWS_STAR, WD, QSO]:
template_factory = T(wave=self.wave)
flux, wave, meta, objmeta = template_factory.make_templates(self.nspec, seed=self.seed)
self.assertTrue(np.all(np.in1d(['TARGETID', 'OBJTYPE', 'SUBTYPE', 'TEMPLATEID', 'SEED',
'REDSHIFT', 'MAG', 'MAGFILTER', 'FLUX_G', 'FLUX_R',
'FLUX_Z', 'FLUX_W1', 'FLUX_W2'],
meta.colnames)))
if ( isinstance(template_factory, ELG) or isinstance(template_factory, LRG) or
isinstance(template_factory, BGS) ):
self.assertTrue(np.all(np.in1d(['TARGETID', 'OIIFLUX', 'HBETAFLUX', 'EWOII', 'EWHBETA',
'D4000', 'VDISP', 'OIIDOUBLET', 'OIIIHBETA', 'OIIHBETA',
'NIIHBETA', 'SIIHBETA'],
objmeta.colnames)))
if (isinstance(template_factory, STAR) or isinstance(template_factory, STD) or
isinstance(template_factory, MWS_STAR) ):
self.assertTrue(np.all(np.in1d(['TARGETID', 'TEFF', 'LOGG', 'FEH'], objmeta.colnames)))
if isinstance(template_factory, WD):
self.assertTrue(np.all(np.in1d(['TARGETID', 'TEFF', 'LOGG'], objmeta.colnames)))
if isinstance(template_factory, QSO):
self.assertTrue(np.all(np.in1d(['TARGETID', 'PCA_COEFF'], objmeta.colnames)))
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | -7,112,978,407,730,016,000 | 54.2749 | 126 | 0.60235 | false |
phiros/nepi | src/nepi/resources/ns3/classes/single_model_spectrum_channel.py | 1 | 3165 | #
# NEPI, a framework to manage network experiments
# Copyright (C) 2014 INRIA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from nepi.execution.attribute import Attribute, Flags, Types
from nepi.execution.trace import Trace, TraceAttr
from nepi.execution.resource import ResourceManager, clsinit_copy, \
ResourceState
from nepi.resources.ns3.ns3channel import NS3BaseChannel
@clsinit_copy
class NS3SingleModelSpectrumChannel(NS3BaseChannel):
_rtype = "ns3::SingleModelSpectrumChannel"
@classmethod
def _register_attributes(cls):
attr_maxlossdb = Attribute("MaxLossDb",
"If a single-frequency PropagationLossModel is used, this value represents the maximum loss in dB for which transmissions will be passed to the receiving PHY. Signals for which the PropagationLossModel returns a loss bigger than this value will not be propagated to the receiver. This parameter is to be used to reduce the computational load by not propagating signals that are far beyond the interference range. Note that the default value corresponds to considering all signals for reception. Tune this value with care. ",
type = Types.Double,
default = "1e+09",
allowed = None,
range = None,
flags = Flags.Reserved | Flags.Construct)
cls._register_attribute(attr_maxlossdb)
attr_id = Attribute("Id",
"The id (unique integer) of this Channel.",
type = Types.Integer,
default = "0",
allowed = None,
range = None,
flags = Flags.Reserved | Flags.NoWrite)
cls._register_attribute(attr_id)
@classmethod
def _register_traces(cls):
pathloss = Trace("PathLoss", "This trace is fired whenever a new path loss value is calculated. The first and second parameters to the trace are pointers respectively to the TX and RX SpectrumPhy instances, whereas the third parameters is the loss value in dB. Note that the loss value reported by this trace is the single-frequency loss value obtained by evaluating only the TX and RX AntennaModels and the PropagationLossModel. In particular, note that SpectrumPropagationLossModel (even if present) is never used to evaluate the loss value reported in this trace. ")
cls._register_trace(pathloss)
def __init__(self, ec, guid):
super(NS3SingleModelSpectrumChannel, self).__init__(ec, guid)
self._home = "ns3-single-model-spectrum-channel-%s" % self.guid
| gpl-3.0 | 3,667,919,845,605,207,000 | 47.692308 | 577 | 0.706793 | false |
fishroot/nemoa | nemoa/file/nplot.py | 1 | 16627 | # -*- coding: utf-8 -*-
"""Common function for creating plots with matplotlib."""
__author__ = 'Patrick Michl'
__email__ = '[email protected]'
__license__ = 'GPLv3'
__docformat__ = 'google'
import numpy as np
from nemoa.types import OptDict
class Plot:
"""Base class for matplotlib plots.
Export classes like Histogram, Heatmap or Graph share a common
interface to matplotlib, as well as certain plotting attributes.
This base class is intended to provide a unified interface to access
matplotlib and those attributes.
    Attributes:
        _config: Effective plot configuration, merged with increasing
            precedence from the class defaults, the subclass
            configuration and the constructor keyword arguments.
        _plt: Reference to the matplotlib.pyplot interface.
        _fig: Current matplotlib figure instance.
        _axes: Current matplotlib axes (subplot) instance.
"""
_default: dict = {
'fileformat': 'pdf',
'figure_size': (10.0, 6.0),
'dpi': None,
'bg_color': 'none',
'usetex': False,
'font_family': 'sans-serif',
'style': 'seaborn-white',
'title': None,
'show_title': True,
'title_fontsize': 14.0
}
_config: dict = {}
_kwds: dict = {}
_plt = None
_fig = None
_axes = None
def __init__(self, **kwds):
""" """
try:
import matplotlib
except ImportError as err:
raise ImportError(
"requires package matplotlib: "
"https://matplotlib.org") from err
# merge config from defaults, current config and keyword arguments
self._kwds = kwds
self._config = {**self._default, **self._config, **kwds}
# update global matplotlib settings
matplotlib.rc('text', usetex=self._config['usetex'])
matplotlib.rc('font', family=self._config['font_family'])
# link matplotlib.pyplot
import matplotlib.pyplot as plt
self._plt = plt
# close previous figures
plt.close('all')
# update plot settings
plt.style.use(self._config['style'])
# create figure
self._fig = plt.figure(
figsize=self._config['figure_size'],
dpi=self._config['dpi'],
facecolor=self._config['bg_color'])
# create subplot (matplotlib.axes.Axes)
self._axes = self._fig.add_subplot(111)
def set_default(self, config: OptDict = None) -> bool:
"""Set default values."""
self._config = {**self._config, **(config or {}), **self._kwds}
return True
def plot_title(self) -> bool:
"""Plot title."""
if not self._config['show_title']:
return False
title = self._config['title'] or 'Unknown'
fontsize = self._config['title_fontsize']
getattr(self._plt, 'title')(title, fontsize=fontsize)
return True
def show(self) -> None:
"""Show plot."""
getattr(self._plt, 'show')()
def save(self, path, **kwds):
"""Save plot to file."""
return self._fig.savefig(path, dpi=self._config['dpi'], **kwds)
def release(self):
"""Clear current plot."""
return self._fig.clear()
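
# Usage sketch: settings are merged with increasing precedence from
# Plot._default, the subclass _config and the constructor keyword
# arguments; the lifecycle is construct, plot, save or show, release.
# For instance, with the Histogram subclass defined below and a
# hypothetical 1d numpy array `data`:
#
#     plot = Histogram(title='Sample distribution', bins=50)
#     plot.plot(data)
#     plot.save('histogram.pdf')  # or plot.show()
#     plot.release()
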
class Heatmap(Plot):
""" """
_config = {
'interpolation': 'nearest',
'grid': True
}
def plot(self, array):
""" """
try:
from matplotlib.cm import hot_r
except ImportError as err:
raise ImportError(
"requires package matplotlib: "
"https://matplotlib.org") from err
# plot grid
self._axes.grid(self._config['grid'])
# plot heatmap
cax = self._axes.imshow(
array,
cmap=hot_r,
interpolation=self._config['interpolation'],
extent=(0, array.shape[1], 0, array.shape[0]))
# create labels for axis
max_font_size = 12.
x_labels = []
for label in self._config['x_labels']:
if ':' in label:
label = label.split(':', 1)[1]
x_labels.append(get_texlabel(label))
y_labels = []
for label in self._config['y_labels']:
if ':' in label:
label = label.split(':', 1)[1]
y_labels.append(get_texlabel(label))
        fontsize = min(
            max_font_size,
            400. / float(max(len(x_labels), len(y_labels))))
self._plt.xticks(
np.arange(len(x_labels)) + 0.5,
tuple(x_labels), fontsize=fontsize, rotation=65)
self._plt.yticks(
len(y_labels) - np.arange(len(y_labels)) - 0.5,
tuple(y_labels), fontsize=fontsize)
# create colorbar
cbar = self._fig.colorbar(cax)
for tick in cbar.ax.get_yticklabels():
tick.set_fontsize(9)
# (optional) plot title
self.plot_title()
return True
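
# Usage sketch: Heatmap.plot() reads the axis labels from the
# configuration, so 'x_labels' and 'y_labels' have to be passed as
# keyword arguments. The array `matrix` and the label lists `cols`
# and `rows` are hypothetical:
#
#     plot = Heatmap(title='Correlation', x_labels=cols, y_labels=rows)
#     plot.plot(matrix)
#     plot.show()
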
class Histogram(Plot):
""" """
_config = {
'bins': 100,
'facecolor': 'lightgrey',
'edgecolor': 'black',
'histtype': 'bar',
'linewidth': 0.5,
'grid': True
}
def plot(self, array):
""" """
# plot grid
self._axes.grid(self._config['grid'])
# plot histogram
self._axes.hist(
array,
bins=self._config['bins'],
facecolor=self._config['facecolor'],
histtype=self._config['histtype'],
linewidth=self._config['linewidth'],
edgecolor=self._config['edgecolor'])
# (optional) plot title
self.plot_title()
return True
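
# Usage sketch: any keyword argument overrides the corresponding class
# default, e.g. the number of bins and the face color (`data` is a
# hypothetical 1d numpy array):
#
#     plot = Histogram(bins=25, facecolor='steelblue')
#     plot.plot(data)
#     plot.show()
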
class Scatter2D(Plot):
""" """
_config = {
'grid': True,
'pca': True
}
@staticmethod
def _pca2d(array):
"""Calculate projection to largest two principal components."""
# get dimension of array
dim = array.shape[1]
# calculate covariance matrix
cov = np.cov(array.T)
        # calculate eigenvectors and eigenvalues
vals, vecs = np.linalg.eig(cov)
        # sort eigenvectors by absolute eigenvalues
pairs = [(np.abs(vals[i]), vecs[:, i]) for i in range(len(vals))]
pairs.sort(key=lambda x: x[0], reverse=True)
# calculate projection matrix
proj = np.hstack(
[pairs[0][1].reshape(dim, 1), pairs[1][1].reshape(dim, 1)])
# calculate projection
parray = np.dot(array, proj)
return parray
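    # In matrix notation the projection above reads X' = X W, where the
    # two columns of W are the eigenvectors of cov(X) with the largest
    # absolute eigenvalues.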
def plot(self, array):
""" """
# test arguments
if array.shape[1] != 2:
if self._config['pca']:
array = self._pca2d(array)
            else:
                raise TypeError(
                    "first argument is required to be an array "
                    "of shape (n, 2)")
x, y = array[:, 0], array[:, 1]
# plot grid
self._axes.grid(self._config['grid'])
# plot scattered data
self._axes.scatter(x, y)
# (optional) plot title
self.plot_title()
return True
class Graph(Plot):
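    """Graph plot for networkx graphs (e.g. layered network layouts)."""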
_config = {
'padding': (0.1, 0.1, 0.1, 0.1),
'show_legend': False,
'legend_fontsize': 9.0,
'graph_layout': 'layer',
'graph_direction': 'right',
'node_style': 'o',
'edge_width_enabled': True,
'edge_curvature': 1.0
}
def plot(self, G):
"""Plot graph.
Args:
G: networkx graph instance
figure_size (tuple): figure size in inches
(11.69,8.27) for A4, (16.53,11.69) for A3
edge_attribute (string): name of edge attribute, that
determines the edge colors by its sign and the edge width
by its absolute value.
default: 'weight'
edge_color (bool): flag for colored edges
True: edge colors are determined by the sign of the
attribute 'weight'
False: edges are black
edge_poscolor (string): name of color for edges with
positive signed attribute. For a full list of specified
color names see nemoa.base.nplot.get_color()
edge_negcolor (string): name of color for edges with
negative signed attribute. For a full list of specified
color names see nemoa.base.nplot.get_color()
            edge_curvature (float): value within the interval [-1, 1],
that determines the curvature of the edges.
Thereby 1 equals max convexity and -1 max concavity.
direction (string): string within the list ['up', 'down',
                'left', 'right'], that determines the plot direction of the
graph. 'up' means, the first layer is at the bottom.
edge_style (string): '-', '<-', '<->', '->',
'<|-', '<|-|>', '-|>', '|-', '|-|', '-|',
']-', ']-[', '-[', 'fancy', 'simple', 'wedge'
Returns:
            Boolean value which is True if no error occurred.
"""
try:
import matplotlib.patches
except ImportError as err:
raise ImportError(
"requires package matplotlib: "
"https://matplotlib.org") from err
try:
import networkx as nx
except ImportError as err:
raise ImportError(
"requires package networkx: "
"https://networkx.github.io") from err
from nemoa.base import ndict
from nemoa.math import graph
# adjust size of subplot
fig = self._fig
ax = self._axes
ax.set_autoscale_on(False)
figsize = fig.get_size_inches() * fig.dpi
ax.set_xlim(0., figsize[0])
ax.set_ylim(0., figsize[1])
ax.set_aspect('equal', 'box')
ax.axis('off')
# get node positions and sizes
layout_params = ndict.crop(self._config, 'graph_')
del layout_params['layout']
pos = graph.get_layout(
G, layout=self._config['graph_layout'], size=figsize,
padding=self._config['padding'], **layout_params)
sizes = graph.get_layout_normsize(pos)
node_size = sizes.get('node_size', None)
node_radius = sizes.get('node_radius', None)
line_width = sizes.get('line_width', None)
edge_width = sizes.get('edge_width', None)
font_size = sizes.get('font_size', None)
# get nodes and groups sorted by node attribute group_id
groups = graph.get_groups(G, attribute='group')
sorted_groups = sorted(
list(groups.keys()),
key=lambda g: 0 if not isinstance(g, list) or not g \
else G.node.get(g[0], {}).get('group_id', 0))
# draw nodes, labeled by groups
for group in sorted_groups:
gnodes = groups.get(group, [])
if not gnodes:
continue
refnode = G.node.get(gnodes[0])
label = refnode['description'] or refnode['group'] or str(group)
# draw nodes in group
node_obj = nx.draw_networkx_nodes(
G, pos, nodelist=gnodes, linewidths=line_width,
node_size=node_size, node_shape=self._config['node_style'],
node_color=get_color(refnode['color'], 'white'), label=label)
node_obj.set_edgecolor(
get_color(refnode['border_color'], 'black'))
# draw node labels
for node, data in G.nodes(data=True):
# determine label, fontsize and color
node_label = data.get('label', str(node).title())
node_label_format = get_texlabel(node_label)
node_label_size = np.sqrt(get_texlabel_width(node_label))
font_color = get_color(data['font_color'], 'black')
# draw node label
nx.draw_networkx_labels(
G, pos, labels={node: node_label_format},
font_size=font_size / node_label_size, font_color=font_color,
font_family='sans-serif', font_weight='normal')
# patch node for edges
circle = matplotlib.patches.Circle(
pos.get(node), alpha=0., radius=node_radius)
ax.add_patch(circle)
G.node[node]['patch'] = circle
# draw edges
seen = {}
if graph.is_directed(G):
default_edge_style = '-|>'
else: default_edge_style = '-'
for (u, v, data) in G.edges(data=True):
weight = data['weight']
if weight == 0.:
continue
# calculate edge curvature from node positions
# parameter rad describes the height in the normalized triangle
if (u, v) in seen:
rad = seen.get((u, v))
rad = -(rad + float(np.sign(rad)) * .2)
else:
scale = 1. / np.amax(np.array(figsize))
vec = scale * (np.array(pos[v]) - np.array(pos[u]))
rad = vec[0] * vec[1] / np.sqrt(2 * np.sum(vec ** 2))
if self._config['graph_layout'] == 'layer':
gdir = self._config['graph_direction']
if gdir in ['left', 'right']:
rad *= -1
seen[(u, v)] = rad
# determine style of edge from edge weight
if weight is None:
linestyle = '-'
linewidth = 0.5 * edge_width
alpha = 0.5
elif not self._config['edge_width_enabled']:
linestyle = '-'
linewidth = edge_width
alpha = np.amin([np.absolute(weight), 1.0])
else:
linestyle = '-'
linewidth = np.absolute(weight) * edge_width
alpha = np.amin([np.absolute(weight), 1.0])
# draw edge
node_a = G.node[u]['patch']
node_b = G.node[v]['patch']
arrow = matplotlib.patches.FancyArrowPatch(
posA=node_a.center, posB=node_b.center,
patchA=node_a, patchB=node_b,
arrowstyle=default_edge_style,
connectionstyle='arc3,rad=%s' % rad,
mutation_scale=linewidth * 12.,
linewidth=linewidth, linestyle=linestyle,
color=get_color(data.get('color', 'black')), alpha=alpha)
ax.add_patch(arrow)
# (optional) draw legend
if self._config['show_legend']:
num_groups = np.sum([1 for g in list(groups.values()) \
if isinstance(g, list) and g])
markerscale = 0.6 * self._config['legend_fontsize'] / font_size
ax.legend(
numpoints=1,
loc='lower center',
ncol=num_groups,
borderaxespad=0.,
framealpha=0.,
bbox_to_anchor=(0.5, -0.1),
fontsize=self._config['legend_fontsize'],
markerscale=markerscale)
# (optional) plot title
self.plot_title()
return True
def get_color(*args):
"""Convert color name of XKCD color name survey to RGBA tuple.
Args:
List of color names. If the list is empty, a full list of
available color names is returned. Otherwise the first valid
        color in the list is returned as an RGB tuple. If no color is
valid None is returned.
"""
try:
from matplotlib import colors
except ImportError as err:
raise ImportError(
"requires package matplotlib: "
"https://matplotlib.org") from err
if not args:
clist = list(colors.get_named_colors_mapping().keys())
return sorted([cname[5:].title() \
for cname in clist if cname[:5] == 'xkcd:'])
rgb = None
for cname in args:
try:
rgb = colors.to_rgb('xkcd:%s' % cname)
break
except ValueError:
continue
return rgb
def get_texlabel(string):
"""Return formated node label as used for plots."""
lstr = string.rstrip('1234567890')
if len(lstr) == len(string):
return '${%s}$' % (string)
rnum = int(string[len(lstr):])
lstr = lstr.strip('_')
return '${%s}_{%i}$' % (lstr, rnum)
def get_texlabel_width(string):
"""Return estimated width for formated node labels."""
lstr = string.rstrip('1234567890')
if len(lstr) == len(string):
return len(string)
lstr = lstr.strip('_')
rstr = str(int(string[len(lstr):]))
return len(lstr) + 0.7 * len(rstr)
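
# Worked examples for the two helpers above:
#   get_texlabel('x1')        -> '${x}_{1}$'
#   get_texlabel_width('x12') -> 2.4  (1 for 'x' plus 0.7 * 2 subscript digits)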
def filetypes():
"""Return supported image filetypes."""
try:
import matplotlib.pyplot as plt
except ImportError as err:
raise ImportError(
"requires package matplotlib: "
"https://matplotlib.org") from err
return plt.gcf().canvas.get_supported_filetypes()
| gpl-3.0 | -1,912,380,908,918,862,800 | 30.913628 | 77 | 0.527696 | false |
brake/python-utl | utl/hex.py | 1 | 2924 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
# ------------------------------------------------------------------------------
# Name: hex.py
# Package: utl
# Project: utl
#
# Created: 10.10.13 11:43
# Copyright 2013-2016 © Constantin Roganov
# License: The MIT License
# ------------------------------------------------------------------------------
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ------------------------------------------------------------------------------
"""Hex string to binary conversions and vice versa"""
from __future__ import unicode_literals, absolute_import
from builtins import *
from binascii import hexlify, unhexlify, Error
__author__ = 'Constantin Roganov'
def hexstr2bytes_list(hexstr):
"""Convert the hex string to list of bytes"""
if not hexstr:
raise TypeError("hexstr2bytes_list: input must be a hex string, '{}' received".format(hexstr))
# python 2
# return list(map(ord, unhexlify(hexstr)))
return [i for i in unhexlify(hexstr)]
def bytes_list2bin(bl):
"""Convert list of bytes to binary string"""
return b''.join(chr(i).encode('latin-1') for i in bl)
def bytes_list2hexstr(bl, uppercase=True):
"""Convert list of bytes to hex string"""
# python2
# result = hexlify(bytes_list2bin(bl))
result = bytes_list2bin(bl).hex()
return result.upper() if uppercase else result
def is_hexstr(s):
"""Check a string s for presence a valid hexadecimal data"""
try:
unhexlify(s)
return True
except (TypeError, Error):
return False
def swap_nibbles(s):
r"""Swap nibbles in a hex string.
len(s) must be even otherwise ValueError will be raised.
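
    Example:
        >>> swap_nibbles('12ab')
        '21ba'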
"""
if len(s) % 2:
raise ValueError('Odd-length string')
return ''.join([y+x for x,y in zip(*[iter(s)] * 2)])
| mit | 9,198,827,906,294,220,000 | 31.597701 | 102 | 0.624701 | false |
rndusr/stig | stig/commands/cli/misc.py | 1 | 1667 | # This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# http://www.gnu.org/licenses/gpl-3.0.txt
"""Documentation commands"""
from .. import CmdError
from ... import objects
from ..base import misc as base
from ...logging import make_logger # isort:skip
log = make_logger(__name__)
class HelpCmd(base.HelpCmdbase):
provides = {'cli'}
async def run(self, TOPIC):
# If TOPIC is a setting and it is managed by the server, we must fetch
# config values from the server so we can display its current value.
for topic in TOPIC:
if topic.startswith('srv.'):
try:
await objects.srvapi.settings.update()
except objects.srvapi.ClientError as e:
self.error(e)
finally:
break
return super().run(TOPIC)
def display_help(self, topics, lines):
for line in lines:
print(line)
class VersionCmd(base.VersionCmdbase):
provides = {'cli'}
class LogCmd(base.LogCmdbase):
provides = {'cli'}
def _do(self, action, *args):
cmd_str = '%s %s' % (action, ' '.join(args))
raise CmdError('Unsupported command in CLI mode: %s' % cmd_str)
| gpl-3.0 | 2,688,278,037,507,489,000 | 31.057692 | 78 | 0.644271 | false |
route-nazionale/event_manager | ragazzi/views.py | 1 | 1103 | from django.shortcuts import render
from django.core.exceptions import PermissionDenied
from django.shortcuts import get_object_or_404
from django.views.decorators.http import require_POST
from django.views.decorators.csrf import csrf_exempt
from base.views_support import HttpJSONResponse
from base.models import Rover, Event
from base.models.event import EventTurno1, EventTurno2, EventTurno3
import json
@csrf_exempt
@require_POST
def boy_evaluate(request, pk):
if not request.user.is_staff or request.session.get('valid'):
raise PermissionDenied()
data = json.loads(request.body)
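    # Expected JSON payload (an assumption based on the lookups below):
    # {"turno1": "<code>", "turno2": "<code>", "turno3": "<code>"}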
rover = get_object_or_404(Rover, pk=pk)
# Step 1: simulation of new labs assignment
rover.turno1 = EventTurno1.objects.get(code=data['turno1'])
rover.turno2 = EventTurno2.objects.get(code=data['turno2'])
rover.turno3 = EventTurno3.objects.get(code=data['turno3'])
# Step 2: check constraints
msgs_constraints = rover.check_constraints()
msgs_constraints['satisfaction'] = rover.calculate_satisfaction()
return HttpJSONResponse(msgs_constraints)
| agpl-3.0 | 2,945,973,249,241,085,400 | 28.026316 | 69 | 0.752493 | false |
watchdogpolska/watchdog-kj-kultura | watchdog_kj_kultura/organizations_requests/tests/test_forms.py | 1 | 3033 | from django.core import mail
from django.test import RequestFactory, TestCase
from ...organizations.factories import OrganizationFactory
from ..factories import TemplateFactory
from ..forms import RequestForm
class RequestFormTestCase(TestCase):
def setUp(self):
self.organization = OrganizationFactory()
self.template = TemplateFactory()
self.factory = RequestFactory()
self.request = self.factory.get('/customer/details')
def test_form_is_valid(self):
body = "Lorem_FOO_BAR_Ipsum"
form = RequestForm(data={'email': '[email protected]',
'body': body,
'email_user': '[email protected]'},
organization=self.organization,
template=self.template,
request=self.request)
self.assertTrue(form.is_valid(), msg=form.errors)
def test_send_email_to_organization(self):
body = "Lorem_CONTENT_Ipsum"
form = RequestForm(data={'email': '[email protected]',
'body': body,
'email_user': '[email protected]'},
organization=self.organization,
template=self.template,
request=self.request)
self.assertTrue(form.is_valid(), msg=form.errors)
form.save()
self.assertEqual(len(mail.outbox), 2)
self.assertIn(self.organization.email, mail.outbox[0].to)
self.assertEqual(mail.outbox[0].subject, self.template.subject)
self.assertIn(body, mail.outbox[0].body)
def test_send_notification_to_user(self):
body = "Lorem_CONTENT_Ipsum"
form = RequestForm(data={'email': '[email protected]',
'body': body,
'email_user': '[email protected]'},
organization=self.organization,
template=self.template,
request=self.request)
self.assertTrue(form.is_valid(), msg=form.errors)
form.save()
        self.assertEqual(len(mail.outbox), 2)
self.assertIn('[email protected]', mail.outbox[1].to)
self.assertEqual(mail.outbox[1].subject, self.template.subject)
self.assertIn(body, mail.outbox[1].body)
def test_require_email_in_body(self):
kwargs = dict(data={'email': '[email protected]',
'body': 'jacob',
'email_user': '[email protected]'},
organization=self.organization,
template=TemplateFactory(email_required=True),
request=self.request)
form = RequestForm(**kwargs)
self.assertFalse(form.is_valid())
kwargs['data']['body'] = kwargs['data']['email_user']
form = RequestForm(**kwargs)
self.assertTrue(form.is_valid())
| mit | 387,663,058,317,457,540 | 42.328571 | 71 | 0.554566 | false |
e-koch/VLA_Lband | 14B-088/HI/imaging/sd_regridding/sd_comparison.py | 1 | 3520 |
'''
Compare the regridded versions of the SD datasets.
'''
from spectral_cube import SpectralCube
import matplotlib.pyplot as plt
import os
from corner import hist2d
from radio_beam import Beam
import astropy.units as u
import numpy as np
from paths import fourteenB_HI_data_path, data_path
from galaxy_params import gal
# Load in the 4 cubes and run.
vla_cube = SpectralCube.read(fourteenB_HI_data_path("M33_14B-088_HI.clean.image.fits"))
arecibo_path = os.path.join(data_path, "Arecibo")
# Spectral interpolation, followed by reprojection.
arecibo_name = \
os.path.join(arecibo_path,
"14B-088_items_new/m33_arecibo_14B088.fits")
arecibo_cube = SpectralCube.read(arecibo_name)
ebhis_path = os.path.join(data_path, "EBHIS")
# Spectral interpolation, followed by reprojection.
ebhis_name = os.path.join(ebhis_path, "14B-088_items/m33_ebhis_14B088.fits")
ebhis_cube = SpectralCube.read(ebhis_name)
gbt_path = os.path.join(data_path, "GBT")
gbt_name = os.path.join(gbt_path, "14B-088_items/m33_gbt_vlsr_highres_Tmb_14B088.fits")
gbt_cube = SpectralCube.read(gbt_name)
gbt_lowres_name = os.path.join(gbt_path, "14B-088_items/m33_gbt_vlsr_Tmb_14B088.fits")
gbt_lowres_cube = SpectralCube.read(gbt_lowres_name)
# Compare total emission in the cubes.
vla_mask = np.isfinite(vla_cube[0])
arecibo_sum = arecibo_cube.with_mask(vla_mask).sum()
ebhis_sum = ebhis_cube.with_mask(vla_mask).sum()
gbt_sum = gbt_cube.with_mask(vla_mask).sum()
gbt_lowres_sum = gbt_lowres_cube.with_mask(vla_mask).sum()
plt.plot(arecibo_sum, ebhis_sum, gbt_sum, gbt_lowres_sum)
# Compare intensities in one plane
# arecibo_plane = arecibo_cube[500]
# ebhis_plane = ebhis_cube[500]
# gbt_plane = gbt_cube[500]
# gbt_plane[np.isnan(gbt_plane)] = 0.0 * u.K
# gbt_lowres_plane = gbt_lowres_cube[500]
# # Convolve GBT to match EBHIS
# beam_fwhm = lambda diam: ((1.2 * 21 * u.cm) / diam.to(u.cm)) * u.rad
# gbt_90m_beam = Beam(beam_fwhm(90 * u.m))
# gbt_plane._beam = gbt_90m_beam
# gbt_plane_convolved = gbt_plane.convolve_to(ebhis_plane.beam)
# gbt_100m_beam = Beam(beam_fwhm(100 * u.m))
# gbt_plane._beam = gbt_100m_beam
# gbt_plane_convolved_100 = gbt_plane.convolve_to(ebhis_plane.beam)
# ax = plt.subplot(131)
# hist2d(gbt_plane.value.ravel(), ebhis_plane.value.ravel(), ax=ax)
# plt.plot([0, 15], [0, 15])
# ax2 = plt.subplot(132)
# hist2d(gbt_plane_convolved.value.ravel(), ebhis_plane.value.ravel(), ax=ax2)
# plt.plot([0, 15], [0, 15])
# ax3 = plt.subplot(133)
# hist2d(gbt_plane_convolved_100.value.ravel(), ebhis_plane.value.ravel(), ax=ax3)
# plt.plot([0, 15], [0, 15])
# Best match for GBT is with a 106 m beam, convolved to the 80 m of EBHIS.
# Well, something is wrong here. It has to be that the difference between the
# data is a 80 m deconvolved w/ a 106 m beam. The EBHIS beam size should then
# be slightly smaller?
# Now convolve the Arecibo down to the GBT.
# gbt_90m_beam = Beam(beam_fwhm(90 * u.m))
# arecibo_plane_convolved = arecibo_plane.convolve_to(gbt_90m_beam)
# gbt_100m_beam = Beam(beam_fwhm(100 * u.m))
# arecibo_plane_convolved_100 = arecibo_plane.convolve_to(gbt_100m_beam)
# ax = plt.subplot(131)
# hist2d(arecibo_plane.value.ravel(), gbt_plane.value.ravel(), ax=ax)
# plt.plot([0, 15], [0, 15])
# ax2 = plt.subplot(132)
# hist2d(arecibo_plane_convolved.value.ravel(), gbt_plane.value.ravel(), ax=ax2)
# plt.plot([0, 15], [0, 15])
# ax3 = plt.subplot(133)
# hist2d(arecibo_plane_convolved_100.value.ravel(), gbt_plane.value.ravel(), ax=ax3)
# plt.plot([0, 15], [0, 15]) | mit | -5,906,216,601,364,028,000 | 31.009091 | 87 | 0.704545 | false |
Kovak/KivyNBT | flat_kivy/uix/behaviors.py | 1 | 19077 |
from weakref import ref
from kivy.app import App
from kivy.clock import Clock
from kivy.properties import (ObjectProperty, OptionProperty, NumericProperty,
ListProperty, StringProperty)
from kivy.metrics import sp
# CheckBox is referenced by the logging branches in ButtonBehavior below;
# importing it from Kivy here is an assumption about the intended dependency.
from kivy.uix.checkbox import CheckBox
from kivy.animation import Animation
from kivy.graphics import Color, Ellipse, Rectangle
from kivy.graphics import (StencilPush, StencilPop, StencilUse,
StencilUnUse, Color, Rectangle)
try:
from kivy.graphics import (ScissorPush, ScissorPop)
except ImportError:
_has_scissor_instr = False
else:
_has_scissor_instr = True
from flat_kivy.utils import construct_data_resource
from flat_kivy.logmanager import LogManager
class ThemeBehavior(object):
theme = ListProperty([])
def on_theme(self, instance, value):
if value != []:
app = App.get_running_app()
theme = app.theme_manager.get_theme(value[0], value[1])
types = app.theme_manager.get_theme_types()
for each in types:
if isinstance(self, types[each]):
try:
theme_def = theme[each]
except:
print(each, 'not in theme', value[0], value[1], self)
continue
for propname in theme_def:
setattr(self, propname, theme_def[propname])
class GrabBehavior(object):
last_touch = ObjectProperty(None)
def on_touch_down(self, touch):
if touch.is_mouse_scrolling:
return False
if self.disabled:
return False
if not self.collide_point(touch.x, touch.y):
return False
if self in touch.ud:
return False
touch.grab(self)
touch.ud[self] = True
self.last_touch = touch
return super(GrabBehavior, self).on_touch_down(touch)
def on_touch_move(self, touch):
if super(GrabBehavior, self).on_touch_move(touch):
return True
if touch.grab_current is self:
return True
return self in touch.ud
def on_touch_up(self, touch):
if touch.grab_current is self:
result = super(GrabBehavior, self).on_touch_up(touch)
touch.ungrab(self)
self.last_touch = touch
return result
class LogBehavior(object):
log_manager = LogManager(
construct_data_resource('logs/'))
def on_touch_down(self, touch):
log_manager = self.log_manager
if self in touch.ud and log_manager.do_logging:
            print(self, 'in on touch down')
coords = (touch.x, touch.y)
log_interface = log_manager.log_interface
touch_id = log_manager.touch_id
touch.ud['log_id'] = touch_id
log_interface.set_entry(
'touches', touch_id, 'touch_down_at', coords,
do_timestamp=True)
log_manager.touch_id += 1
log_interface.set_entry(
'touches', 'last_touch_id', 'value', touch_id)
return super(LogBehavior, self).on_touch_down(touch)
def on_touch_move(self, touch):
log_manager = self.log_manager
if self in touch.ud and log_manager.do_logging:
coords = (touch.x, touch.y)
touch_id = touch.ud['log_id']
log_manager.log_interface.append_entry('touches', touch_id,
'touch_moves_at', coords, do_timestamp=True)
return super(LogBehavior, self).on_touch_move(touch)
def on_touch_up(self, touch):
log_manager = self.log_manager
if self in touch.ud and log_manager.do_logging:
coords = (touch.x, touch.y)
touch_id = touch.ud['log_id']
log_manager.log_interface.set_entry(
'touches', touch_id, 'touch_up_at', coords, do_timestamp=True)
return super(LogBehavior, self).on_touch_up(touch)
class LogNoTouchBehavior(object):
log_manager = LogManager(
construct_data_resource('logs/'))
class ButtonBehavior(object):
'''Button behavior.
:Events:
`on_press`
Fired when the button is pressed.
`on_release`
Fired when the button is released (i.e. the touch/click that
pressed the button goes away).
'''
state = OptionProperty('normal', options=('normal', 'down'))
'''State of the button, must be one of 'normal' or 'down'.
The state is 'down' only when the button is currently touched/clicked,
otherwise 'normal'.
:attr:`state` is an :class:`~kivy.properties.OptionProperty`.
'''
def __init__(self, **kwargs):
self.register_event_type('on_press')
self.register_event_type('on_release')
super(ButtonBehavior, self).__init__(**kwargs)
def _do_press(self):
self.state = 'down'
def _do_release(self):
self.state = 'normal'
def on_touch_down(self, touch):
if self in touch.ud:
if isinstance(self, LogBehavior):
log_manager = self.log_manager
if log_manager.do_logging:
if isinstance(self, CheckBox):
touch_id = touch.ud['log_id']
log_manager.log_interface.set_entry(
'touches', touch_id,
'checkbox_pressed_down', self.state,
do_timestamp=True)
else:
touch_id = touch.ud['log_id']
log_manager.log_interface.set_entry(
'touches', touch_id,
'button_pressed', self.text, do_timestamp=True)
self._do_press()
self.dispatch('on_press')
return super(ButtonBehavior, self).on_touch_down(touch)
def on_touch_move(self, touch):
return super(ButtonBehavior, self).on_touch_move(touch)
def on_touch_up(self, touch):
if self in touch.ud:
if isinstance(self, LogBehavior):
log_manager = self.log_manager
if log_manager.do_logging:
if isinstance(self, CheckBox):
touch_id = touch.ud['log_id']
log_manager.log_interface.set_entry(
'touches', touch_id,
'checkbox_released', self.state,
do_timestamp=True)
else:
touch_id = touch.ud['log_id']
log_manager.log_interface.set_entry(
'touches', touch_id, 'button_released',
self.text, do_timestamp=True)
self._do_release()
self.dispatch('on_release')
return super(ButtonBehavior, self).on_touch_up(touch)
def on_press(self):
pass
def on_release(self):
pass
def trigger_action(self, duration=0.1):
'''Trigger whatever action(s) have been bound to the button by calling
both the on_press and on_release callbacks.
This simulates a quick button press without using any touch events.
Duration is the length of the press in seconds. Pass 0 if you want
the action to happen instantly.
.. versionadded:: 1.8.0
'''
self._do_press()
self.dispatch('on_press')
def trigger_release(dt):
self._do_release()
self.dispatch('on_release')
if not duration:
trigger_release(0)
else:
Clock.schedule_once(trigger_release, duration)
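
# Example mixin usage (a sketch, assuming a concrete Kivy widget such as
# Label): GrabBehavior should come first in the MRO so that touches are
# registered in touch.ud before the other behaviors inspect it.
#
#     class RippleButton(GrabBehavior, ButtonBehavior,
#                        TouchRippleBehavior, Label):
#         pass
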
class ToggleButtonBehavior(ButtonBehavior):
'''ToggleButton behavior, see ToggleButton module documentation for more
information.
.. versionadded:: 1.8.0
'''
__groups = {}
group = ObjectProperty(None, allownone=True)
'''Group of the button. If None, no group will be used (button is
independent). If specified, :attr:`group` must be a hashable object, like
a string. Only one button in a group can be in 'down' state.
:attr:`group` is a :class:`~kivy.properties.ObjectProperty`
'''
def __init__(self, **kwargs):
self._previous_group = None
super(ToggleButtonBehavior, self).__init__(**kwargs)
def on_group(self, *largs):
groups = ToggleButtonBehavior.__groups
if self._previous_group:
group = groups[self._previous_group]
for item in group[:]:
if item() is self:
group.remove(item)
break
group = self._previous_group = self.group
if group not in groups:
groups[group] = []
r = ref(self, ToggleButtonBehavior._clear_groups)
groups[group].append(r)
def _release_group(self, current):
if self.group is None:
return
group = self.__groups[self.group]
for item in group[:]:
widget = item()
            if widget is None:
                group.remove(item)
                continue
if widget is current:
continue
widget.state = 'normal'
def _do_press(self):
self._release_group(self)
self.state = 'normal' if self.state == 'down' else 'down'
def _do_release(self):
pass
@staticmethod
def _clear_groups(wk):
# auto flush the element when the weak reference have been deleted
groups = ToggleButtonBehavior.__groups
for group in list(groups.values()):
if wk in group:
group.remove(wk)
break
@staticmethod
def get_widgets(groupname):
'''Return the widgets contained in a specific group. If the group
doesn't exist, an empty list will be returned.
.. important::
Always release the result of this method! In doubt, do::
l = ToggleButtonBehavior.get_widgets('mygroup')
# do your job
del l
.. warning::
It's possible that some widgets that you have previously
deleted are still in the list. Garbage collector might need
more elements before flushing it. The return of this method
is informative, you've been warned!
'''
groups = ToggleButtonBehavior.__groups
if groupname not in groups:
return []
return [x() for x in groups[groupname] if x()][:]
class TouchRippleBehavior(object):
ripple_rad = NumericProperty(10)
ripple_pos = ListProperty([0, 0])
ripple_color = ListProperty((0., 0., 0., 1.))
ripple_duration_in = NumericProperty(.7)
ripple_duration_out = NumericProperty(.3)
fade_to_alpha = NumericProperty(.12)
ripple_scale = NumericProperty(4.0)
ripple_func_in = StringProperty('in_cubic')
ripple_func_out = StringProperty('out_quad')
def on_touch_down(self, touch):
if self in touch.ud:
self.anim_complete(self, self)
self.ripple_pos = ripple_pos = (touch.x, touch.y)
Animation.cancel_all(self, 'ripple_rad', 'ripple_color')
rc = self.ripple_color
ripple_rad = self.ripple_rad
self.ripple_color = [rc[0], rc[1], rc[2], .16]
anim = Animation(
ripple_rad=max(self.width, self.height) * self.ripple_scale,
t=self.ripple_func_in,
ripple_color=[rc[0], rc[1], rc[2], self.fade_to_alpha],
duration=self.ripple_duration_in)
anim.start(self)
with self.canvas.after:
x,y = self.to_window(*self.pos)
width, height = self.size
#In python 3 the int cast will be unnecessary
pos = (int(round(x)), int(round(y)))
size = (int(round(width)), int(round(height)))
if _has_scissor_instr:
ScissorPush(x=pos[0], y=pos[1],
width=size[0], height=size[1])
else:
StencilPush()
Rectangle(pos=(int(round(x)), int(round(y))),
size=(int(round(width)), int(round(height))))
StencilUse()
self.col_instruction = Color(rgba=self.ripple_color)
self.ellipse = Ellipse(size=(ripple_rad, ripple_rad),
pos=(ripple_pos[0] - ripple_rad/2.,
ripple_pos[1] - ripple_rad/2.))
if _has_scissor_instr:
ScissorPop()
else:
StencilUnUse()
Rectangle(pos=(int(round(x)), int(round(y))),
size=(int(round(width)), int(round(height))))
StencilPop()
self.bind(ripple_color=self.set_color, ripple_pos=self.set_ellipse,
ripple_rad=self.set_ellipse)
return super(TouchRippleBehavior, self).on_touch_down(touch)
def set_ellipse(self, instance, value):
ellipse = self.ellipse
ripple_pos = self.ripple_pos
ripple_rad = self.ripple_rad
ellipse.size = (ripple_rad, ripple_rad)
ellipse.pos = (ripple_pos[0] - ripple_rad/2.,
ripple_pos[1] - ripple_rad/2.)
def set_color(self, instance, value):
self.col_instruction.rgba = value
def on_touch_up(self, touch):
if self in touch.ud:
rc = self.ripple_color
anim = Animation(ripple_color=[rc[0], rc[1], rc[2], 0.],
t=self.ripple_func_out, duration=self.ripple_duration_out)
anim.bind(on_complete=self.anim_complete)
anim.start(self)
return super(TouchRippleBehavior, self).on_touch_up(touch)
def anim_complete(self, anim, instance):
self.ripple_rad = 10
self.canvas.after.clear()
class SliderTouchRippleBehavior(object):
ripple_rad = NumericProperty(10)
ripple_pos = ListProperty([0, 0])
ripple_color = ListProperty((1., 1., 1., 1.))
ripple_duration_in = NumericProperty(.2)
ripple_duration_out = NumericProperty(.5)
fade_to_alpha = NumericProperty(.75)
ripple_scale = NumericProperty(2.0)
ripple_func_in = StringProperty('in_cubic')
ripple_func_out = StringProperty('out_quad')
def __init__(self, **kwargs):
super(SliderTouchRippleBehavior, self).__init__(**kwargs)
self.slider_stencil = None
self.slider_stencil_unuse = None
self.slider_line_stencil = None
self.slider_line_stencil_unuse = None
def on_touch_down(self, touch):
if self in touch.ud:
self.anim_complete(self, self)
self.ripple_pos = ripple_pos = (touch.x, touch.y)
Animation.cancel_all(self, 'ripple_rad', 'ripple_color')
rc = self.ripple_color
ripple_rad = self.ripple_rad
self.ripple_color = [rc[0], rc[1], rc[2], 1.]
anim = Animation(
ripple_rad=max(self.width, self.height) * self.ripple_scale,
t=self.ripple_func_in,
ripple_color=[rc[0], rc[1], rc[2], self.fade_to_alpha],
duration=self.ripple_duration_in)
anim.start(self)
with self.canvas.after:
x,y = self.to_window(*self.pos)
width, height = self.size
if self.orientation == 'horizontal':
ellipse_pos = (self.value_pos[0] - sp(16), self.center_y - sp(17))
stencil_pos = (self.x + self.padding + sp(2), self.center_y - sp(7))
stencil_size = (self.width - self.padding * 2 - sp(4), sp(14))
else:
ellipse_pos = (self.center_x - sp(17), self.value_pos[1] - sp(16))
stencil_pos = (self.center_x - sp(7), self.y + self.padding + sp(2))
stencil_size = (sp(14), self.height - self.padding * 2 - sp(4))
StencilPush()
Rectangle(
pos=stencil_pos,
size=stencil_size)
self.slider_stencil = Ellipse(
pos=ellipse_pos,
size=(sp(32), sp(32)))
StencilUse(op='lequal')
self.col_instruction = Color(rgba=self.ripple_color)
self.ellipse = Ellipse(size=(ripple_rad, ripple_rad),
pos=(ripple_pos[0] - ripple_rad/2.,
ripple_pos[1] - ripple_rad/2.))
StencilUnUse()
Rectangle(
pos=stencil_pos,
size=stencil_size)
self.slider_stencil_unuse = Ellipse(
pos=ellipse_pos,
size=(sp(32), sp(32)))
StencilPop()
self.bind(ripple_color=self.set_color, ripple_pos=self.set_ellipse,
ripple_rad=self.set_ellipse)
return super(SliderTouchRippleBehavior, self).on_touch_down(touch)
def update_stencil(self):
if self.orientation == 'horizontal':
pos = [self.value_pos[0] - sp(16),
self.center_y - sp(17)]
ellipse = [self.value_pos[0] - sp(16),
self.center_y - sp(17), sp(32), sp(32)]
else:
pos = [self.center_x - sp(17),
self.value_pos[1] - sp(16)]
ellipse = [self.center_x - sp(17),
self.value_pos[1] - sp(16), sp(32), sp(32)]
if self.slider_stencil is not None:
self.slider_stencil.pos = pos
if self.slider_stencil_unuse is not None:
self.slider_stencil_unuse.pos = pos
if self.slider_line_stencil is not None:
self.slider_line_stencil.ellipse = ellipse
if self.slider_line_stencil_unuse is not None:
self.slider_line_stencil_unuse.ellipse = ellipse
def on_value_pos(self, instance, value):
self.update_stencil()
def set_ellipse(self, instance, value):
ellipse = self.ellipse
ripple_pos = self.ripple_pos
ripple_rad = self.ripple_rad
ellipse.size = (ripple_rad, ripple_rad)
ellipse.pos = (ripple_pos[0] - ripple_rad/2.,
ripple_pos[1] - ripple_rad/2.)
def set_color(self, instance, value):
self.col_instruction.rgba = value
def on_touch_up(self, touch):
if self in touch.ud:
rc = self.ripple_color
anim = Animation(ripple_color=[rc[0], rc[1], rc[2], 0.],
t=self.ripple_func_out, duration=self.ripple_duration_out)
anim.bind(on_complete=self.anim_complete)
anim.start(self)
return super(SliderTouchRippleBehavior, self).on_touch_up(touch)
def anim_complete(self, anim, instance):
self.ripple_rad = 10
self.canvas.after.clear()
self.slider_stencil = None
self.slider_stencil_unuse = None
| mit | 4,843,255,177,433,859,000 | 36.187135 | 88 | 0.548514 | false |
groupe-conseil-nutshimit-nippour/django-geoprisma | geoprisma/acl/models.py | 1 | 1433 | # -*- coding: utf-8 -*-
from django.conf import settings
from django.contrib.auth.models import User, Group
from geoprisma.models import Resource, Session
#Dummy decorator if schema is not supported
def schematize(original_class):
return original_class
# Import a models module that supports PGSQL schemas if defined
if hasattr(settings, 'SCHEMATIZED_MODELS'):
try:
models = __import__(settings.SCHEMATIZED_MODELS, fromlist=['*'])
schematize = models.schematize
except ImportError:
from django.db import models
else:
from django.db import models
@schematize
class Action(models.Model):
name = models.CharField(max_length=255)
class Meta:
ordering = ('name',)
verbose_name = "Action"
verbose_name_plural = "Actions"
def __unicode__(self):
return self.name
@schematize
class Right(models.Model):
id_group = models.ForeignKey(Group)
id_resource = models.ForeignKey(Resource)
actions = models.ManyToManyField(Action)
class Meta:
ordering = ('id_group', 'id_resource',)
unique_together = ('id_group', 'id_resource',)
verbose_name = "Right"
verbose_name_plural = "Rights"
def __unicode__(self):
return "%s - %s" % (self.id_group, self.id_resource,)
| bsd-3-clause | -81,777,879,238,157,520 | 27.854167 | 80 | 0.593859 | false |
product-definition-center/pdc-client | pdc_client/plugins/image.py | 1 | 4868 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2015 Red Hat
# Licensed under The MIT License (MIT)
# http://opensource.org/licenses/MIT
#
from __future__ import print_function
import sys
from datetime import datetime
from pdc_client.plugin_helpers import PDCClientPlugin, add_parser_arguments, extract_arguments
info_desc = """Generally there may be duplicate file names. If the file name
you provide matches more than one image, you will get a list of all those
images together with their SHA256 checksums. You disambiguate by providing the
checksum as a command line argument.
"""
def size_format(num):
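    """Format a byte count as a human-readable string.

    For example, ``size_format(2048)`` returns ``'2.0 KiB'``.
    """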
fmt = '{0:.1f} {1}B'
factor = 1024.0
for unit in ('', 'Ki', 'Mi', 'Gi'):
if num < factor:
return fmt.format(num, unit)
num /= factor
return fmt.format(num, 'Ti')
class ImagePlugin(PDCClientPlugin):
command = 'image'
def register(self):
self.set_command()
list_parser = self.add_action('list', help='list all images')
list_parser.add_argument('--show-sha256', action='store_true',
help='whether to display SHA256 checksums along with the file names')
add_parser_arguments(list_parser, {'arch': {},
'compose': {},
'file_name': {},
'image_format': {},
'image_type': {},
'implant_md5': {},
'md5': {},
'sha1': {},
'sha256': {},
'volume_id': {},
'subvariant': {}},
group='Filtering')
list_parser.set_defaults(func=self.image_list)
info_parser = self.add_action('info', help='display details of an image', description=info_desc)
info_parser.add_argument('filename', metavar='FILENAME')
info_parser.add_argument('--sha256', nargs='?')
info_parser.set_defaults(func=self.image_info)
def _print_image_list(self, images, with_sha=False):
fmt = '{file_name}'
if with_sha:
fmt = '{file_name:80}{sha256}'
start_line = True
for image in images:
if start_line:
start_line = False
print(fmt.format(file_name='File-Name', sha256='SHA256'))
print()
print(fmt.format(**image))
def image_list(self, args):
filters = extract_arguments(args)
images = self.client.get_paged(self.client.images._, **filters)
if args.json:
print(self.to_json(list(images)))
return
self._print_image_list(images, args.show_sha256)
def image_info(self, args):
filters = {'file_name': args.filename}
if args.sha256:
filters['sha256'] = args.sha256
image = self.client.images._(**filters)
if image['count'] == 0:
print('Not found')
sys.exit(1)
elif image['count'] > 1:
print('More than one image with that name, use --sha256 to specify.')
self._print_image_list(image['results'], True)
sys.exit(1)
else:
image = image['results'][0]
if args.json:
print(self.to_json(image))
return
mtime = datetime.utcfromtimestamp(image['mtime'])
fmt = '{0:15} {1}'
print(fmt.format('File Name', image['file_name']))
print(fmt.format('Image Type', image['image_type']))
print(fmt.format('Image Format', image['image_format']))
print(fmt.format('Arch', image['arch']))
print(fmt.format('Disc', '{0} / {1}'.format(image['disc_number'], image['disc_count'])))
print(fmt.format('Modified', '{0} ({1})'.format(image['mtime'], mtime)))
print(fmt.format('Size', '{0} ({1})'.format(image['size'], size_format(image['size']))))
print(fmt.format('Bootable', 'yes' if image['bootable'] else 'no'))
print(fmt.format('Volume ID', image['volume_id']))
print(fmt.format('Implant MD5', image['implant_md5']))
print(fmt.format('Subvariant', image['subvariant']))
print('\nChecksums:')
print(' {0:7} {1}'.format('MD5', image['md5']))
print(' {0:7} {1}'.format('SHA1', image['sha1']))
print(' {0:7} {1}'.format('SHA256', image['sha256']))
if image['composes']:
print('\nUsed in composes:')
for compose in image['composes']:
print(' * {0}'.format(compose))
PLUGIN_CLASSES = [ImagePlugin]
| mit | 1,686,656,099,837,159,700 | 37.03125 | 104 | 0.513558 | false |
OAGr/exercism | python/twelve-days/twelve_days.py | 1 | 1335 | #1 hour 15 minutes with minor distractions
def sing():
return verses(1,12)
def verse(i):
lines = []
lines.append("On the %s day of Christmas my true love gave to me" % days[i])
lines += chorus(i)
lines.append(ending(i))
lines = ', '.join(lines)
return lines + "\n"
def verses(start,stop):
return "\n".join([verse(i) for i in range(start,stop+1)]) + "\n"
def chorus(i):
present = i
chorus = []
while (present > 1):
chorus.append(lines[present])
present = present - 1
return chorus
def ending(i):
if i == 1:
return lines[1]
else:
return "and " + lines[1]
lines = [
'NAN',
'a Partridge in a Pear Tree.',
'two Turtle Doves',
'three French Hens',
'four Calling Birds',
'five Gold Rings',
'six Geese-a-Laying',
'seven Swans-a-Swimming',
'eight Maids-a-Milking',
'nine Ladies Dancing',
'ten Lords-a-Leaping',
'eleven Pipers Piping',
'twelve Drummers Drumming'
]
days = [
'NAN',
'first',
'second',
'third',
'fourth',
'fifth',
'sixth',
'seventh',
'eighth',
'ninth',
'tenth',
'eleventh',
'twelfth'
]
| mit | 8,918,104,149,386,228,000 | 20.532258 | 80 | 0.494382 | false |
transientlunatic/minke | minke/mdctools.py | 1 | 34706 | """
88b d88 88 88
888b d888 "" 88
88`8b d8'88 88
88 `8b d8' 88 88 8b,dPPYba, 88 ,d8 ,adPPYba,
88 `8b d8' 88 88 88P' `"8a 88 ,a8" a8P_____88
88 `8b d8' 88 88 88 88 8888[ 8PP"""""""
88 `888' 88 88 88 88 88`"Yba, "8b, ,aa
88 `8' 88 88 88 88 88 `Y8a `"Ybbd8"'
--------------------------------------------------------
This file is a part of Minke, a tool for generating simulated
gravitational wave signals, used for characterising and training
search algorithms.
Minke was created by Daniel Williams, based on work started by Chris
Pankow and others, and is built around the LALSimulation library.
"""
from glue.ligolw import ligolw, utils, lsctables
lsctables.use_in(ligolw.LIGOLWContentHandler)
import numpy
import lalburst, lalsimulation, lalmetaio
from minke.antenna import response
from lal import TimeDelayFromEarthCenter as XLALTimeDelayFromEarthCenter
#from pylal.xlal.datatypes.ligotimegps import LIGOTimeGPS
from lal import LIGOTimeGPS
from glue.ligolw.utils import process
import glue
import glue.ligolw
import gzip
import lal, lalframe
import numpy as np
import pandas as pd
import os
import os.path
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import re
import random
import minke
from minke import sources
sourcemap = {}
for classin in dir(sources):
classin = sources.__dict__[classin]
if hasattr(classin, "waveform"):
sourcemap[classin.waveform] = classin
def source_from_row(row):
waveform = row.waveform
sourceobj = sourcemap[row.waveform].__new__(sourcemap[row.waveform])
sourceobj.numrel_data = str("")
params = {}
for attr in dir(row):
if not attr[0] == "_" and not attr[:3] =="get":
#print attr
try:
params[attr] = getattr(row, attr)
setattr(sourceobj, attr, getattr(row, attr))
except AttributeError:
print("Error processing the {} column".format(attr))
sourceobj.params = params
try:
sourceobj.time = row.time_geocent_gps
except:
sourceobj.time = row.geocent_start_time
pass
return sourceobj
def source_from_dict(params):
sourceobj = sourcemap[params['morphology']].__new__(sourcemap[params['morphology']])
sourceobj.numrel_data = str("")
    # Copy the supplied parameters straight onto the new source object.
    for attr, value in params.items():
        setattr(sourceobj, attr, value)
    sourceobj.params = params
    try:
        sourceobj.time = params['time_geocent_gps']
    except KeyError:
        sourceobj.time = params.get('geocent_start_time')
return sourceobj
table_types = {
# Ad-Hoc
"ga" : lsctables.SimBurstTable,
"sg" : lsctables.SimBurstTable,
"wnb" : lsctables.SimBurstTable,
"sc" : lsctables.SimBurstTable,
# Supernova Families
"d08" : lsctables.SimBurstTable,
"s10" : lsctables.SimBurstTable,
"m12" : lsctables.SimBurstTable,
"o13" : lsctables.SimBurstTable,
"y10" : lsctables.SimBurstTable,
# Long Duration
"adi" : lsctables.SimBurstTable,
# Ringdown
"rng" : lsctables.SimRingdownTable,
"gng" : lsctables.SimRingdownTable,
}
tables = {
"burst" : lsctables.SimBurstTable,
"ringdown" : lsctables.SimRingdownTable
}
def mkdir(path):
"""
Make all of the tree of directories in a given path if they don't
already exist.
Parameters
----------
path : str
The path to the desired directory.
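
    Notes
    -----
    This behaves like ``os.makedirs(path, exist_ok=True)`` on Python 3.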
"""
sub_path = os.path.dirname(path)
if not os.path.exists(sub_path):
mkdir(sub_path)
if not os.path.exists(path):
os.mkdir(path)
class TableTypeError(Exception):
pass
class MDCSet():
inj_families_names = {'ga' : 'Gaussian',
'sg' : 'SineGaussian',
'wnb': 'BTLWNB',
"sc" : "StringCusp",
# Supernova families
'd08' : 'Dimmelmeier+08',
's10' : 'Scheidegger+10',
'm12' : 'Mueller+12',
'o13' : 'Ott+13',
'y10' : "Yakunin+10",
# Long-duration
'adi' : 'ADI',
# Ringdown
'rng' : "BBHRingdown",
'gng' : "GenericRingdown",
}
inj_families_abb = dict((v,k) for k,v in list(inj_families_names.items()))
hist_parameters = {
"StringCusp": ["amplitude", "ra", "dec"],
"SineGaussian": ["hrss", "psi", "ra", "dec"],
"Gaussian": ["hrss", "psi", "ra", "dec"],
"BTLWNB": ["hrss", "ra", "dec"],
"Dimmelmeier+08": ['hrss', 'ra', 'dec']
}
waveforms = []
def __init__(self, detectors, name='MDC Set', table_type = "burst"):
"""
Represents an MDC set, stored in an XML SimBurstTable file.
Parameters
----------
detectors : list
A list of detector names where the injections should be made.
name : str
A name for the MDC Set. Defaults to 'MDC Set'.
table_type : str
The type of table which should be generated. Default is `burst`,
which generates a SimBurstTable.
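
        Examples
        --------
        A minimal sketch (the waveform construction is illustrative; see
        ``minke.sources`` for the real signatures):

        >>> mdcset = MDCSet(["H1", "L1"], name="SG set")   # doctest: +SKIP
        >>> mdcset + sources.SineGaussian(...)             # doctest: +SKIP
        >>> mdcset.save_xml("sg_set.xml.gz")               # doctest: +SKIP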
"""
self.detectors = detectors
self.waveforms = []
self.strains = []
self.egw = []
self.times = []
self.name = name
self.times = np.array(self.times)
self.table_type = tables[table_type]
def __add__(self, waveform):
"""
Handle a waveform being added to the MDC set.
Parameters
----------
waveform : Waveform object
The waveform which should be added to the MDC set.
"""
# Check that this type of waveform can go into this type of
# XML file.
if not table_types[self.inj_families_abb[waveform.waveform]] == self.table_type:
raise TableTypeError()
        self.waveforms.append(waveform)
        self.times = np.append(self.times, waveform.time)
        return self
def save_xml(self, filename):
"""
Save the MDC set as an XML SimBurstTable.
Parameters
----------
filename : str
The location to save the xml file. The output is gzipped, so ending it with
a ".gz" would stick with convention.
"""
xmldoc = ligolw.Document()
lw = xmldoc.appendChild(ligolw.LIGO_LW())
sim = lsctables.New(self.table_type)
lw.appendChild(sim)
# This needs to be given the proper metadata once the package has the maturity to
# write something sensible.
for waveform in self.waveforms:
procrow = process.register_to_xmldoc(xmldoc, "minke_burst_mdc+{}".format(minke.__version__), {}) # waveform.params)
try:
waveform_row = waveform._row(sim)
waveform_row.process_id = procrow.process_id
except:
row = sim.RowType()
for a in list(self.table_type.validcolumns.keys()):
if a in list(waveform.params.keys()):
setattr(row, a, waveform.params[a])
else:
if not hasattr(waveform, a):
setattr(row, a, 0)
else:
setattr(row, a, getattr(waveform, a))
row.waveform = waveform.waveform
if self.table_type == lsctables.SimBurstTable:
# Fill in the time
                row.set_time_geocent(LIGOTimeGPS(float(waveform.time)))
# Get the sky locations
row.ra, row.dec, row.psi = waveform.ra, waveform.dec, waveform.psi
row.simulation_id = waveform.simulation_id
row.waveform_number = random.randint(0,int(2**32)-1)
### !! This needs to be updated.
row.process_id = "process:process_id:0" #procrow.process_id
waveform_row = row
sim.append(waveform_row)
#del waveform_row
# Write out the xml and gzip it.
utils.write_filename(xmldoc, filename, gz=True)
def load_xml(self, filename, full=True, start=None, stop=None):
"""Load the MDC Set from an XML file containing the SimBurstTable.
Parameters
----------
filename : str
The filename of the XML file.
full : bool
If this is true (which is the default) then all of
the calculated parameters are computed from the waveform
            definition.
start : float
The time at which the xml read-in should
start. The default is "None", in which case the xml file
will be read-in from the start.
        stop : float
The last time to be read from the xml file. The default is None,
            which causes the xml to be read right up to the last time in the
file.
To Do
-----
        At the moment this loads the information into the object, but it
doesn't produce waveform objects for each of the injections in the
file. This should be fixed so that the object works symmetrically.
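
        Examples
        --------
        >>> mdcset = MDCSet(["H1", "L1"])                       # doctest: +SKIP
        >>> mdcset.load_xml("mdcs/sg_set.xml.gz", full=False)   # doctest: +SKIP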
"""
i = 0
#sim_burst_table = lalburst.SimBurstTableFromLIGOLw(filename, start, stop)
xml = glue.ligolw.utils.load_filename(filename,
contenthandler = glue.ligolw.ligolw.LIGOLWContentHandler,
verbose = True)
sim_burst_table = glue.ligolw.table.get_table(xml, self.table_type.tableName)
for i,simrow in enumerate(sim_burst_table):
            # This is an ugly kludge to get around the poor choice of
            # waveform name in the XMLs for the s15 numerical waveforms.
            if simrow.waveform[:3]=="s15":
                self.numrel_file = str(simrow.waveform)
                simrow.waveform = "Dimmelmeier+08"
self.waveforms.append(source_from_row(simrow))
if full:
self._measure_hrss(i)
self._measure_egw_rsq(i)
if self.table_type == tables["burst"]:
self.times = np.append(self.times, float(simrow.time_geocent_gps))
def _generate_burst(self,row,rate=16384.0):
"""
Generate the burst described in a given row, so that it can be
measured.
Parameters
----------
row : SimBurst Row
The row of the waveform to be measured
rate : float
The sampling rate of the signal, in Hz. Defaults to 16384.0Hz
Returns
-------
hp :
The strain in the + polarisation
hx :
The strain in the x polarisation
hp0 :
A copy of the strain in the + polarisation
hx0 :
A copy of the strain in the x polarisation
"""
row = self.waveforms[row]
hp, hx, hp0, hx0 = row._generate()
return hp, hx, hp0, hx0
def _getDetector(self, det):
"""
A method to return a LALDetector object corresponding to a detector's
X#-style name, e.g. 'H1' as the Hanford 4km detector.
Parameters
----------
det : str
A string describing the detector in the format letter-number, e.g
"H1" would be the Hanford 4km detector, "L1" would be the
Livingston 4km, and so-forth.
Returns
-------
detector : LALDetector
The LAL object describing the detector
"""
# get detector
return lalsimulation.DetectorPrefixToLALDetector(det)
#if det not in lal.cached_detector_by_prefix.keys():
# raise ValueError, "%s is not a cached detector. "\
# "Cached detectors are: %s" % (det, inject.cached_detector.keys())
#return lal.cached_detector_by_prefix[det]
def _timeDelayFromGeocenter(self, detector, ra, dec, gpstime):
"""
Calculate the time delay between the geocentre and a given detector
for a signal from some sky location.
Parameters
----------
detector : str
A string describing the detector, e.g. H1 is the Hanford 4km
detector.
ra : float
The right-ascension of the observation in radians
dec : float
            The declination of the observation in radians.
        gpstime : float
            The GPS time of the signal at the geocentre.
"""
if isinstance(detector, str): detector = self._getDetector(detector)
gpstime = LIGOTimeGPS(float(gpstime))
return XLALTimeDelayFromEarthCenter(detector.location, ra, dec, gpstime)
def directory_path(self):
"""
Generate the directory where the frames from this MDC should be stored,
so, e.g. Gaussians 0d100 would go in "ga/ga0d100/"
Returns
-------
str
the folder structure
"""
name = self._simID(0)
abb = self.inj_families_abb[self.waveforms[0].waveform].lower()
return "{}/{}".format(abb, name)
def _simID(self, row):
"""
Generate a name for an injection set in the format expected by cWB
Parameters
----------
row : SimBurst
The simburst table row describing the injection
Returns
-------
str
The name of the injection in the cWB format
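
        Examples
        --------
        A linearly polarised sine-Gaussian with ``frequency=235`` and
        ``q=9`` yields the name ``sg_f235_q9_linear``.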
"""
row = self.waveforms[row]
name = ''
numberspart = ''
if row.waveform in ("Dimmelmeier+08", "Scheidegger+10", "Mueller+12", "Ott+13", "Yakunin+10"):
#print row
numberspart = os.path.basename(row.params['numrel_data']).split('.')[0]
if row.waveform == "Gaussian":
numberspart = "{:.3f}".format(row.duration * 1e3)
elif row.waveform == "SineGaussian":
if row.pol_ellipse_e==1.0:
pol="linear"
elif row.pol_ellipse_e==0.0:
pol="circular"
elif 0.0<row.pol_ellipse_e<1.0:
pol = "elliptical"
else:
pol = "inclined"
numberspart = "f{:.0f}_q{:.0f}_{}".format(row.frequency, row.q, pol)
elif row.waveform == "BTLWNB":
numberspart = "{}b{}tau{}".format(row.frequency, row.bandwidth, row.duration)
name += '{}_{}'.format(self.inj_families_abb[row.waveform].lower(), numberspart).replace('.','d')
return name
def _measure_hrss(self, row, rate=16384.0):
"""
Measure the various components of hrss (h+^2, hx^2, hphx) for a given
input row. This is accomplished by generating the burst and calling
the SWIG wrapped XLALMeasureHrss in lalsimulation.
Parameters
----------
row : int
The row number of the waveforms to be measured
rate : float
The sampling rate of the signal, in Hz. Defaults to 16384.0Hz
Returns
-------
hrss : float
The measured hrss of the waveform amplitude: sqrt(|Hp|^2 + |Hx|^2)
hphp : float
The hrss of the + polarisation only.
hxhx : float
The hrss of the x polarisation only.
hphx : float
The hrss of |HpHx|
"""
row = self.waveforms[row]
hp, hx, hp0, hx0 = row._generate() #self._generate_burst(row)# self.hp, self.hx, self.hp0, self.hx0
hp0.data.data *= 0
hx0.data.data *= 0
# H+ hrss only
hphp = lalsimulation.MeasureHrss(hp, hx0)**2
# Hx hrss only
hxhx = lalsimulation.MeasureHrss(hp0, hx)**2
# sqrt(|Hp|^2 + |Hx|^2)
hrss = lalsimulation.MeasureHrss(hp, hx)
hp.data.data = numpy.abs(hx.data.data) + numpy.abs(hp.data.data)
# |H+Hx|
hphx = (lalsimulation.MeasureHrss(hp, hx0)**2 - hrss**2)/2
#print hrss
self.strains.append([hrss, hphp, hxhx, hphx])
def _measure_egw_rsq(self, row, rate=16384.0):
"""
Measure the energy emitted in gravitational waves divided
by the distance squared in M_solar / pc^2. This is accomplished
by generating the burst and calling the SWIG wrapped
XLALMeasureHrss in lalsimulation.
Parameters
----------
row : int
The row number of the waveforms to be measured
rate : float
The sampling rate of the signal, in Hz. Defaults to 16384.0Hz
Returns
-------
egw : float
The energy emitted in gravitational waves divided
by the distance squared in M_solar / pc^2.
"""
hp, hx, _, _ = self._generate_burst(row)
self.egw.append(lalsimulation.MeasureEoverRsquared(hp, hx))
def _responses(self, row):
"""
Calculate the antenna repsonses for each detector to the waveform.
Parameters
----------
row : int
The row number of the waveforms to be measured
Returns
-------
responses : list of lists
A list containing the lists of antenna responses, with the first
element of each list containing the detector acronym.
"""
output = []
row = self.waveforms[row]
for detector in self.detectors:
time = row.time_geocent_gps + self._timeDelayFromGeocenter(detector, row.ra, row.dec, row.time_geocent_gps)
time = np.float64(time)
rs = response(time, row.ra, row.dec, 0, row.psi, 'radians', detector)
output.append([detector, time, rs[0], rs[1]] )
return output
def plot_skymap(self):
"""
        Plot a skymap of the injection distribution in RA and DEC on a Hammer projection.
Returns
-------
matplotlib figure
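
        Examples
        --------
        >>> fig = mdcset.plot_skymap()   # doctest: +SKIP
        >>> fig.savefig("skymap.png")    # doctest: +SKIP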
"""
fig = plt.figure()
# Load the ra and dec numbers out of the waveforms
dec = [getattr(s, 'dec') for s in self.waveforms]
ra = [getattr(s, 'ra') for s in self.waveforms]
# Make the plot on a hammer projection
plt.subplot(111, projection='hammer')
H, x, y = np.histogram2d(ra, dec, [50, 25], range=[[0, 2*np.pi], [-np.pi/2, np.pi/2]])
dist = plt.pcolormesh(x-np.pi,y, H.T, cmap="viridis")
plt.title("Sky distribution")
plt.colorbar(dist, orientation='horizontal')
return fig
def plot_hist(self, parameter):
"""
Plot a histogram of a waveform parameter.
Parameters
----------
parameter : str
The name of the simburst table parameter which is desired for the plot.
Returns
-------
matplotlib figure
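
        Examples
        --------
        >>> fig = mdcset.plot_hist("hrss")   # doctest: +SKIP
        >>> fig.savefig("hrss_hist.png")     # doctest: +SKIP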
"""
fig = plt.figure()
prms = [getattr(s, parameter) for s in self.waveforms]
ax2 = plt.subplot(111)
ax2.set_title("{} distribution".format(parameter))
ax2.set_xlabel(parameter)
ax2.hist(prms, bins=100, log=True, histtype="stepfilled", alpha=0.6);
return fig
def gravEn_row(self, row, frame):
"""
Produces a gravEn-style log row for a row of the simBurstTable.
Parameters
----------
row : int
The row number of the waveforms to be measured
Returns
-------
str
A string in the gravEn format which describes the injection.
"""
strains = self.strains[row]
rowname = self._simID(row)
responses = self._responses(row)
energy = self.egw[row]
row = self.waveforms[row]
output = []
if not row.incl:
cosincl = ""
else:
cosincl = np.cos(row.incl)
output.append(self.name) # GravEn_SimID
output.append(strains[0]) # SimHrss
output.append(energy) # SimEgwR2
output.append(strains[0]) # GravEn_Ampl
output.append(cosincl) # Internal_x the cosine of the angle the LOS makes with axis of angular momentum
output.append(row.phi) # Intenal_phi angle between source x-axis and the LOS
output.append(np.cos(np.pi/2.0 - row.dec)) # cos(External_x) # this needs to be the co-declination
output.append(row.ra if row.ra < np.pi else row.ra - 2*np.pi)
# ^ External_phi # This is the RA projected onto an Earth-based coordinate system
output.append(row.psi) # External_psi # source's polarisation angle
output.append(frame.start) # FrameGPS
output.append(row.time_geocent_gps) # EarthCtrGPS
output.append(rowname) # SimName
output.append(strains[1]) # SimHpHp
output.append(strains[2]) # SimHcHc
        output.append(strains[3]) # SimHpHc
output.append(" ".join(" ".join(map(str,l)) for l in responses))
return ' '.join(str(e) for e in output)
class Frame():
"""
Represents a frame, in order to prepare the injection frames
"""
def __init__(self, start, duration, ifo, number = -1):
"""
Parameters
----------
        start : float
            The GPS start time of the frame.
        duration : float
            The duration of the frame in seconds.
        ifo : list
            The list of interferometer prefixes (e.g. ``["H1", "L1"]``)
            in which the injections are to be made.
        number : int
            The frame's number within the project. Defaults to -1.
        """
        self.start = start
        self.duration = duration
        self.end = self.start + duration
        self.ifos = ifo
        self.number = number
def __repr__(self):
out = ''
out += "MDC Frame \n"
for ifo in self.ifos:
out += "{} {} {} \n".format(ifo, self.start, self.duration)
return out
def get_rowlist(self,mdcs):
"""
Return the rows from an MDC set which correspond to this frame.
Parameters
----------
mdcs : MDCSet object
The set of MDCs from which the rows are to be found.
"""
return np.where((mdcs.times<self.end)&(mdcs.times>self.start))[0]
def calculate_n_injections(self, mdcs):
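        """
        Count the injections from an MDC set whose times fall within this
        frame.
        """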
return len(mdcs.times[(mdcs.times<self.end)&(mdcs.times>self.start)])
def generate_log(self,mdc):
log = '# GravEn_SimID SimHrss SimEgwR2 GravEn_Ampl Internal_x Internal_phi External_x External_phi External_psi FrameGPS EarthCtrGPS SimName SimHpHp SimHcHc SimHpHc H1 H1ctrGPS H1fPlus H1fCross L1 L1ctrGPS L1fPlus L1fCross\n'
rowlist = self.get_rowlist(mdc)
for row in rowlist:
log += mdc.gravEn_row(row, self)
log += "\n"
return log
def generate_gwf(self, mdc, directory, project = "Minke", channel="SCIENCE", force=False, rate=16384.0):
"""
Produce the gwf file which corresponds to the MDC set over the period of this frame.
Parameters
----------
mdc : MDCSet object
The MDC set which should be used to produce this frame.
directory : str
The root directory where all of the frames are to be stored, for example
"/home/albert.einstein/data/mdc/frames/"
would cause the SineGaussian injections to be made in the directories under
"/home/albert.einstein/data/mdc/frames/sg"
project : str
The name of the project which this frame is a part of. Defaults to 'Minke'.
channel : str
The name of the channel which the injections should be made into. This is prepended by the initials
for each interferometer, so there will be a channel for each interferometer in the gwf.
force : bool
If true this forces the recreation of a GWF file even if it already exists.
Outputs
-------
gwf
The GWF file for this frame.
"""
ifosstr = "".join(set(ifo[0] for ifo in self.ifos))
family = mdc.waveforms[0].waveform
epoch = lal.LIGOTimeGPS(self.start)
filename = "{}-{}-{}-{}.gwf".format(ifosstr, family, self.start, self.duration)
self.frame = lalframe.FrameNew(epoch = epoch,
duration = self.duration, project='', run=1, frnum=1,
detectorFlags=lal.LALDETECTORTYPE_ABSENT)
ifobits = np.array([getattr(lal,"{}_DETECTOR_BIT".format(lal.cached_detector_by_prefix[ifo].frDetector.name.upper()))
for ifo in self.ifos])
ifoflag = numpy.bitwise_or.reduce(ifobits)
RUN_NUM = -1 # Simulated data should have a negative run number
head_date = str(self.start)[:5]
frameloc = directory+"/"+mdc.directory_path()+"/"+head_date+"/"
mkdir(frameloc)
if not os.path.isfile(frameloc + filename) or force:
epoch = lal.LIGOTimeGPS(self.start)
frame = lalframe.FrameNew(epoch, self.duration, project, RUN_NUM, self.number, ifoflag)
data = []
# Loop through each interferometer
for ifo in self.ifos:
# Calculate the number of samples in the timeseries
nsamp = int((self.end-self.start)*rate)
# Make the timeseries
h_resp = lal.CreateREAL8TimeSeries("{}:{}".format(ifo, channel), epoch, 0, 1.0/rate, lal.StrainUnit, nsamp)
# Loop over all of the injections corresponding to this frame
rowlist = self.get_rowlist(mdc)
if len(rowlist)==0: return
for row in rowlist:
sim_burst = mdc.waveforms[row]._row()
if sim_burst.hrss > 1:
distance = sim_burst.amplitude
else:
distance = None
#hp, hx = lalburst.GenerateSimBurst(sim_burst, 1.0/rate);
hp, hx, _, _ = mdc.waveforms[row]._generate(rate=rate, half=True, distance=distance)
# Apply detector response
det = lalsimulation.DetectorPrefixToLALDetector(ifo)
# Produce the total strains
h_tot = lalsimulation.SimDetectorStrainREAL8TimeSeries(hp, hx,
sim_burst.ra, sim_burst.dec, sim_burst.psi, det)
# Inject the waveform into the overall timeseries
lalsimulation.SimAddInjectionREAL8TimeSeries(h_resp, h_tot, None)
lalframe.FrameAddREAL8TimeSeriesSimData(frame, h_resp)
# Make the directory in which to store the files
# if it doesn't exist already
mkdir(frameloc)
# Write out the frame file
lalframe.FrameWrite(frame, frameloc+filename)
class HWInj(Frame):
"""
Represents a hardware injection frame.
Injection frames must be an ASCII file of the hoft sampled at
the antenna sampling rate, appropriately convolved with an
antenna response function.
As a result of the simplicity of this specific output format
we do not need information such as start-time in the file itself,
however we should have a sensible naming scheme for the ASCII files
since they will need to be produced as sidecars for an xml file.
"""
def __init__(self, ifos):
"""We'll need to know the start-time, the duration, and the ifo
for each which is to be used for hardware injections in order
to keep consistency with the data in the xml file, and so that the
appropriate waveform is injected into the appropriate detector.
Parameters
----------
ifos : list
The name of the interferometers, e.g. "L1" for the Livingston, LA LIGO detector.
"""
self.ifos = ifos
def __repr__(self):
"""
The printable representation of this object.
"""
out = ""
out += "Hardware MDC Frame \n"
for ifo in self.ifos:
out += "{} \n".format(ifo)
return out
def generate_pcal(self, mdc, directory, force = False, rate=16384):
"""
Produce the PCAL-ready hardware injection files as an ASCII list
sampled at the detector's sample rate.
Parameters
----------
mdc : MDCSet object
The signal set which should be used to generate the frame.
directory : str
The root directory where all of the frames are to be stored, for example
"/home/albert.einstein/data/mdc/frames/"
would cause the SineGaussian injections to be made in the directories under
"/home/albert.einstein/data/mdc/frames/sg"
force : bool
If true this forces the regeneration of the file, even if it
already exists.
Outputs
-------
ascii file
The ASCII file containing the correctly sampled waveform convolved with
the antenna pattern.
"""
family = mdc.waveforms[0].waveform
frameloc = os.path.join(directory, (mdc.directory_path()))
#rowlist = self.get_rowlist(mdc)
# Unlike with a conventional frame, we need to produce a separate file
# for each IFO.
for ifo in self.ifos:
for sim_burst in mdc.waveforms:
#sim_burst = mdc.waveforms[row]
# Check if the file exists, or if we're forcing the creation
filename = "{}_{}_{}.txt".format(family,
sim_burst.time,
ifo)
                filepath = os.path.join(frameloc, filename)
                if not os.path.isfile(filepath) or force:
                    epoch = lal.LIGOTimeGPS(sim_burst.time)
                    duration = 10
                    nsamp = duration*rate
                    h_tot = sim_burst._generate_for_detector([ifo], sample_rate=rate)
                    data = np.array(h_tot.data.data)
                    # write alongside the other injection products rather than the CWD
                    np.savetxt(filepath, data)
class HWFrameSet():
def __init__(self, ifos=["H1", "L1"]):
"""
A collection of hardware injection frames.
Parameters
----------
frame_list : str
The filespath of a CSV file containing the list of frames,
and the parameters required to produce them: the start and
duration times, and the interferometers they describe.
"""
self.frames = []
self.frames = [HWInj(ifos)]
#self.frames.append(frame)
def full_frameset(self, mdc, directory, force=False):
"""
Produce the gwf files which corresponds to the MDC set over the period of the frames in this collection.
Parameters
----------
mdc : MDCSet object
The MDC set which should be used to produce this frame.
directory : str
The root directory where all of the frames are to be stored, for example
"/home/albert.einstein/data/mdc/frames/"
would cause the SineGaussian injections to be made in the directories under
"/home/albert.einstein/data/mdc/frames/sg"
force : bool
If true this forces the recreation of a GWF file even if it already exists.
Outputs
-------
ascii files
The ASCII files for these hardware injections.
"""
for frame in self.frames:
frame.generate_pcal(mdc, directory, force)
class FrameSet():
def __init__(self, frame_list):
"""
A collection of frames.
Parameters
----------
frame_list : str
The filespath of a CSV file containing the list of frames,
and the parameters required to produce them: the start and
duration times, and the interferometers they describe.
"""
self.frames = []
self.frame_list = frame_list = pd.read_csv(frame_list)
for frame in frame_list.iterrows():
frame = frame[1]
ifos = frame['ifo'].replace("['",'').replace("']",'').replace("'",'').split(' ')
frame = Frame(frame['start time'],frame['duration'],ifos)
self.frames.append(frame)
def full_frameset(self, mdc, directory, channel="SCIENCE", force=False):
"""
Produce the gwf files which corresponds to the MDC set over the period of the frames in this collection.
Parameters
----------
mdc : MDCSet object
The MDC set which should be used to produce this frame.
directory : str
The root directory where all of the frames are to be stored, for example
"/home/albert.einstein/data/mdc/frames/"
would cause the SineGaussian injections to be made in the directories under
"/home/albert.einstein/data/mdc/frames/sg"
channel : str
The name of the channel which the injections should be made into. This is prepended by the initials
for each interferometer, so there will be a channel for each interferometer in the gwf.
force : bool
If true this forces the recreation of a GWF file even if it already exists.
Outputs
-------
gwf files
The GWF files for these frames.
"""
for frame in self.frames:
            frame.generate_gwf(mdc, directory, channel=channel, force=force)
def full_logfile(self, mdc, location):
"""
Produce a log file for the entire frame set
"""
full_log = ''
for frame in self.frames:
full_log += frame.generate_log(mdc)
with open(location, "w") as text_file:
text_file.write(full_log)
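# Example (added sketch, not from the original module): how a FrameSet is
# typically driven, assuming `mdc` is an already-built MDCSet and "frames.csv"
# has the "start time", "duration" and "ifo" columns read above.
#
#     frames = FrameSet("frames.csv")
#     frames.full_frameset(mdc, "/data/mdc/frames", channel="SCIENCE")
#     frames.full_logfile(mdc, "/data/mdc/frames/injections.log")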
| isc | -1,914,375,457,587,391,000 | 35.687104 | 291 | 0.54953 | false |
niutool/niuforum | forum/utils.py | 1 | 3033 | import re
from PIL import Image, ImageOps
from io import BytesIO
from django.contrib.auth.models import User
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.http import HttpResponseForbidden
from django.shortcuts import get_object_or_404
from django.core.files.uploadedfile import SimpleUploadedFile
from forum.models import Topic
MENTION_REGEX = re.compile(r'@(\w+)', re.M)
IMAGE_LARGE = 144
IMAGE_MEDIUM = 96
IMAGE_SMALL = 48
NUM_PER_PAGE = 20
def _thumbnail(upload, size, fmt):
img = ImageOps.fit(upload, size, Image.ANTIALIAS)
temp = BytesIO()
img.save(temp, fmt, quality=95)
temp.seek(0)
return temp
def create_thumbnail(src, new_name, ext):
upload = Image.open(BytesIO(src.read()))
fmt = src.content_type.split('/')[-1]
large = _thumbnail(upload, (IMAGE_LARGE, IMAGE_LARGE), fmt)
filename_l = "%s_l.%s" % (new_name, ext)
large_file = SimpleUploadedFile(filename_l, large.read(), content_type=src.content_type)
medium = _thumbnail(upload, (IMAGE_MEDIUM, IMAGE_MEDIUM), fmt)
filename_m = "%s_m.%s" % (new_name, ext)
medium_file = SimpleUploadedFile(filename_m, medium.read(), content_type=src.content_type)
small = _thumbnail(upload, (IMAGE_SMALL, IMAGE_SMALL), fmt)
filename_s = "%s_s.%s" % (new_name, ext)
small_file = SimpleUploadedFile(filename_s, small.read(), content_type=src.content_type)
return large_file, medium_file, small_file
def get_pagination(current_page, num_pages, count):
page_list = []
show_pages = 2*count+1
if show_pages >= num_pages:
page_list.extend(range(1, num_pages+1))
elif current_page - count < 1:
page_list.extend(range(1, show_pages+1))
elif current_page + count > num_pages:
page_list.extend(range(num_pages+1-show_pages, num_pages+1))
else:
page_list.extend(range(current_page-count, current_page+count+1))
return page_list
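# Worked example (added): with current_page=7, num_pages=20, count=2 the window
# is 2*count+1 = 5 pages wide and centred on the current page, clamped at both
# ends, so get_pagination(7, 20, 2) -> [5, 6, 7, 8, 9].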
def topic_pagination(page, topics):
paginator = Paginator(topics, NUM_PER_PAGE)
try:
topic_list = paginator.page(page)
except PageNotAnInteger:
topic_list = paginator.page(1)
except EmptyPage:
topic_list = paginator.page(paginator.num_pages)
page_list = get_pagination(topic_list.number, paginator.num_pages, 2)
return topic_list, page_list
def author_required(view_func):
def _wrapped_view_func(request, *args, **kwargs):
topic_id = kwargs.get('topic_id')
topic = get_object_or_404(Topic, id=topic_id)
if topic.author == request.user:
return view_func(request, *args, **kwargs)
else:
return HttpResponseForbidden()
return _wrapped_view_func
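# Example usage (added, illustrative): wrap any view whose URL conf supplies a
# `topic_id` keyword argument; non-authors receive an HTTP 403 instead of the
# view running.
#
#     @author_required
#     def topic_edit(request, topic_id):
#         ...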
def get_metioned_user(sender, markdown):
mentioned = set(re.findall(MENTION_REGEX, markdown)) - set([sender.username])
# mentioned = set(re.findall(MENTION_REGEX, markdown))
if mentioned:
return User.objects.filter(username__in=mentioned)
return None
| mit | 2,211,155,211,056,811,500 | 32.7 | 94 | 0.667326 | false |
rodrigorm/golimar | plugin/python/golimar/ui/vimui.py | 1 | 8614 | #!/usr/bin/env python
#
# vimui.py
#
# User Interface for Vim
import functools
import vim
class Ui:
def __init__(self, skype):
self.skype = skype
self.is_open = False
self.tabnr = None
def open(self):
if self.is_open:
return
# try:
self.messages = MessagesWindow(self, 'tabnew')
self.messages.create()
self.tabnr = vim.eval('tabpagenr()')
self.friends = FriendsWindow(self, 'vertical belowright new')
self.friends.create()
vim.command('vertical resize 40')
self.chats = ChatsWindow(self, 'belowright new')
self.chats.create()
vim.command('resize +5')
self.messages.focus()
self.compose = ComposeWindow(self, 'rightbelow new')
self.compose.create()
vim.command('resize 5')
self.is_open = True
self.update()
# except Exception as e:
# self.is_open = False
# raise e
def composeMessage(self):
return self.compose.message()
def composeClean(self):
self.compose.clean()
def setChat(self, chat):
self.messages.setChat(chat)
if self.has_focus():
self.messages.markAsSeen()
self.compose.clean()
self.compose.focus()
self.update()
def render(self):
self.friends.update()
self.chats.update()
self.messages.update()
def bind(self):
self.skype.RegisterEventHandler('MessageStatus', self.MessageStatus)
self.skype.RegisterEventHandler('UserStatus', self.UserStatus)
self.skype.RegisterEventHandler('ConnectionStatus', self.UserStatus)
def unbind(self):
self.skype.UnregisterEventHandler('MessageStatus', self.MessageStatus)
self.skype.UnregisterEventHandler('UserStatus', self.UserStatus)
self.skype.UnregisterEventHandler('ConnectionStatus', self.UserStatus)
def MessageStatus(self, message, status):
self.update()
def UserStatus(self, status):
self.update()
def update(self):
self.unbind()
self.render()
if self.has_focus():
self.messages.markAsSeen()
self.bind()
def has_focus(self):
return self.is_open and vim.eval('tabpagenr()') == self.tabnr
def selectedFriend(self):
return self.friends.selected()
def selectedChat(self):
return self.chats.selected()
class Window:
name = 'WINDOW'
open_cmd = 'new'
buftype = 'nofile'
def __init__(self, ui, open_cmd):
self.buffer = None
self.ui = ui
self.open_cmd = open_cmd
self.is_open = False
def create(self):
""" create window """
vim.command('silent %s %s' % (self.open_cmd, self.name))
vim.command('setlocal buftype=%s modifiable winfixheight winfixwidth\
nobackup noswapfile' % (self.buftype))
self.buffer = vim.current.buffer
self.is_open = True
self.on_create()
def on_create(self):
""" callback """
def clean(self):
if self.buffer_empty():
return
self.buffer[:] = []
def write(self, msg, return_focus=True, after='normal G'):
self._return_focus(self.__curry(self._write, msg, after), return_focus)
def _write(self, msg, after='normal G'):
if not self.is_open:
self.create()
if self.buffer_empty():
self.buffer[:] = str(msg).split('\n')
else:
self.buffer.append(str(msg).split('\n'))
self.command(after)
def buffer_empty(self):
if len(self.buffer) == 1 \
and len(self.buffer[0]) == 0:
return True
else:
return False
def command(self, cmd):
""" go to my window & execute command """
winnr = self.getwinnr()
if winnr != int(vim.eval("winnr()")):
vim.command(str(winnr) + 'wincmd w')
vim.command(cmd)
def getwinnr(self):
return int(vim.eval("bufwinnr('"+self.name+"')"))
def set_line(self, lineno, return_focus=True):
self._return_focus(self.__curry(self._set_line, lineno), return_focus)
def _set_line(self, lineno):
self.focus()
vim.command("normal %sgg" % str(lineno))
def get_line(self):
return int(self._return_focus(self.__curry(self._get_line), True))
def _get_line(self):
self.focus()
return vim.current.range.start
def eval(self, cmd):
return self._return_focus(self.__curry(self._eval, cmd), True)
def _eval(self, cmd):
self.focus()
return vim.eval(cmd)
def focus(self):
vim.command(str(self.winnr()) + "wincmd w")
def winnr(self):
return int(vim.eval("bufwinnr('" + self.name + "')"))
def _return_focus(self, callback, flag=True):
if flag:
return self.__return_focus(callback)
else:
return callback()
def __return_focus(self, callback):
prev_win = vim.eval('winnr()')
result = callback()
vim.command('%swincmd W' % prev_win)
return result
def __curry(self, callback, *args):
return functools.partial(callback, *args)
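# Note (added): `_return_focus` runs a buffer operation and then jumps the
# cursor back to the window that was active before the call, e.g.
#
#     self._return_focus(self.__curry(self._write, msg, after), return_focus)
#
# executes `_write` in this plugin window and restores focus afterwards via
# `<prev_win>wincmd W`.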
class FriendsWindow(Window):
name = "Friends"
def on_create(self):
self.update()
vim.command('\
nnoremap <buffer> <cr> :python golimar.openSelectedFriend()\
<cr>')
vim.command('set filetype=golimarfriends')
def update(self):
self.clean()
for user in self.ui.skype.Friends:
self.write('(%s) %s' % (user.OnlineStatus, user.Handle))
self.set_line(0)
def selected(self):
return self.ui.skype.Friends[self.get_line()]
class ChatsWindow(Window):
name = "Chats"
def on_create(self):
self.update()
vim.command('nnoremap <buffer> <cr> :python golimar.openSelectedChat()<cr>')
def update(self):
self.clean()
for chat in self.ui.skype.RecentChats:
self.write(self._topic(chat) + self._unseen(chat))
self.set_line(0)
def _topic(self, chat):
if chat.Topic == '':
for member in chat.Members:
if member.Handle != self.ui.skype.CurrentUser.Handle:
return member.Handle
else:
return chat.Topic.encode('utf-8')
def _unseen(self, chat):
count = self.unseenCount(chat)
if count:
return ' [%i]' % (count)
return ''
def unseenCount(self, chat):
result = 0
for message in chat.RecentMessages:
if message.Status == 'RECEIVED':
result += 1
return result
def selected(self):
return self.ui.skype.RecentChats[self.get_line()]
class MessagesWindow(Window):
name = 'Skype'
def on_create(self):
self.chat = None
vim.command('set filetype=golimarchat')
def setChat(self, chat):
self.chat = chat
self.update()
def update(self):
self.clean()
if self.chat is None:
return
biggerName = 0
for message in self.chat.RecentMessages:
if len(message.FromHandle) > biggerName:
biggerName = len(message.FromHandle)
biggerName += 2
width = self.width()
pad = 21 + 1 + biggerName + 1 + 1
for message in self.chat.RecentMessages:
datetime = str(message.Datetime)
userFrom = '%s' % (message.FromHandle)
userFrom = userFrom.rjust(biggerName)
body = self.__body(message.Body.encode('utf-8'), width, pad)
self.write('[%s] %s: %s' % (datetime, userFrom, body))
def __body(self, body, width, pad):
lines = str(body).split('\n')
result = []
for line in lines:
result.extend(self.__split_str_into_len(str(line), width - pad - 4))
return ('\n' + (' ' * pad)).join(result)
def width(self):
return int(self.eval('winwidth(0)'))
def __split_str_into_len(self, s, l=2):
""" Split a string into chunks of length l """
return [s[i:i+l] for i in range(0, len(s), l)]
def markAsSeen(self):
if self.chat is None:
return
for message in self.chat.RecentMessages:
if message.Status == 'RECEIVED':
message.MarkAsSeen()
class ComposeWindow(Window):
name = 'Compose'
buftype = 'acwrite'
def message(self):
return '\n'.join(self.buffer)
| gpl-2.0 | -7,399,287,538,600,110,000 | 25.262195 | 84 | 0.565823 | false |
DmitriySalnikov/godot | modules/mono/build_scripts/godot_net_sdk_build.py | 11 | 1755 | # Build Godot.NET.Sdk solution
import os
from SCons.Script import Dir
def build_godot_net_sdk(source, target, env):
# source and target elements are of type SCons.Node.FS.File, hence why we convert them to str
module_dir = env["module_dir"]
solution_path = os.path.join(module_dir, "editor/Godot.NET.Sdk/Godot.NET.Sdk.sln")
build_config = "Release"
from .solution_builder import build_solution
extra_msbuild_args = ["/p:GodotPlatform=" + env["platform"]]
build_solution(env, solution_path, build_config, extra_msbuild_args)
# No need to copy targets. The Godot.NET.Sdk csproj takes care of copying them.
def get_nupkgs_versions(props_file):
import xml.etree.ElementTree as ET
tree = ET.parse(props_file)
root = tree.getroot()
return {
"Godot.NET.Sdk": root.find("./PropertyGroup/PackageVersion_Godot_NET_Sdk").text.strip(),
"Godot.SourceGenerators": root.find("./PropertyGroup/PackageVersion_Godot_SourceGenerators").text.strip(),
}
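# For reference (assumed shape, derived from the XPath lookups above and not
# verified against the repository): SdkPackageVersions.props is expected to
# look roughly like
#
#   <Project>
#     <PropertyGroup>
#       <PackageVersion_Godot_NET_Sdk>4.0.0-dev2</PackageVersion_Godot_NET_Sdk>
#       <PackageVersion_Godot_SourceGenerators>4.0.0-dev1</PackageVersion_Godot_SourceGenerators>
#     </PropertyGroup>
#   </Project>
#
# where the version numbers are placeholders.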
def build(env_mono):
assert env_mono["tools"]
output_dir = Dir("#bin").abspath
editor_tools_dir = os.path.join(output_dir, "GodotSharp", "Tools")
nupkgs_dir = os.path.join(editor_tools_dir, "nupkgs")
module_dir = os.getcwd()
nupkgs_versions = get_nupkgs_versions(os.path.join(module_dir, "SdkPackageVersions.props"))
target_filenames = [
"Godot.NET.Sdk.%s.nupkg" % nupkgs_versions["Godot.NET.Sdk"],
"Godot.SourceGenerators.%s.nupkg" % nupkgs_versions["Godot.SourceGenerators"],
]
targets = [os.path.join(nupkgs_dir, filename) for filename in target_filenames]
cmd = env_mono.CommandNoCache(targets, [], build_godot_net_sdk, module_dir=module_dir)
env_mono.AlwaysBuild(cmd)
| mit | 5,912,477,984,069,791,000 | 30.909091 | 114 | 0.688889 | false |
Kloudless/kloudless-python | kloudless/http.py | 1 | 4038 | from .util import logger
from . import config
from . import exceptions
import functools
import json
import time
import six
from abc import ABCMeta, abstractproperty
from requests.structures import CaseInsensitiveDict
class BaseAuth(six.with_metaclass(ABCMeta)):
    # six.with_metaclass keeps ABCMeta effective on both Python 2 and 3; a bare
    # __metaclass__ attribute is silently ignored under Python 3.
    scheme = abstractproperty()
def __init__(self, key):
self.key = key
@property
def auth_header(self):
return '%s %s' % (self.scheme, self.key)
def __call__(self, request):
request.headers['Authorization'] = self.auth_header
return request
class APIKeyAuth(BaseAuth):
scheme = 'APIKey'
class DevKeyAuth(BaseAuth):
scheme = 'DeveloperKey'
class BearerTokenAuth(BaseAuth):
scheme = 'Bearer'
_get_requestor = functools.partial
def request(method, path, configuration=None, **kwargs):
if configuration is None: configuration = {}
configuration = config.merge(configuration)
if path.startswith('applications'):
if not configuration['dev_key']:
raise exceptions.ConfigurationException(
"A Developer Key must be provided. You can get one at "
"https://developers.kloudless.com and set it by calling "
"'kloudless.configure(dev_key=\"DEV_KEY\")' prior to making "
"requests.")
kwargs['auth'] = DevKeyAuth(configuration['dev_key'])
elif configuration['api_key']:
kwargs['auth'] = APIKeyAuth(configuration['api_key'])
elif configuration['token']:
kwargs['auth'] = BearerTokenAuth(configuration['token'])
else:
raise exceptions.ConfigurationException(
"An API Key or Bearer Token must be provided. You can get an API Key at "
"https://developers.kloudless.com and set it by calling "
"'kloudless.configure(api_key=\"API_KEY\")' prior to making "
"requests. You can get a Bearer token by authenticating an account and "
"set it by calling 'kloudless.configure(token=\"TOKEN\")' as well.")
url = "%s/v%s/%s" % (configuration['base_url'],
configuration['api_version'],
path)
headers = kwargs['headers'] = CaseInsensitiveDict(kwargs.get('headers') or {})
# Set default headers if not present
for header_key, header_val in six.iteritems(configuration.get('headers') or {}):
if header_val is not None and header_key not in headers:
headers[header_key] = header_val
# Set content type
if kwargs.get('data'):
ctype = headers.setdefault('Content-Type', 'application/json')
if ctype.lower() == 'application/json':
kwargs['data'] = json.dumps(kwargs['data'])
# Make request
requestor = _get_requestor(method, url, **kwargs)
response = _request(requestor, configuration)
return response
def _request(requestor, configuration):
response = requestor()
if not response.ok:
logger.error("Request to '%s' failed: %s - %s" %
(response.url, response.status_code, response.text))
if response.status_code == 403:
raise exceptions.AuthenticationException(response=response)
elif response.status_code == 401:
raise exceptions.AuthorizationException(response=response)
elif response.status_code == 429:
throttle_obj = configuration.get('throttle_retry_strategy')
if not throttle_obj:
raise exceptions.RateLimitException(response=response)
delay = throttle_obj.track_and_delay(response)
if delay is not None:
time.sleep(delay)
return _request(requestor, configuration)
elif response.status_code >= 500:
raise exceptions.ServerException(response=response)
else:
raise exceptions.APIException(response=response)
else:
logger.debug("Request to '%s' succeeded. Status code: %s" %
(response.url, response.status_code))
return response
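# Example (added, illustrative sketch -- not part of the SDK): an object passed
# as configuration['throttle_retry_strategy'] only needs a
# track_and_delay(response) method returning seconds to sleep, or None to stop
# retrying (the 429 response is then returned as-is):
#
#     class FixedDelayRetry(object):
#         def __init__(self, delay=1.0, max_attempts=3):
#             self.delay = delay
#             self.attempts = 0
#             self.max_attempts = max_attempts
#         def track_and_delay(self, response):
#             self.attempts += 1
#             return self.delay if self.attempts <= self.max_attempts else None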
| mit | -1,713,365,648,381,834,800 | 33.810345 | 85 | 0.631748 | false |
epam/DLab | infrastructure-provisioning/src/general/scripts/aws/edge_start.py | 1 | 3048 | #!/usr/bin/python
# *****************************************************************************
#
# Copyright (c) 2016, EPAM SYSTEMS INC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ******************************************************************************
from dlab.fab import *
from dlab.actions_lib import *
import sys
if __name__ == "__main__":
local_log_filename = "{}_{}_{}.log".format(os.environ['conf_resource'], os.environ['edge_user_name'],
os.environ['request_id'])
local_log_filepath = "/logs/" + os.environ['conf_resource'] + "/" + local_log_filename
logging.basicConfig(format='%(levelname)-8s [%(asctime)s] %(message)s',
level=logging.DEBUG,
filename=local_log_filepath)
# generating variables dictionary
create_aws_config_files()
print('Generating infrastructure names and tags')
edge_conf = dict()
edge_conf['service_base_name'] = os.environ['conf_service_base_name']
edge_conf['instance_name'] = edge_conf['service_base_name'] + "-" + os.environ['edge_user_name'] + '-edge'
edge_conf['tag_name'] = edge_conf['service_base_name'] + '-Tag'
logging.info('[START EDGE]')
print('[START EDGE]')
try:
start_ec2(edge_conf['tag_name'], edge_conf['instance_name'])
except Exception as err:
append_result("Failed to start edge.", str(err))
sys.exit(1)
except:
sys.exit(1)
try:
instance_hostname = get_instance_hostname(edge_conf['tag_name'], edge_conf['instance_name'])
addresses = get_instance_ip_address(edge_conf['tag_name'], edge_conf['instance_name'])
ip_address = addresses.get('Private')
public_ip_address = addresses.get('Public')
print('[SUMMARY]')
logging.info('[SUMMARY]')
print("Instance name: {}".format(edge_conf['instance_name']))
print("Hostname: {}".format(instance_hostname))
print("Public IP: {}".format(public_ip_address))
print("Private IP: {}".format(ip_address))
with open("/root/result.json", 'w') as result:
res = {"instance_name": edge_conf['instance_name'],
"hostname": instance_hostname,
"public_ip": public_ip_address,
"ip": ip_address,
"Action": "Start up notebook server"}
print(json.dumps(res))
result.write(json.dumps(res))
except:
print("Failed writing results.")
sys.exit(0)
| apache-2.0 | -5,064,682,080,044,262,000 | 39.64 | 110 | 0.580052 | false |
njwilson23/rasterio | rasterio/tool.py | 1 | 5429 | """
Implementations of various common operations, like `show()` for displaying an
array or with matplotlib, and `stats()` for computing min/max/avg. Most can
handle a numpy array or `rasterio.Band()`. Primarily supports `$ rio insp`.
"""
from __future__ import absolute_import
import code
import collections
import logging
import warnings
try:
import matplotlib.pyplot as plt
except ImportError:
plt = None
except RuntimeError as e:
# Certain environment configurations can trigger a RuntimeError like:
# Trying to import matplotlibRuntimeError: Python is not installed as a
# framework. The Mac OS X backend will not be able to function correctly
# if Python is not installed as a framework. See the Python ...
warnings.warn(str(e), RuntimeWarning, stacklevel=2)
plt = None
import numpy
import rasterio
from rasterio.five import zip_longest
logger = logging.getLogger('rasterio')
Stats = collections.namedtuple('Stats', ['min', 'max', 'mean'])
# Collect dictionary of functions for use in the interpreter in main()
funcs = locals()
def show(source, cmap='gray', with_bounds=True):
"""
Display a raster or raster band using matplotlib.
Parameters
----------
source : array-like or (raster dataset, bidx)
If array-like, should be of format compatible with
matplotlib.pyplot.imshow. If the tuple (raster dataset, bidx),
selects band `bidx` from raster.
cmap : str (opt)
Specifies the colormap to use in plotting. See
matplotlib.Colors.Colormap. Default is 'gray'.
with_bounds : bool (opt)
Whether to change the image extent to the spatial bounds of the image,
rather than pixel coordinates. Only works when source is
(raster dataset, bidx).
"""
if isinstance(source, tuple):
arr = source[0].read(source[1])
xs = source[0].res[0] / 2.
ys = source[0].res[1] / 2.
if with_bounds:
extent = (source[0].bounds.left - xs, source[0].bounds.right - xs,
source[0].bounds.bottom - ys, source[0].bounds.top - ys)
else:
extent = None
else:
arr = source
extent = None
if plt is not None:
imax = plt.imshow(arr, cmap=cmap, extent=extent)
fig = plt.gcf()
fig.show()
else:
raise ImportError("matplotlib could not be imported")
def stats(source):
"""Return a tuple with raster min, max, and mean.
"""
if isinstance(source, tuple):
arr = source[0].read(source[1])
else:
arr = source
return Stats(numpy.min(arr), numpy.max(arr), numpy.mean(arr))
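# Example (added, illustrative): both call styles accepted by stats(), e.g.
#
#     with rasterio.open("example.tif") as src:
#         print(stats((src, 1)))      # (dataset, band index) tuple
#         print(stats(src.read(1)))   # or a plain numpy array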
def show_hist(source, bins=10, masked=True, title='Histogram'):
"""
Easily display a histogram with matplotlib.
Parameters
----------
bins : int, optional
Compute histogram across N bins.
data : np.array or rasterio.Band or tuple(dataset, bidx)
Input data to display. The first three arrays in multi-dimensional
arrays are plotted as red, green, and blue.
masked : bool, optional
When working with a `rasterio.Band()` object, specifies if the data
should be masked on read.
title : str, optional
Title for the figure.
"""
if plt is None:
raise ImportError("Could not import matplotlib")
if isinstance(source, (tuple, rasterio.Band)):
arr = source[0].read(source[1], masked=masked)
else:
arr = source
# The histogram is computed individually for each 'band' in the array
# so we need the overall min/max to constrain the plot
rng = arr.min(), arr.max()
    if len(arr.shape) == 2:
arr = [arr]
colors = ['gold']
else:
colors = ('red', 'green', 'blue', 'violet', 'gold', 'saddlebrown')
# If a rasterio.Band() is given make sure the proper index is displayed
# in the legend.
if isinstance(source, (tuple, rasterio.Band)):
labels = [str(source[1])]
else:
labels = (str(i + 1) for i in range(len(arr)))
# This loop should add a single plot each band in the input array,
# regardless of if the number of bands exceeds the number of colors.
# The colors slicing ensures that the number of iterations always
# matches the number of bands.
# The goal is to provide a curated set of colors for working with
# smaller datasets and let matplotlib define additional colors when
# working with larger datasets.
for bnd, color, label in zip_longest(arr, colors[:len(arr)], labels):
plt.hist(
bnd.flatten(),
bins=bins,
alpha=0.5,
color=color,
label=label,
range=rng
)
plt.legend(loc="upper right")
plt.title(title, fontweight='bold')
plt.grid(True)
plt.xlabel('DN')
plt.ylabel('Frequency')
fig = plt.gcf()
fig.show()
def main(banner, dataset, alt_interpreter=None):
""" Main entry point for use with python interpreter """
local = dict(funcs, src=dataset, np=numpy, rio=rasterio, plt=plt)
if not alt_interpreter:
code.interact(banner, local=local)
elif alt_interpreter == 'ipython':
import IPython
IPython.InteractiveShell.banner1 = banner
IPython.start_ipython(argv=[], user_ns=local)
else:
raise ValueError("Unsupported interpreter '%s'" % alt_interpreter)
return 0
| bsd-3-clause | 3,904,077,660,816,779,300 | 30.201149 | 78 | 0.63695 | false |
MixedEmotions/27_emotion_video_dcu | emotionService/emotionService.py | 1 | 5891 |
# coding: utf-8
# In[ ]:
from __future__ import division
import logging
import os
import xml.etree.ElementTree as ET
from senpy.plugins import EmotionPlugin, SenpyPlugin
from senpy.models import Results, EmotionSet, Entry, Emotion, Error
logger = logging.getLogger(__name__)
import numpy as np
import math, itertools
from collections import defaultdict
import gzip
from datetime import datetime
import requests, shutil
import subprocess
import sys
import validators
from haolin.ESClass import DCU_EmotionService
import json
class emotionService(EmotionPlugin):
def __init__(self, info, *args, **kwargs):
super(emotionService, self).__init__(info, *args, **kwargs)
self.name = info['name']
self.id = info['module']
self._info = info
local_path = os.path.dirname(os.path.abspath(__file__))
self._dimensions = ['V','A']
self._centroid_mappings = {
"V": "http://www.gsi.dit.upm.es/ontologies/onyx/vocabularies/anew/ns#valence",
"A": "http://www.gsi.dit.upm.es/ontologies/onyx/vocabularies/anew/ns#arousal",
"D": "http://www.gsi.dit.upm.es/ontologies/onyx/vocabularies/anew/ns#dominance"
}
self._storage_path = '/senpy-plugins/tmp'
def activate(self, *args, **kwargs):
st = datetime.now()
self._predictor = DCU_EmotionService()
logger.info("{} {}".format(datetime.now() - st, "predictor loaded"))
st = datetime.now()
logger.info("{} {}".format(datetime.now() - st, "active"))
logger.info("%s plugin is ready to go!" % self.name)
def deactivate(self, *args, **kwargs):
try:
logger.info("%s plugin is being deactivated..." % self.name)
except Exception:
print("Exception in logger while reporting deactivation of %s" % self.name)
# CUSTOM FUNCTION
def _download_file_old(self, saveFolder = '/senpy-plugins/tmp', url = "http://mixedemotions.insight-centre.org/tmp/little-girl.mp4"):
logger.info("{} {}".format(datetime.now(), "downloading "+url))
st = datetime.now()
global dump
downloadedFile = requests.get(url, stream=True)
dump = downloadedFile.raw
path, filename = os.path.dirname(url), os.path.basename(url)
with open(os.path.join(saveFolder, filename), 'wb') as file:
shutil.copyfileobj(dump, file)
del dump
del downloadedFile
logger.info("{} {}".format(datetime.now() - st, "downloaded "+url))
return os.path.join(saveFolder,filename)
def _download_file(self, saveFolder = '/senpy-plugins/tmp', url = "http://mixedemotions.insight-centre.org/tmp/little-girl.mp4"):
st = datetime.now()
logger.info("{} {}".format(datetime.now(), "downloading "+url))
path, filename = os.path.dirname(url), os.path.basename(url)
outfilename = os.path.join(saveFolder,filename)
subprocess.call(['wget', '-O', outfilename, url])
logger.info("{} {}".format(datetime.now() - st, "downloaded "+url))
return outfilename
def _remove_file(self, filename):
st = datetime.now()
logger.info("{} {}".format(datetime.now(), "deleting "+ filename))
subprocess.call(['rm', '-f', filename])
logger.info("{} {}".format(datetime.now() - st, "deleted "+filename))
def _convert_longformat_to_shortformat(self, json_long):
json_long = json.loads(json_long)
json_short = {
'V': np.mean([json_long[frame]['0']['emotion']['pad:pleasure'] for frame in json_long]) ,
'A': np.mean([json_long[frame]['0']['emotion']['pad:arousal' ] for frame in json_long])
}
return json_short
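# For reference (added; shape assumed from the access pattern above): the
# long-format JSON is expected to look like
#     {"<frame>": {"0": {"emotion": {"pad:pleasure": 0.1,
#                                    "pad:arousal": -0.2}}}, ...}
# i.e. one entry per video frame with per-face PAD emotion values, which this
# helper averages into a single valence/arousal pair.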
def _extract_features(self, filename, convert=True):
# predictor = DCU_EmotionService()
json_res = self._predictor.analysis_video(filename, vis=False)
if convert:
json_res = self._convert_longformat_to_shortformat(json_res)
return json_res
def analyse(self, **params):
logger.debug("emotionService with params {}".format(params))
filename = params.get("i", None)
## FILE MANIPULATIONS ------------------------------- \
if validators.url(filename):
filename = self._download_file(saveFolder = self._storage_path, url = filename)
else:
filename = os.path.join(self._storage_path,filename)
logger.info("{} {}".format(datetime.now(), filename))
if not os.path.isfile(filename):
raise Error("File %s does not exist" % filename)
## EXTRACTING FEATURES ------------------------------- \
feature_set = self._extract_features(filename, convert=True)
# self._remove_file(filename)
## GENERATING OUTPUT --------------------------------- \
response = Results()
entry = Entry()
entry['filename'] = os.path.basename(filename)
emotionSet = EmotionSet()
emotionSet.id = "Emotions"
emotion1 = Emotion()
for dimension in self._dimensions:
emotion1[ self._centroid_mappings[dimension] ] = 5*(1+feature_set[dimension])
emotionSet.onyx__hasEmotion.append(emotion1)
entry.emotions = [emotionSet,]
response.entries.append(entry)
return response
| apache-2.0 | -8,383,515,891,701,562,000 | 32.282486 | 137 | 0.55101 | false |
muccc/luftschleuse2 | software/lockd/announce.py | 1 | 1588 | # This file is part of lockd, the daemon of the luftschleuse2 project.
#
# See https://github.com/muccc/luftschleuse2 for more information.
#
# Copyright (C) 2013 Tobias Schneider <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import time
import socket
class Announcer:
def __init__(self, host, port):
self.timestamp = time.time()
self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
self.sock.bind(('0.0.0.0', 2080))
self.target = (host, port)
self.message = 'unknown'
def tick(self):
if time.time() - self.timestamp > 1:
self.announce()
self.timestamp = time.time()
def announce(self):
self.sock.sendto(self.message, self.target)
def update_state(self, state):
self.message = state.get_state_as_string()
f = open("/tmp/system_state", "w")
f.write(state.get_state_as_string() + "\n")
f.close()
| gpl-3.0 | 6,334,732,133,319,795,000 | 35.930233 | 74 | 0.65869 | false |
balloob/home-assistant | setup.py | 1 | 2249 | #!/usr/bin/env python3
"""Home Assistant setup script."""
from datetime import datetime as dt
from setuptools import find_packages, setup
import homeassistant.const as hass_const
PROJECT_NAME = "Home Assistant"
PROJECT_PACKAGE_NAME = "homeassistant"
PROJECT_LICENSE = "Apache License 2.0"
PROJECT_AUTHOR = "The Home Assistant Authors"
PROJECT_COPYRIGHT = f" 2013-{dt.now().year}, {PROJECT_AUTHOR}"
PROJECT_URL = "https://www.home-assistant.io/"
PROJECT_EMAIL = "[email protected]"
PROJECT_GITHUB_USERNAME = "home-assistant"
PROJECT_GITHUB_REPOSITORY = "core"
PYPI_URL = f"https://pypi.python.org/pypi/{PROJECT_PACKAGE_NAME}"
GITHUB_PATH = f"{PROJECT_GITHUB_USERNAME}/{PROJECT_GITHUB_REPOSITORY}"
GITHUB_URL = f"https://github.com/{GITHUB_PATH}"
DOWNLOAD_URL = f"{GITHUB_URL}/archive/{hass_const.__version__}.zip"
PROJECT_URLS = {
"Bug Reports": f"{GITHUB_URL}/issues",
"Dev Docs": "https://developers.home-assistant.io/",
"Discord": "https://discordapp.com/invite/c5DvZ4e",
"Forum": "https://community.home-assistant.io/",
}
PACKAGES = find_packages(exclude=["tests", "tests.*"])
REQUIRES = [
"aiohttp==3.7.1",
"astral==1.10.1",
"async_timeout==3.0.1",
"attrs==19.3.0",
"bcrypt==3.1.7",
"certifi>=2020.6.20",
"ciso8601==2.1.3",
"httpx==0.16.1",
"importlib-metadata==1.6.0;python_version<'3.8'",
"jinja2>=2.11.2",
"PyJWT==1.7.1",
# PyJWT has loose dependency. We want the latest one.
"cryptography==3.2.0",
"pip>=8.0.3",
"python-slugify==4.0.1",
"pytz>=2020.1",
"pyyaml==5.3.1",
"requests==2.24.0",
"ruamel.yaml==0.15.100",
"voluptuous==0.12.0",
"voluptuous-serialize==2.4.0",
"yarl==1.4.2",
]
MIN_PY_VERSION = ".".join(map(str, hass_const.REQUIRED_PYTHON_VER))
setup(
name=PROJECT_PACKAGE_NAME,
version=hass_const.__version__,
url=PROJECT_URL,
download_url=DOWNLOAD_URL,
project_urls=PROJECT_URLS,
author=PROJECT_AUTHOR,
author_email=PROJECT_EMAIL,
packages=PACKAGES,
include_package_data=True,
zip_safe=False,
install_requires=REQUIRES,
python_requires=f">={MIN_PY_VERSION}",
test_suite="tests",
entry_points={"console_scripts": ["hass = homeassistant.__main__:main"]},
)
| apache-2.0 | 612,110,318,167,927,600 | 28.592105 | 77 | 0.655402 | false |
Harhoy/transport | transport.py | 1 | 9259 | from __future__ import division
import numpy as np
import math as m
from easygui import multenterbox
import pandas as pd
import matplotlib.pyplot as plt
import math as m
def import_xl(file_path):
df = pd.read_excel(file_path,header = None)
df = df.values
return df
def export_xl(file_path,sheets):
writer = pd.ExcelWriter(file_path)
for sheet,name in sheets.items():
df = pd.DataFrame(name)
df.to_excel(writer,sheet)
writer.save()
# Extract a column from a matrix
def column(matrix, i):
return [row[i] for row in matrix]
# Extract a row from a matrix
def row(matrix, i):
return [column[i] for column in matrix]
# Selection sort, O(n^2)
def selection_sort(array):
n = len(array)
for i in range(0,n):
smallest = i
for j in range(i,n):
if array[j]<array[smallest]:
smallest = j
copy = array[i]
array[i] = array[smallest]
array[smallest] = copy
return array
# Check whether two lists share at least one common number
def common_node(array_1,array_2):
x = selection_sort(array_1)
y = selection_sort(array_2)
i = 0
j = 0
share = 0
stop = max([len(x),len(y)])-1
while min([i,j])< stop:
if x[i]>y[j]:
j+=1
elif x[i]<y[j]:
i+=1
else:
share = 1
j = 10**6
i = 10**6
return share
def common_node_count(array_1,array_2):
x = selection_sort(array_1)
y = selection_sort(array_2)
i = 0
j = 0
share = 0
while i < len(x) and j < len(y):
if x[i]>y[j]:
j+=1
elif x[i]<y[j]:
i+=1
else:
share += 1
j +=1
i +=1
return share
# SHORTEST-PATH FUNCTIONS
# Build a graph (cost matrix) from an edge list
def make_graph(array):
    #nodes = common_node_count(column(array,0),column(array,1))
    nodes = 35  # node count is hard-coded for this particular network
    matrix = np.full((nodes,nodes),10**6)  # initialise with large values ("infinity") to be replaced
    for i in range(0,len(array)):  # main loop over all links
        # subtract one so node ids line up with 0-based python indexing
matrix[array[i][1]-1][array[i][0]-1] = array[i][2]
matrix[array[i][0]-1][array[i][1]-1] = array[i][2]
np.fill_diagonal(matrix, 0)
return matrix
# Build the n x n shortest-path length matrix (Floyd-Warshall)
def floyd_warshall(array):
matrix = make_graph(array)
#nodes = common_node_count(column(array,0),column(array,1))
nodes = 35
pred = np.full((nodes,nodes),-1)
for i in range(0,nodes):
for j in range(0,nodes):
if i != j:
pred[i][j] = i
for k in range(0,nodes):
for i in range(0,nodes):
for j in range(0,nodes):
if matrix[i][j] > matrix[i][k] + matrix[k][j]:
matrix[i][j] = matrix[i][k] + matrix[k][j]
pred[i][j] = pred[k][j]
return matrix,pred
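# Example (added, illustrative): recover a shortest route from the two matrices.
#
#     lengths, pred = floyd_warshall(graf_edit)   # graf_edit from get_network()
#     route = get_path(pred, 0, 5)                # node indices are 0-based here
#     cost = lengths[0][5]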
# Load network data from a csv file into a link array
def get_network(net_csv):
graf = open(net_csv,'r')
lenker=0
for line in graf:
lenker+=1
graf_edit = np.full((lenker, 3),0)
graf = open(net_csv,'r')
k = 0
for line in graf:
stuff = line.split(";")
graf_edit[k][0] = float(stuff[0])
graf_edit[k][1] = float(stuff[1])
temp = stuff[2].split('\n')[0]
graf_edit[k][2] = float(temp)
k+=1
return graf_edit
# Build a path vector by walking the predecessor matrix recursively
def path(p,i,j,path_vec):
if i == j:
path_vec.append(i)
else:
path(p, i, p[i][j],path_vec)
path_vec.append(j)
# Extract one specific path
def get_path(p,i,j):
#j = j + 1
path_vec=[]
path(p,i,j,path_vec)
#for i in range(1,len(path_vec)):
# path_vec[i] = path_vec[i] - 1
return path_vec
# Build an adjacency matrix (unfinished)
def build_adj(pred):
adj_mat = np.zeros((len(pred),len(pred)))
array_a = []
array_b = []
for i in range(1,len(pred)):
for j in range(1,len(pred)):
array_a = get_path(pred,i,j)
print array_a
array_b = get_path(pred,2,10)
print array_b
try:
adj_mat[1][j] = common_node(array_a,array_b)
except:
adj_mat[1][j] = 0
print adj_mat[1][j]
return adj_mat
# Network loader
# Arguments: (1) graph (2) network link array (3) od-matrix (4) predecessor matrix
def network_loader(graf,net,od,pred):
    # Number of nodes
n = len(od)-1
    # Editing pass: reset counts and refresh distances
    for k in range(0,len(net)):
        net[k][3]=0  # reset the trip count
        net[k][2]=graf[k][2]  # copy in the updated distances from the graph
    # Assign the trips onto the network
for i in range(0,n):
for j in range(0,n):
path = get_path(pred,i,j)
len_path=get_len_path(path)
for h in range(0,len_path):
for k in range(0,len(net)):
if net[k][0] == path[h]+1 and net[k][1] == path[1+h]+1:
net[k][3] += int(od[i][j])
elif net[k][1] == path[h]+1 and net[k][0] == path[1+h]+1:
net[k][3] += int(od[i][j])
return net
#a=get_path(pred,5,12)
# GRAVITY MODEL FUNCTIONS
def deter_mat_make(length_mat):
deter_mat = np.zeros((len(length_mat),len(length_mat)))
for i in range(0,len(length_mat)):
for j in range(0,len(length_mat)):
deter_mat[i][j] = deter(length_mat[i][j])
return deter_mat
def deter(length):
return 2.71**(beta*length)
def sumproduct(list1,list2):
sums = 0
for i in range(0,len(list1)):
sums += list1[i]*list2[i]
return sums
def gravity(origin, destination, length_mat):
    # Initialization
    deter_mat = deter_mat_make(length_mat)  # deterrence matrix
    dimension = len(origin)  # matrix dimension
    alpha = [1]*(dimension)  # initialise the alpha vector
    beta = [1]*(dimension)  # initialise the beta vector
    largest = 10**6  # initialise the largest deviation
    alpha_last = alpha  # alpha from the previous iteration (note: aliases alpha)
    beta_last = beta  # beta from the previous iteration (note: aliases beta)
    k = 0  # iteration counter
    iterasjoner = []
    # Main loop
while largest > .00001:
        # Update the balancing factors
for p in range(0,dimension):
alpha[p] = origin[p]/(sumproduct(beta_last,column(deter_mat,p)))
beta[p] = destination[p]/(sumproduct(alpha,row(deter_mat,p)))
largest = 0
        # Loop to find the largest deviation
for j in range(0,dimension):
current = alpha[j]*sumproduct(beta,column(deter_mat,j))-origin[j]
if current>largest:
largest = current
        # Keep beta from this iteration
        beta_last = beta
        iterasjoner.append(largest)
        # One more iteration done
        k+=1
        print "Convergence, gravity model", largest
if k == maxiter:
largest = 0
return alpha,beta,k,iterasjoner
def create_od(origin,destination, length_mat):
alpha,beta,k,iterasjoner = gravity(origin, destination, length_mat)
deter_mat = deter_mat_make(length_mat)
od = np.zeros((len(origin),len(origin)))
for i in range(0,len(origin)):
for j in range(0,len(origin)):
od[i][j] = alpha[i]*beta[j]*deter_mat[i][j]
return od,alpha,beta,k,iterasjoner
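# Note (added): this is a doubly-constrained gravity model. The balanced matrix
# satisfies od[i][j] = alpha[i] * beta[j] * 2.71**(b * length_mat[i][j]), where
# b is the module-level deterrence parameter `beta` used by deter() (assumed to
# be set elsewhere, and negative so longer trips are weighted down) -- distinct
# from the local balancing vector `beta` inside gravity(). Balancing stops when
# row and column sums match `origin`/`destination` within tolerance, or after
# `maxiter` iterations.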
def calc_pt_matrix(od,length_mat):
out_od = np.zeros((len(od),len(od)))
for i in range(0,len(od)):
for j in range(0,len(od)):
out_od[i][j] = int(out_od[i][j])*length_mat[i][j]
return out_od
def get_min(net):
smallest = 10**6
smallest_id = 10**6
for i in range(0,len(net)):
if net[i][3]/net[i][2]<smallest and net[i][5]==0:
smallest = net[i][3]/net[i][2]
smallest_id = i
return smallest_id,smallest
def change_graph(graph,net):
graph_out = graph
for i in range(0,len(net)):
if net[i][5]==1:
graph_out[i][2]=k_just*graph_out[i][2]
return graph_out
def production(net):
sumcost = 0
for i in range(0,len(net)):
if net[i][5]!=1:
sumcost += (net[i][3]/capacity)*net[i][2]
return sumcost
def sum_pass(net):
sumpass = 0
for i in range(0,len(net)):
sumpass+=net[i][3]
return sumpass
def get_len_path(path):
len_path = 0
if len(path) < 3:
len_path = 0
elif len(path) == 3:
len_path = 2
else:
len_path=int(len(path)/2)+int(len(path)%2)+1
return len_path
def obj(od,length_mat,net,prodgoal):
return (production(net)*kmk*dogn-prodgoal)**2*(k_just-1)*capacity/.9+time_cost(od,length_mat)
def time_cost(od,length_mat):
cost = 0
for i in range(0,len(od)-1):
for j in range(0,len(od)-1):
cost += od[i][j]*length_mat[i][j]
return cost
def get_zero_net(net):
zero_net = np.zeros((len(net),6))
for i in range(0,len(net)):
zero_net[i][2] = net[i][2]
zero_net[i][3] = net[i][3]
zero_net[i][5] = net[i][5]
return zero_net
def update_zero_net(net,zero_net):
for i in range(0,len(net)):
zero_net[i][5] = net[i][5]
return zero_net
| mit | -5,535,627,304,960,529,000 | 26.804805 | 97 | 0.550491 | false |
cvegaj/ElectriCERT | venv3/lib/python3.6/site-packages/cert_schema/model.py | 1 | 12691 | """
class ProofType(Enum):
merkle_proof_2017 = 0
class SignatureType(Enum):
signed_content = 0
signed_transaction = 1
Signature
---------
|
|-- EmbeddedSignature: signs "contents" directly
|
|-- TransactionSignature: "contents" are embedded in transaction. Merkle proof for multiple
"""
import re
import sys
import pytz
from dateutil.parser import parse
from cert_schema import *
V1_1_REGEX = re.compile('[0-9a-fA-F]{24}')
V1_2_REGEX = re.compile('[0-9a-fA-F]{8}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{12}')
USE_PREFIX = False
def scope_name(name):
"""
TBD whether we want to include prefix. Doing this for now. Default is no prefix
:param name:
:return:
"""
if USE_PREFIX:
return BLOCKCERTS_PREFIX + name
else:
return name
class SignatureLine(object):
def __init__(self, image, name=None, job_title=None):
self.image = image
self.name = name
self.job_title = job_title
class ProofType(Enum):
merkle_proof_2017 = 0
class SignatureType(Enum):
    signed_content = 0
signed_transaction = 1
class Signature(object):
def __init__(self, signature_type, content_to_verify):
self.signature_type = signature_type
self.content_to_verify = content_to_verify
class TransactionSignature(Signature):
"""
Content is embedded in transaction in some manner
"""
def __init__(self, content_to_verify, transaction_id, merkle_proof=None):
super(TransactionSignature, self).__init__(SignatureType.signed_transaction, content_to_verify)
self.transaction_id = transaction_id
self.merkle_proof = merkle_proof
class EmbeddedSignature(Signature):
"""
Content is signed directly
"""
def __init__(self, content_to_verify, signature_value):
super(EmbeddedSignature, self).__init__(SignatureType.signed_content, content_to_verify)
self.signature_value = signature_value
class MerkleProof(object):
def __init__(self, target_hash, merkle_root, proof_type, original_proof_json):
self.target_hash = target_hash
self.merkle_root = merkle_root
self.proof_type = proof_type
self.proof_json = original_proof_json
from copy import deepcopy
chainpoint_proof = deepcopy(original_proof_json)
chainpoint_proof['type'] = 'ChainpointSHA256v2'
self.chainpoint_proof = chainpoint_proof
class Issuer(object):
def __init__(self, id, name, image, revocation_url=None):
self.id = id
self.name = name
self.image = image
self.revocation_url = revocation_url
class BlockchainCertificate(object):
def __init__(self, version, uid, recipient_name, recipient_public_key, title, description, signature_image,
issued_on, expires, subtitle, signatures, certificate_json, txid, issuer, revocation_addresses=[]):
self.version = version
self.uid = uid
self.recipient_name = recipient_name
self.recipient_public_key = recipient_public_key
self.title = title
self.description = description
self.signature_image = signature_image
self.issued_on = issued_on
self.expires = expires
self.subtitle = subtitle
self.signatures = signatures
self.certificate_json = certificate_json
self.txid = txid
self.issuer = issuer
self.revocation_addresses = revocation_addresses
def __str__(self):
sb = []
for key in self.__dict__:
_value = self.__dict__[key]
if _value and isstring(_value) and 'data:image/png;base64' in str(_value):
mapped_value = '<base64_encoded_image>'
elif _value:
mapped_value = _value
else:
mapped_value = '<None>'
sb.append("{key}='{value}'".format(key=key, value=mapped_value))
return ', '.join(sb)
def __repr__(self):
return self.__str__()
def isstring(s):
if (sys.version_info[0] >= 3):
return isinstance(s, str)
return isinstance(s, basestring)
def parse_issuer(issuer_json):
if 'revocationList' in issuer_json:
revocation_list = issuer_json['revocationList']
else:
revocation_list = None
return Issuer(issuer_json['id'], issuer_json['name'], issuer_json['image'], revocation_list)
def detect_version(certificate_json):
# assumes it's a certificate. Maybe add some schema validation
if not '@context' in certificate_json:
return BlockcertVersion.V1_1
context = certificate_json['@context']
if isinstance(context, list):
version_marker = context[-1]
else:
version_marker = context
if 'v1' in version_marker:
return BlockcertVersion.V1_2
elif '2.0-alpha' in version_marker:
return BlockcertVersion.V2_ALPHA
elif '2.0' in version_marker or 'v2' in version_marker:
return BlockcertVersion.V2
raise UnknownBlockcertVersionException()
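# Illustrative mapping (follows the checks above):
#   missing '@context'                        -> BlockcertVersion.V1_1
#   context marker containing 'v1'            -> BlockcertVersion.V1_2
#   context marker containing '2.0-alpha'     -> BlockcertVersion.V2_ALPHA
#   context marker containing '2.0' or 'v2'   -> BlockcertVersion.V2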
def is_v1_uid(uid):
if V1_1_REGEX.search(uid):
return True
else:
return False
def parse_chainpoint_proof(proof_json):
proof_type = ProofType.merkle_proof_2017
return MerkleProof(proof_json['targetHash'], proof_json['merkleRoot'], proof_type, proof_json)
def parse_date(raw_date):
if raw_date is None:
return None
parsed_date = parse(raw_date)
utc = pytz.UTC
if parsed_date.tzinfo is None or parsed_date.tzinfo.utcoffset(parsed_date) is None:
parsed_date = utc.localize(parsed_date)
return parsed_date
def parse_expires_date(assertion):
if 'expires' in assertion:
return parse_date(assertion['expires'])
else:
return None
def get_value_or_default(node, field):
value = None
if field in node:
value = node[field]
return value
def parse_v2_blockchain_certificate(certificate_json, version_marker):
assertion = certificate_json
uid = assertion['id']
badge = assertion['badge']
recipient = assertion['recipient']
issuer = parse_issuer(badge['issuer'])
issued_on = parse_date(assertion['issuedOn'])
signature = assertion[scope_name('signature')]
txid = signature['anchors'][0]['sourceId']
merkle_proof = parse_chainpoint_proof(signature)
signature_lines = []
if scope_name('signatureLines') in badge:
signature_lines_raw = badge[scope_name('signatureLines')]
for l in signature_lines_raw:
image = l['image']
name = get_value_or_default(l, 'name')
job_title = get_value_or_default(l, 'job_title')
signature_lines.append(SignatureLine(image, name, job_title))
subtitle = get_value_or_default(badge, 'subtitle')
if version_marker == BlockcertVersion.V2_ALPHA:
recipient_profile = recipient[scope_name('recipientProfile')]
else:
recipient_profile = certificate_json[scope_name('recipientProfile')]
recipient_public_key_full = recipient_profile['publicKey']
    recipient_public_key = str(recipient_public_key_full).split(':')[1]
import copy
document_json = copy.deepcopy(certificate_json)
del document_json['signature']
transaction_signature = TransactionSignature(document_json, txid, merkle_proof)
return BlockchainCertificate(version_marker,
uid,
recipient_profile['name'],
recipient_public_key,
badge['name'],
badge['description'],
signature_lines,
issued_on,
parse_expires_date(assertion),
subtitle,
[transaction_signature],
certificate_json,
txid,
issuer)
def parse_v1_2_blockchain_certificate(certificate_json):
document = certificate_json['document']
receipt = certificate_json['receipt']
certificate = document['certificate']
assertion = document['assertion']
recipient = document['recipient']
recipient_public_key = recipient['publicKey']
issued_on = parse_date(assertion['issuedOn'])
issuer = parse_issuer(certificate['issuer'])
assertion_uid = assertion['uid']
txid = receipt['anchors'][0]['sourceId']
signature_lines = []
if 'image:signature' in assertion:
signature_lines.append(SignatureLine(assertion['image:signature']))
subtitle = get_value_or_default(certificate, 'subtitle')
recipient_revocation_address = get_value_or_default(recipient, 'revocationKey')
revocation_addresses = [recipient_public_key]
if recipient_revocation_address:
revocation_addresses.append(recipient_revocation_address)
embedded_signature = EmbeddedSignature(assertion_uid, document['signature'])
transaction_signature = TransactionSignature(document, txid, parse_chainpoint_proof(receipt))
return BlockchainCertificate(BlockcertVersion.V1_2,
assertion_uid,
recipient['givenName'] + ' ' + recipient['familyName'],
recipient_public_key,
certificate['name'],
certificate['description'],
signature_lines,
issued_on,
parse_expires_date(assertion),
subtitle,
[embedded_signature, transaction_signature],
certificate_json,
txid,
issuer,
revocation_addresses)
def parse_v1_1_blockchain_certificate(json_certificate, txid, certificate_bytes):
subtitle = json_certificate['certificate']['subtitle']['content']
display_subtitle = json_certificate['certificate']['subtitle']['display']
if display_subtitle in ['true', 'True', 'TRUE']:
subtitle = subtitle
else:
subtitle = None
issuer = parse_issuer(json_certificate['certificate']['issuer'])
issued_on = parse_date(json_certificate['assertion']['issuedOn'])
recipient_pubkey = json_certificate['recipient']['pubkey']
assertion_uid = json_certificate['assertion']['uid']
revocation_addresses = [recipient_pubkey]
embedded_signature = EmbeddedSignature(assertion_uid, json_certificate['signature'])
transaction_signature = TransactionSignature(certificate_bytes, txid)
signature_lines = []
if 'image:signature' in json_certificate['assertion']:
signature_lines.append(SignatureLine(json_certificate['assertion']['image:signature']))
return BlockchainCertificate(BlockcertVersion.V1_1,
assertion_uid,
json_certificate['recipient']['givenName'] + ' ' + json_certificate['recipient'][
'familyName'],
recipient_pubkey,
json_certificate['certificate']['title'],
json_certificate['certificate']['description'],
signature_lines,
issued_on,
parse_expires_date(json_certificate['assertion']),
subtitle,
[embedded_signature, transaction_signature],
json_certificate,
txid,
issuer,
revocation_addresses)
def to_certificate_model(certificate_json, txid=None, certificate_bytes=None):
version = detect_version(certificate_json)
if version == BlockcertVersion.V1_1:
if not txid or not certificate_bytes:
raise InvalidCertificateError('V1.1 Blockchain Certificates require a transaction id and raw bytes')
return parse_v1_1_blockchain_certificate(certificate_json, txid, certificate_bytes)
elif version == BlockcertVersion.V1_2:
return parse_v1_2_blockchain_certificate(certificate_json)
elif version == BlockcertVersion.V2 or version == BlockcertVersion.V2_ALPHA:
return parse_v2_blockchain_certificate(certificate_json, version)
else:
raise UnknownBlockcertVersionException(version)
| gpl-3.0 | 154,558,770,172,818,700 | 33.961433 | 116 | 0.60405 | false |
smurfix/HomEvenT | modules/path.py | 1 | 1945 | # -*- coding: utf-8 -*-
##
## Copyright © 2007, Matthias Urlichs <[email protected]>
##
## This program is free software: you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation, either version 3 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License (included; see the file LICENSE)
## for more details.
##
"""\
This code implements primitive "if true" and "if false" checks.
"""
from homevent.check import Check,register_condition,unregister_condition
from homevent.module import Module
import os
class ExistsPathCheck(Check):
name="exists path"
doc="Check if there's something behind that path"
def check(self,*args):
assert len(args) == 1,"Need exactly one argument (file name)"
return os.path.exists(args[0])
class ExistsFileCheck(Check):
name="exists file"
doc="Check if there's a file at that path"
def check(self,*args):
assert len(args) == 1,"Need exactly one argument (file name)"
return os.path.isfile(args[0])
class ExistsDirCheck(Check):
name="exists directory"
doc="Check if there's a directory at that path"
def check(self,*args):
assert len(args) == 1,"Need exactly one argument (directory name)"
return os.path.isdir(args[0])
class PathModule(Module):
"""\
This module provides a couple of filesystem existence checks.
"""
info = "Check for file/directory existence"
def load(self):
register_condition(ExistsPathCheck)
register_condition(ExistsFileCheck)
register_condition(ExistsDirCheck)
def unload(self):
unregister_condition(ExistsPathCheck)
unregister_condition(ExistsFileCheck)
unregister_condition(ExistsDirCheck)
init = PathModule
| gpl-3.0 | -2,167,329,687,825,755,100 | 28.454545 | 72 | 0.738169 | false |
erzel/vitess | test/base_sharding.py | 1 | 16103 | #!/usr/bin/env python
#
# Copyright 2013, Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can
# be found in the LICENSE file.
"""This module contains a base class and utility functions for sharding tests.
"""
import struct
import logging
from vtdb import keyrange_constants
import utils
keyspace_id_type = keyrange_constants.KIT_UINT64
pack_keyspace_id = struct.Struct('!Q').pack
# fixed_parent_id is used as fixed value for the "parent_id" column in all rows.
# All tests assume a multi-column primary key (parent_id, id) but only adjust
# the "id" column and use this fixed value for "parent_id".
# Since parent_id is fixed, not all test code has to include parent_id in a
# WHERE clause (at the price of a full table scan).
fixed_parent_id = 86
class BaseShardingTest(object):
"""This base class uses unittest.TestCase methods to check various things.
All sharding tests should inherit from this base class, and use the
methods as needed.
"""
# _insert_value inserts a value in the MySQL database along with the comments
# required for routing.
# NOTE: We assume that the column name for the keyspace_id is called
# 'custom_ksid_col'. This is a regression test which tests for
# places which previously hardcoded the column name to 'keyspace_id'.
def _insert_value(self, tablet_obj, table, mid, msg, keyspace_id):
k = utils.uint64_to_hex(keyspace_id)
tablet_obj.mquery(
'vt_test_keyspace',
['begin',
'insert into %s(parent_id, id, msg, custom_ksid_col) '
'values(%d, %d, "%s", 0x%x) /* vtgate:: keyspace_id:%s */ '
'/* id:%d */' %
(table, fixed_parent_id, mid, msg, keyspace_id, k, mid),
'commit'],
write=True)
def _get_value(self, tablet_obj, table, mid):
"""Returns the row(s) from the table for the provided id, using MySQL.
Args:
tablet_obj: the tablet to get data from.
table: the table to query.
mid: id field of the table.
Returns:
A tuple of results.
"""
return tablet_obj.mquery(
'vt_test_keyspace',
'select parent_id, id, msg, custom_ksid_col from %s '
'where parent_id=%d and id=%d' %
(table, fixed_parent_id, mid))
def _check_value(self, tablet_obj, table, mid, msg, keyspace_id,
should_be_here=True):
result = self._get_value(tablet_obj, table, mid)
if keyspace_id_type == keyrange_constants.KIT_BYTES:
fmt = '%s'
keyspace_id = pack_keyspace_id(keyspace_id)
else:
fmt = '%x'
if should_be_here:
self.assertEqual(result, ((fixed_parent_id, mid, msg, keyspace_id),),
('Bad row in tablet %s for id=%d, custom_ksid_col=' +
fmt + ', row=%s') % (tablet_obj.tablet_alias, mid,
keyspace_id, str(result)))
else:
self.assertEqual(
len(result), 0,
('Extra row in tablet %s for id=%d, custom_ksid_col=' +
fmt + ': %s') % (tablet_obj.tablet_alias, mid, keyspace_id,
str(result)))
def _is_value_present_and_correct(
self, tablet_obj, table, mid, msg, keyspace_id):
"""_is_value_present_and_correct tries to read a value.
Args:
tablet_obj: the tablet to get data from.
table: the table to query.
mid: the id of the row to query.
msg: expected value of the msg column in the row.
keyspace_id: expected value of the keyspace_id column in the row.
Returns:
True if the value (row) is there and correct.
False if the value is not there.
If the value is not correct, the method will call self.fail.
"""
result = self._get_value(tablet_obj, table, mid)
if not result:
return False
if keyspace_id_type == keyrange_constants.KIT_BYTES:
fmt = '%s'
keyspace_id = pack_keyspace_id(keyspace_id)
else:
fmt = '%x'
self.assertEqual(result, ((fixed_parent_id, mid, msg, keyspace_id),),
('Bad row in tablet %s for id=%d, '
'custom_ksid_col=' + fmt) % (
tablet_obj.tablet_alias, mid, keyspace_id))
return True
def check_binlog_player_vars(self, tablet_obj, source_shards,
seconds_behind_master_max=0):
"""Checks the binlog player variables are correctly exported.
Args:
tablet_obj: the tablet to check.
source_shards: the shards to check we are replicating from.
seconds_behind_master_max: if non-zero, the lag should be smaller than
this value.
"""
v = utils.get_vars(tablet_obj.port)
self.assertIn('BinlogPlayerMapSize', v)
self.assertEquals(v['BinlogPlayerMapSize'], len(source_shards))
self.assertIn('BinlogPlayerSecondsBehindMaster', v)
self.assertIn('BinlogPlayerSecondsBehindMasterMap', v)
self.assertIn('BinlogPlayerSourceShardNameMap', v)
shards = v['BinlogPlayerSourceShardNameMap'].values()
self.assertEquals(sorted(shards), sorted(source_shards))
self.assertIn('BinlogPlayerSourceTabletAliasMap', v)
for i in xrange(len(source_shards)):
self.assertIn('%d' % i, v['BinlogPlayerSourceTabletAliasMap'])
if seconds_behind_master_max != 0:
self.assertTrue(
v['BinlogPlayerSecondsBehindMaster'] <
seconds_behind_master_max,
'BinlogPlayerSecondsBehindMaster is too high: %d > %d' % (
v['BinlogPlayerSecondsBehindMaster'],
seconds_behind_master_max))
for i in xrange(len(source_shards)):
self.assertTrue(
v['BinlogPlayerSecondsBehindMasterMap']['%d' % i] <
seconds_behind_master_max,
'BinlogPlayerSecondsBehindMasterMap is too high: %d > %d' % (
v['BinlogPlayerSecondsBehindMasterMap']['%d' % i],
seconds_behind_master_max))
def check_binlog_server_vars(self, tablet_obj, horizontal=True,
min_statements=0, min_transactions=0):
"""Checks the binlog server variables are correctly exported.
Args:
tablet_obj: the tablet to check.
horizontal: true if horizontal split, false for vertical split.
min_statements: check the statement count is greater or equal to this.
min_transactions: check the transaction count is greater or equal to this.
"""
v = utils.get_vars(tablet_obj.port)
if horizontal:
skey = 'UpdateStreamKeyRangeStatements'
tkey = 'UpdateStreamKeyRangeTransactions'
else:
skey = 'UpdateStreamTablesStatements'
tkey = 'UpdateStreamTablesTransactions'
self.assertIn(skey, v)
self.assertIn(tkey, v)
if min_statements > 0:
self.assertTrue(v[skey] >= min_statements,
'only got %d < %d statements' % (v[skey], min_statements))
if min_transactions > 0:
self.assertTrue(v[tkey] >= min_transactions,
'only got %d < %d transactions' % (v[tkey],
min_transactions))
def check_stream_health_equals_binlog_player_vars(self, tablet_obj, count):
"""Checks the variables exported by streaming health check match vars.
Args:
tablet_obj: the tablet to check.
count: number of binlog players to expect.
"""
blp_stats = utils.get_vars(tablet_obj.port)
self.assertEqual(blp_stats['BinlogPlayerMapSize'], count)
# Enforce health check because it's not running by default as
# tablets may not be started with it, or may not run it in time.
utils.run_vtctl(['RunHealthCheck', tablet_obj.tablet_alias])
stream_health = utils.run_vtctl_json(['VtTabletStreamHealth',
'-count', '1',
tablet_obj.tablet_alias])
logging.debug('Got health: %s', str(stream_health))
self.assertNotIn('serving', stream_health)
self.assertIn('realtime_stats', stream_health)
self.assertNotIn('health_error', stream_health['realtime_stats'])
self.assertIn('binlog_players_count', stream_health['realtime_stats'])
self.assertEqual(blp_stats['BinlogPlayerMapSize'],
stream_health['realtime_stats']['binlog_players_count'])
self.assertEqual(blp_stats['BinlogPlayerSecondsBehindMaster'],
stream_health['realtime_stats'].get(
'seconds_behind_master_filtered_replication', 0))
def check_destination_master(self, tablet_obj, source_shards):
"""Performs multiple checks on a destination master.
Combines the following:
- wait_for_binlog_player_count
- check_binlog_player_vars
- check_stream_health_equals_binlog_player_vars
Args:
tablet_obj: the tablet to check.
source_shards: the shards to check we are replicating from.
"""
tablet_obj.wait_for_binlog_player_count(len(source_shards))
self.check_binlog_player_vars(tablet_obj, source_shards)
self.check_stream_health_equals_binlog_player_vars(tablet_obj,
len(source_shards))
def check_running_binlog_player(self, tablet_obj, query, transaction,
extra_text=None):
"""Checks binlog player is running and showing in status.
Args:
tablet_obj: the tablet to check.
query: number of expected queries.
transaction: number of expected transactions.
extra_text: if present, look for it in status too.
"""
status = tablet_obj.get_status()
self.assertIn('Binlog player state: Running', status)
self.assertIn(
'<td><b>All</b>: %d<br><b>Query</b>: %d<br>'
'<b>Transaction</b>: %d<br></td>' % (query+transaction, query,
transaction), status)
self.assertIn('</html>', status)
if extra_text:
self.assertIn(extra_text, status)
def check_no_binlog_player(self, tablet_obj):
"""Checks no binlog player is running.
Also checks the tablet is not showing any binlog player in its status page.
Args:
tablet_obj: the tablet to check.
"""
tablet_obj.wait_for_binlog_player_count(0)
status = tablet_obj.get_status()
self.assertIn('No binlog player is running', status)
self.assertIn('</html>', status)
def check_throttler_service(self, throttler_server, names, rate):
"""Checks that the throttler responds to RPC requests.
We assume it was enabled by SplitClone with the flag --max_tps 9999.
Args:
throttler_server: vtworker or vttablet RPC endpoint. Format: host:port
names: Names of the throttlers e.g. BinlogPlayer/0 or <keyspace>/<shard>.
rate: Expected initial rate the throttler was started with.
"""
self.check_throttler_service_maxrates(throttler_server, names, rate)
self.check_throttler_service_configuration(throttler_server, names)
def check_throttler_service_maxrates(self, throttler_server, names, rate):
"""Checks the vtctl ThrottlerMaxRates and ThrottlerSetRate commands."""
# Avoid flakes by waiting for all throttlers. (Necessary because filtered
# replication on vttablet will register the throttler asynchronously.)
timeout_s = 10
while True:
stdout, _ = utils.run_vtctl(['ThrottlerMaxRates', '--server',
throttler_server], auto_log=True,
trap_output=True)
if '%d active throttler(s)' % len(names) in stdout:
break
timeout_s = utils.wait_step('all throttlers registered', timeout_s)
for name in names:
self.assertIn('| %s | %d |' % (name, rate), stdout)
self.assertIn('%d active throttler(s)' % len(names), stdout)
# Check that it's possible to change the max rate on the throttler.
new_rate = 'unlimited'
stdout, _ = utils.run_vtctl(['ThrottlerSetMaxRate', '--server',
throttler_server, new_rate],
auto_log=True, trap_output=True)
self.assertIn('%d active throttler(s)' % len(names), stdout)
stdout, _ = utils.run_vtctl(['ThrottlerMaxRates', '--server',
throttler_server], auto_log=True,
trap_output=True)
for name in names:
self.assertIn('| %s | %s |' % (name, new_rate), stdout)
self.assertIn('%d active throttler(s)' % len(names), stdout)
def check_throttler_service_configuration(self, throttler_server, names):
"""Checks the vtctl (Get|Update|Reset)ThrottlerConfiguration commands."""
# Verify updating the throttler configuration.
stdout, _ = utils.run_vtctl(['UpdateThrottlerConfiguration',
'--server', throttler_server,
'--copy_zero_values',
'target_replication_lag_sec:12345 '
'max_replication_lag_sec:65789 '
'initial_rate:3 '
'max_increase:0.4 '
'emergency_decrease:0.5 '
'min_duration_between_changes_sec:6 '
'max_duration_between_increases_sec:7 '
'ignore_n_slowest_replicas:0 '
'age_bad_rate_after_sec:9 '
'bad_rate_increase:0.10 '],
auto_log=True, trap_output=True)
self.assertIn('%d active throttler(s)' % len(names), stdout)
# Check the updated configuration.
stdout, _ = utils.run_vtctl(['GetThrottlerConfiguration',
'--server', throttler_server],
auto_log=True, trap_output=True)
for name in names:
# The max should be set and have a non-zero value.
# We test only the the first field 'target_replication_lag_sec'.
self.assertIn('| %s | target_replication_lag_sec:12345 ' % (name), stdout)
# protobuf omits fields with a zero value in the text output.
self.assertNotIn('ignore_n_slowest_replicas', stdout)
self.assertIn('%d active throttler(s)' % len(names), stdout)
# Reset clears our configuration values.
stdout, _ = utils.run_vtctl(['ResetThrottlerConfiguration',
'--server', throttler_server],
auto_log=True, trap_output=True)
self.assertIn('%d active throttler(s)' % len(names), stdout)
# Check that the reset configuration no longer has our values.
stdout, _ = utils.run_vtctl(['GetThrottlerConfiguration',
'--server', throttler_server],
auto_log=True, trap_output=True)
for name in names:
# Target lag value should no longer be 12345 and be back to the default.
self.assertNotIn('target_replication_lag_sec:12345', stdout)
self.assertIn('%d active throttler(s)' % len(names), stdout)
def verify_reconciliation_counters(self, worker_port, online_or_offline,
table, inserts, updates, deletes, equal):
"""Checks that the reconciliation Counters have the expected values."""
worker_vars = utils.get_vars(worker_port)
i = worker_vars['Worker' + online_or_offline + 'InsertsCounters']
if inserts == 0:
self.assertNotIn(table, i)
else:
self.assertEqual(i[table], inserts)
u = worker_vars['Worker' + online_or_offline + 'UpdatesCounters']
if updates == 0:
self.assertNotIn(table, u)
else:
self.assertEqual(u[table], updates)
d = worker_vars['Worker' + online_or_offline + 'DeletesCounters']
if deletes == 0:
self.assertNotIn(table, d)
else:
self.assertEqual(d[table], deletes)
e = worker_vars['Worker' + online_or_offline + 'EqualRowsCounters']
if equal == 0:
self.assertNotIn(table, e)
else:
self.assertEqual(e[table], equal)
| bsd-3-clause | 6,068,879,863,299,270,000 | 41.827128 | 80 | 0.611501 | false |
mohd-akram/item.tf | tf2api.py | 1 | 15632 | """This module is based on the Steam WebAPI and can be used to get information
about items in TF2. Using this module, you can obtain the item schema,
store prices, bundles, item sets and attributes for TF2.
You can also obtain market prices from backpack.tf and trade.tf.
There are also functions for parsing the information of each item.
"""
import asyncio
import json
from collections import defaultdict, OrderedDict
import aiohttp
async def getschema(apikey):
"""Return the schema"""
schema_task = asyncio.ensure_future(_getschemaoverview(apikey))
all_items = []
start = 0
while start is not None:
items, start = await _getschemaitems(apikey, start)
all_items.extend(items)
schema = await schema_task
schema['result']['items'] = all_items
return schema
async def _getschemaoverview(apikey):
url = ('https://api.steampowered.com/IEconItems_440/GetSchemaOverview/v1/'
f'?key={apikey}&language=en')
return await _getjsonresponse(url)
async def _getschemaitems(apikey, start):
url = ('https://api.steampowered.com/IEconItems_440/GetSchemaItems/v1/'
f'?key={apikey}&language=en&start={start}')
result = (await _getjsonresponse(url))['result']
return result['items'], result.get('next')
async def getitemsinfo(apikey, storeprices, indexes):
"""Return a dictionary of AssetClassInfo values with defindex as key"""
url = ('https://api.steampowered.com/ISteamEconomy/GetAssetClassInfo/v0001/'
'?key={0}&language=en&appid=440&class_count={1}'.format(apikey,
len(indexes)
))
for n, index in enumerate(indexes):
classid = storeprices[index]['classid']
url += '&classid{0}={1}'.format(n, classid)
infobyid = (await _getjsonresponse(url))['result']
del infobyid['success']
return {int(iteminfo['app_data']['def_index']): iteminfo
for iteminfo in infobyid.values()}
async def getbundles(apikey, storeprices):
"""Return a dictionary of store bundles with defindex as key"""
indexes = [index for index, price in storeprices.items()
if not {'Bundles', 'Class_Bundles'}.isdisjoint(price['tags'])]
return await getitemsinfo(apikey, storeprices, indexes)
def getitemsets(schema):
"""Return an ordered dictionary of itemsets with 'name' as key"""
return OrderedDict([(itemset['name'], itemset) for itemset in
schema['result']['item_sets']])
def getitems(schema):
"""Return an ordered dictionary of items in the schema where the key is
defindex for each item"""
return OrderedDict([(item['defindex'], item) for item in
schema['result']['items']])
def getitemsbyname(schema):
"""Return an ordered dictionary of items in the schema where the key is
item_name for each item"""
itemsbyname = OrderedDict()
duplicates = getobsoleteindexes()
for item in schema['result']['items']:
name = item['item_name']
if name not in itemsbyname:
if item['defindex'] not in duplicates:
itemsbyname[name] = item
return itemsbyname
def getattributes(schema):
"""Return a dictionary with each attribute's name as key"""
return {attribute['name']: attribute for attribute in
schema['result']['attributes']}
def getparticleeffects(schema):
"""Return a dictionary with each particle effect's id as key"""
return {effect['id']: effect for effect in
schema['result']['attribute_controlled_attached_particles']}
async def getstoreprices(apikey):
"""Return a dictionary of store prices where the key is defindex for
each item"""
url = ('https://api.steampowered.com/ISteamEconomy/GetAssetPrices/v0001/'
'?key={}&language=en&appid=440¤cy=usd'.format(apikey))
prices = (await _getjsonresponse(url))['result']['assets']
return {int(price['name']): price for price in prices}
def getnewstoreprices(storeprices):
"""Return a dictionary of store prices of new items with defindex as key"""
return {index: price for index, price in storeprices.items()
if 'New' in price['tags']}
async def getbackpackprices(apikey, items, itemsbyname):
"""Get market prices from backpack.tf.
Return a dictionary where the key is defindex and value is a dictionary of
prices for the item"""
url = ('https://backpack.tf/api/IGetPrices/v4/'
'?key={}&compress=1'.format(apikey))
pricesdata = (await _getjsonresponse(url))['response']['items']
pricesdict = defaultdict(dict)
qualities = getallqualities()
denoms = {'metal': 'Refined', 'hat': 'Hat', 'keys': 'Key',
'earbuds': 'Bud', 'usd': 'USD'}
for name, iteminfo in pricesdata.items():
if name not in itemsbyname:
continue
index = itemsbyname[name]['defindex']
item = items[index]
iscrate = False
if 'attributes' in item and item['attributes']:
attribute = item['attributes'][0]
if attribute['name'] == 'set supply crate series':
iscrate = True
crateno = str(attribute['value'])
if 'prices' not in iteminfo:
continue
for quality, tradeinfo in iteminfo['prices'].items():
try:
qualityname = qualities[int(quality)]
except KeyError:
continue
for tradable, craftinfo in tradeinfo.items():
# Ignore non-tradable version if there is a tradable one
if tradable == 'Non-Tradable' and 'Tradable' in tradeinfo:
continue
for craftable, price in craftinfo.items():
if type(price) is list:
price = price[0]
else:
if iscrate and crateno in price:
price = price[crateno]
elif '0' in price:
price = price['0']
else:
continue
if not price['value']:
continue
value = price['value']
valuehigh = (' - {:g}'.format(price['value_high'])
if 'value_high' in price else '')
denom = denoms[price['currency']]
if (value != 1 or valuehigh) and denom not in ('Refined',
'USD'):
denom += 's'
qlty = (qualityname if craftable != 'Non-Craftable'
else 'Uncraftable')
pricesdict[index][qlty] = '{:g}{} {}'.format(
value, valuehigh, denom)
return pricesdict
async def gettradeprices(apikey, items, itemsbyname):
"""Get market prices from trade.tf.
Return a dictionary where the key is defindex and value is a dictionary of
prices for the item"""
url = 'https://www.trade.tf/api/spreadsheet.json?key={}'.format(apikey)
pricesdata = (await _getjsonresponse(url))['items']
pricesdict = defaultdict(dict)
itemnames = set()
crates = defaultdict(int)
qualities = getallqualities()
qualities[-1] = 'Uncraftable'
denoms = {'r': 'Refined', 'k': 'Key', 'b': 'Bud'}
for index, prices in pricesdata.items():
index = int(index)
if index not in items:
# For crates, index = 10000*crate_defindex + crate_number
crateno = index % 10000
index //= 10000
# Store the price of the highest crate number only
if crateno < crates[index]:
continue
else:
crates[index] = crateno
name = items[index]['item_name']
# Trade.tf uses different indexes.
idx = itemsbyname[name]['defindex']
if index != idx and name in itemnames:
continue
for quality, price in prices.items():
quality = int(quality)
if 'regular' not in price:
continue
price = price['regular']
if price['unsure']:
continue
value = price['low']
valuehigh = (' - {:g}'.format(round(price['hi'], 2))
if value != price['hi'] else '')
denom = denoms[price['unit']]
qualityname = qualities[quality]
if (value != 1 or valuehigh) and denom != 'Refined':
denom += 's'
itemnames.add(name)
pricesdict[idx][qualityname] = '{:g}{} {}'.format(round(value, 2),
valuehigh,
denom)
return pricesdict
def getweapontags():
"""Return all weapon tags"""
return ('primary', 'secondary', 'melee', 'pda', 'pda2', 'building')
def getalltags():
"""Return all item tags"""
return (('weapon', 'cosmetic', 'hat', 'misc', 'taunt', 'tool', 'action',
'paint', 'craft', 'token', 'bundle', 'tournament', 'halloween') +
getweapontags())
def getallclasses():
"""Return an OrderedDict of TF2 classes with name as key and
a list of aliases as value"""
return OrderedDict([('Scout', ['Scoot']),
('Soldier', ['Solly']),
('Pyro', []),
('Demoman', ['Demo']),
('Heavy', ['Hoovy']),
('Engineer', ['Engi', 'Engie']),
('Medic', []),
('Sniper', []),
('Spy', [])])
def getallqualities():
"""Return a dictionary of TF2 item qualities with number as key and
description as value"""
return {6: 'Unique',
3: 'Vintage',
11: 'Strange',
1: 'Genuine',
14: "Collector's",
13: 'Haunted',
5: 'Unusual'}
def getalldenoms():
"""Return an OrderedDict of price denominations in descending order with
the defindex of their corresponding items as value"""
return OrderedDict([('Earbuds', 143),
('Key', 5021),
('Refined', 5002),
('Reclaimed', 5001),
('Scrap', 5000),
('Weapon', 0)])
def getstoreprice(item, storeprices):
"""Get store price of item"""
index = item['defindex']
return ('{:.2f}'.format(storeprices[index]['prices']['USD'] / 100.00)
if index in storeprices else '')
def getmarketprice(item, marketprices):
"""Get market price of item"""
index = item['defindex']
return marketprices[index] if index in marketprices else {}
def getitemattributes(item, allattributes, effects):
"""Get attributes of item"""
attributelist = []
if 'attributes' in item:
attributes = item['attributes']
for a in attributes:
value = a['value']
attribute = allattributes[a['name']]
if not attribute['hidden'] and 'description_string' in attribute:
description = attribute['description_string']
descformat = attribute['description_format']
if descformat == 'value_is_particle_index':
value = effects[value]['name']
description = description.replace('%s1', '{}')
else:
if descformat == 'value_is_percentage':
value = (value * 100) - 100
elif descformat == 'value_is_inverted_percentage':
value = 100 - (value * 100)
elif descformat == 'value_is_additive_percentage':
value *= 100
description = description.replace('%s1', '{:g}')
description = description.format(value)
attrdict = {'description': description,
'type': attribute['effect_type']}
if attrdict['type'] == 'unusual':
attrdict['type'] = 'neutral'
attributelist.append(attrdict)
order = ('neutral', 'positive', 'negative')
return sorted(attributelist, key=lambda k: order.index(k['type']))
def getitemclasses(item):
"""Get the TF2 classes that can use this item"""
return (sorted(item['used_by_classes'],
key=list(getallclasses().keys()).index)
if 'used_by_classes' in item else [])
def getitemtags(item):
"""Get a list of tags that describe the item"""
tags = []
itemclass = item['item_class']
itemtypename = item['item_type_name']
if itemclass == 'bundle':
tags.append(itemclass)
elif itemclass == 'craft_item':
tags.append('craft')
elif itemclass.endswith('_token'):
tags.append('token')
if 'item_slot' in item:
slot = item['item_slot']
if slot in getweapontags() and itemclass != 'slot_token':
tags.append('weapon')
if slot == 'misc':
tags.append('cosmetic')
if itemtypename in ('#TF_Wearable_Hat', 'Hat', 'Mask',
'Holiday Hat', 'Headset', 'Hair'):
tags.append('hat')
else:
tags.append(slot)
if itemtypename == 'Tournament Medal':
tags.append('tournament')
if 'tool' in item:
tags.append('tool')
if item['tool']['type'] == 'paint_can':
tags.append('paint')
if item.get('holiday_restriction') == 'halloween_or_fullmoon':
tags.append('halloween')
return tags
def getobsoleteindexes():
"""Return the indexes of obsolete items that have newer versions"""
map_stamps = {
2007, 2015, 2049, 2079, 2123, 2125, 2138, 2139, 2140, 2143, 2155, 2156
}
starter_packs = set(range(2018, 2027)) | set(range(2094, 2103))
return {699, 2093} | map_stamps | starter_packs
async def getplayerbackpack(apikey, steamid):
"""Return the player backpack of the given steamid"""
url = ('https://api.steampowered.com/IEconItems_440/GetPlayerItems/v0001/'
f'?key={apikey}&steamid={steamid}')
return (await _getjsonresponse(url)).get('result')
async def getplayersummary(apikey, steamid):
"""Return the player summary of the given steamid"""
return (await getplayersummaries(apikey, [steamid]))[0]
async def getplayersummaries(apikey, steamids):
"""Return the player summaries of a list of steamids"""
url = ('https://api.steampowered.com/ISteamUser/GetPlayerSummaries/v0002/'
f"?key={apikey}&steamids={','.join(steamids)}")
return (await _getjsonresponse(url))['response']['players']
async def resolvevanityurl(apikey, vanityurl):
"""Return the steamid of a given vanity url"""
url = ('https://api.steampowered.com/ISteamUser/ResolveVanityURL/v0001/'
f'?key={apikey}&vanityurl={vanityurl}')
response = (await _getjsonresponse(url))['response']
if response['success'] == 1:
return response['steamid']
async def _getjsonresponse(url):
headers = {'User-Agent': 'tf2api'}
async with aiohttp.ClientSession(headers=headers) as session:
async with session.get(url) as response:
return json.loads((await response.read()).decode())
| mit | 3,765,730,247,329,332,700 | 32.18896 | 80 | 0.5625 | false |
seblin/launchit | launchit/settings.py | 1 | 3719 | """
Configuration stuff.
"""
# Stdlib
import os
# 3rd party
from xdg.BaseDirectory import xdg_config_home
# Launchit package
from . import logger
# Default configuration
config = {
'encoding': 'utf-8',
'icon-theme': 'hicolor',
'menu-dir': '/etc/xdg/menus',
'starter' : 'xdg-open',
}
CONFIG_FILENAME = 'launchit.conf'
def update_config(configuration={}):
"""
Update default configuration with the result of `get_user_config()` and
after that with the given `configuration`-dictionary.
"Updating" means: If the same key exists in at least two dictionaries,
then the latter one's value is used. Otherwise the key is just added.
Thus, an empty dictionary will result in no change.
"""
for cfg in (get_user_config(), configuration):
config.update(cfg)
def get_user_config(filename=None):
"""
Return the parsed contents of a configuration file, which is named with
`filename`, as a dictionary, where the file is assumed to exist inside
the user's "standard" configuration directory. In case that no such file
could be found, an empty dictionary will be returned. If `filename` is
`None`, the `CONFIG_FILENAME` is used.
Note that a detailed explanation of the expected scheme inside the config
file can be found in `iter_config_entries()`, while the config file's path
is retrieved by `get_config_path()`.
"""
if filename is None:
filename = CONFIG_FILENAME
path = get_config_path(filename)
if not os.path.exists(path):
return {}
logger.info('Found config file {0!r}'.format(path))
return get_config_entries(path)
def get_config_path(filename=None):
"""
Return a XDG-compliant path based on given `filename`. If `filename` is
`None`, the `CONFIG_FILENAME` will be used.
"""
# TODO: Determinate the correct path on non-linux platforms, too
if filename is None:
filename = CONFIG_FILENAME
if os.path.dirname(filename):
raise ValueError('filename may not contain any path separator')
return os.path.join(xdg_config_home, filename)
def get_config_entries(path):
"""
Read a configuration file from the given path and return a dictionary,
which contains the file's entries.
"""
with open(path) as config_file:
return dict(iter_config_entries(config_file))
def iter_config_entries(lines):
"""
Iterate over the given configuration lines, which may be either a file-like
object or a list of strings and return a `(key, value)`-pair for each line.
Parsing is done according to the following rules:
Each line must use the scheme `key: value` to define an item. If a line
contains multiple `:`-chars, then the first one disappears, as it is used
as the separator, while the other ones will remain inside the value entry,
which consequently means that only one item per line can be defined. Lines
are read until a `#` appears, since that is interpreted as the beginning of
a comment. Whitespace at the beginning or at the end of a line is ignored.
The same goes for whitespace between key/value and separator. Empty lines
are just ignored, while a line with non-whitespaced contents, which doesn't
contain the separator, is an error. Note that keys and values will always
be strings.
"""
for index, line in enumerate(lines):
code = line.split('#')[0].strip()
if code:
if not ':' in code:
msg = 'Syntax error in line {0}: Expected a separator (`:`)'
raise ValueError(msg.format(index + 1))
key, value = code.split(':', 1)
yield (key.strip(), value.strip())
| mit | 119,058,826,191,110,880 | 37.340206 | 80 | 0.674106 | false |
WDavidX/pipy | v3dht22.py | 1 | 20163 | #!/usr/bin/env python
# 2014-07-11 DHT22.py
import time
import atexit
import pigpio
import smbus
import subprocess
import os
import signal
import RPi.GPIO as GPIO
import psutil
import json
import socket
import math
import datetime
import urllib2
class i2c_device:
def __init__(self, addr, port=1):
self.addr = addr
self.bus = smbus.SMBus(port)
# Write a single command
def write_cmd(self, cmd):
self.bus.write_byte(self.addr, cmd)
time.sleep(0.0001)
# Write a command and argument
def write_cmd_arg(self, cmd, data):
self.bus.write_byte_data(self.addr, cmd, data)
time.sleep(0.0001)
# Write a block of data
def write_block_data(self, cmd, data):
self.bus.write_block_data(self.addr, cmd, data)
time.sleep(0.0001)
# Read a single byte
def read(self):
return self.bus.read_byte(self.addr)
# Read
def read_data(self, cmd):
return self.bus.read_byte_data(self.addr, cmd)
# Read a block of data
def read_block_data(self, cmd):
return self.bus.read_block_data(self.addr, cmd)
# LCD Address
ADDRESS = 0x3f
# commands
LCD_CLEARDISPLAY = 0x01
LCD_RETURNHOME = 0x02
LCD_ENTRYMODESET = 0x04
LCD_DISPLAYCONTROL = 0x08
LCD_CURSORSHIFT = 0x10
LCD_FUNCTIONSET = 0x20
LCD_SETCGRAMADDR = 0x40
LCD_SETDDRAMADDR = 0x80
# flags for display entry mode
LCD_ENTRYRIGHT = 0x00
LCD_ENTRYLEFT = 0x02
LCD_ENTRYSHIFTINCREMENT = 0x01
LCD_ENTRYSHIFTDECREMENT = 0x00
# flags for display on/off control
LCD_DISPLAYON = 0x04
LCD_DISPLAYOFF = 0x00
LCD_CURSORON = 0x02
LCD_CURSOROFF = 0x00
LCD_BLINKON = 0x01
LCD_BLINKOFF = 0x00
# flags for display/cursor shift
LCD_DISPLAYMOVE = 0x08
LCD_CURSORMOVE = 0x00
LCD_MOVERIGHT = 0x04
LCD_MOVELEFT = 0x00
# flags for function set
LCD_8BITMODE = 0x10
LCD_4BITMODE = 0x00
LCD_2LINE = 0x08
LCD_1LINE = 0x00
LCD_5x10DOTS = 0x04
LCD_5x8DOTS = 0x00
# flags for backlight control
LCD_BACKLIGHT = 0x08
LCD_NOBACKLIGHT = 0x00
En = 0b00000100 # Enable bit
Rw = 0b00000010 # Read/Write bit
Rs = 0b00000001 # Register select bit
class lcd:
"""
Class to control LCD display
"""
LCD_BacklightOpt = LCD_NOBACKLIGHT
LCD_BacklightOpt = LCD_BACKLIGHT
LCD_TurnOn, LCD_TurnOff = LCD_BACKLIGHT, LCD_NOBACKLIGHT
def __init__(self):
self.lcd_device = i2c_device(ADDRESS)
self.lcd_write(0x03)
self.lcd_write(0x03)
self.lcd_write(0x03)
self.lcd_write(0x02)
self.lcd_write(LCD_FUNCTIONSET | LCD_2LINE | LCD_5x8DOTS | LCD_4BITMODE)
self.lcd_write(LCD_DISPLAYCONTROL | LCD_DISPLAYON)
self.lcd_write(LCD_CLEARDISPLAY)
self.lcd_write(LCD_ENTRYMODESET | LCD_ENTRYLEFT)
time.sleep(0.2)
self.backlight = lcd.LCD_BacklightOpt
# clocks EN to latch command
def lcd_backlighton(self, N=-1):
try:
N = int(N)
if not N in [1, 0, -1]:
raise Exception("Wrong backlight option")
except Exception:
N = lcd.LCD_TurnOff
if N == 1:
self.LCD_BacklightOpt = lcd.LCD_TurnOn
elif N == 0:
self.LCD_BacklightOpt = lcd.LCD_TurnOff
elif self.LCD_BacklightOpt == lcd.LCD_TurnOff:
self.LCD_BacklightOpt = lcd.LCD_TurnOn
else:
self.LCD_BacklightOpt = lcd.LCD_TurnOff
def lcd_strobe(self, data):
self.lcd_device.write_cmd(data | En | self.LCD_BacklightOpt)
time.sleep(.0005)
self.lcd_device.write_cmd(((data & ~En) | self.LCD_BacklightOpt))
time.sleep(.0001)
def lcd_write_four_bits(self, data):
self.lcd_device.write_cmd(data | self.LCD_BacklightOpt)
self.lcd_strobe(data)
# write a command to lcd
def lcd_write(self, cmd, mode=0):
self.lcd_write_four_bits(mode | (cmd & 0xF0))
self.lcd_write_four_bits(mode | ((cmd << 4) & 0xF0))
# put string function
def lcd_display_string(self, string, line):
if line == 1:
self.lcd_write(0x80)
if line == 2:
self.lcd_write(0xC0)
if line == 3:
self.lcd_write(0x94)
if line == 4:
self.lcd_write(0xD4)
for char in string:
self.lcd_write(ord(char), Rs)
# clear lcd and set to home
def lcd_clear(self):
self.lcd_write(LCD_CLEARDISPLAY)
self.lcd_write(LCD_RETURNHOME)
class sensor:
"""
A class to read relative humidity and temperature from the
DHT22 sensor. The sensor is also known as the AM2302.
The sensor can be powered from the Pi 3V3 or the Pi 5V rail.
Powering from the 3V3 rail is simpler and safer. You may need
to power from 5V if the sensor is connected via a long cable.
For 3V3 operation connect pin 1 to 3V3 and pin 4 to ground.
Connect pin 2 to a gpio.
For 5V operation connect pin 1 to 5V and pin 4 to ground.
The following pin 2 connection works for me. Use at YOUR OWN RISK.
5V--5K_resistor--+--10K_resistor--Ground
|
DHT22 pin 2 -----+
|
gpio ------------+
"""
def __init__(self, pi, gpio, LED=None, power=None):
"""
Instantiate with the Pi and gpio to which the DHT22 output
pin is connected.
Optionally a LED may be specified. This will be blinked for
each successful reading.
Optionally a gpio used to power the sensor may be specified.
This gpio will be set high to power the sensor. If the sensor
locks it will be power cycled to restart the readings.
Taking readings more often than about once every two seconds will
eventually cause the DHT22 to hang. A 3 second interval seems OK.
"""
self.pi = pi
self.gpio = gpio
self.LED = LED
self.power = power
if power is not None:
pi.write(power, 1) # Switch sensor on.
time.sleep(2)
self.powered = True
self.cb = None
atexit.register(self.cancel)
self.bad_CS = 0 # Bad checksum count.
self.bad_SM = 0 # Short message count.
self.bad_MM = 0 # Missing message count.
self.bad_SR = 0 # Sensor reset count.
self.bad_Trigger = False # flag true if the last trigger was good
# Power cycle if timeout > MAX_TIMEOUTS.
self.no_response = 0
self.MAX_NO_RESPONSE = 2
self.rhum = -999
self.temp = -999
self.tov = None
self.high_tick = 0
self.bit = 40
pi.set_pull_up_down(gpio, pigpio.PUD_OFF)
pi.set_watchdog(gpio, 0) # Kill any watchdogs.
self.cb = pi.callback(gpio, pigpio.EITHER_EDGE, self._cb)
def _cb(self, gpio, level, tick):
"""
Accumulate the 40 data bits. Format into 5 bytes, humidity high,
humidity low, temperature high, temperature low, checksum.
"""
diff = pigpio.tickDiff(self.high_tick, tick)
self.bad_Trigger = False
if level == 0:
# Edge length determines if bit is 1 or 0.
if diff >= 50:
val = 1
if diff >= 200: # Bad bit?
self.CS = 256 # Force bad checksum.
else:
val = 0
if self.bit >= 40: # Message complete.
self.bit = 40
elif self.bit >= 32: # In checksum byte.
self.CS = (self.CS << 1) + val
if self.bit == 39:
# 40th bit received.
self.pi.set_watchdog(self.gpio, 0)
self.no_response = 0
total = self.hH + self.hL + self.tH + self.tL
if (total & 255) == self.CS: # Is checksum ok?
self.rhum = ((self.hH << 8) + self.hL) * 0.1
if self.tH & 128: # Negative temperature.
mult = -0.1
self.tH = self.tH & 127
else:
mult = 0.1
self.temp = ((self.tH << 8) + self.tL) * mult
self.tov = time.time()
if self.LED is not None:
self.pi.write(self.LED, 0)
else:
self.bad_Trigger = True
self.bad_CS += 1
elif self.bit >= 24: # in temp low byte
self.tL = (self.tL << 1) + val
elif self.bit >= 16: # in temp high byte
self.tH = (self.tH << 1) + val
elif self.bit >= 8: # in humidity low byte
self.hL = (self.hL << 1) + val
elif self.bit >= 0: # in humidity high byte
self.hH = (self.hH << 1) + val
else: # header bits
pass
self.bit += 1
elif level == 1:
self.high_tick = tick
if diff > 250000:
self.bit = -2
self.hH = 0
self.hL = 0
self.tH = 0
self.tL = 0
self.CS = 0
else: # level == pigpio.TIMEOUT:
self.pi.set_watchdog(self.gpio, 0)
if self.bit < 8: # Too few data bits received.
self.bad_MM += 1 # Bump missing message count.
self.no_response += 1
if self.no_response > self.MAX_NO_RESPONSE:
self.no_response = 0
self.bad_SR += 1 # Bump sensor reset count.
if self.power is not None:
self.powered = False
self.pi.write(self.power, 0)
time.sleep(2)
self.pi.write(self.power, 1)
time.sleep(2)
self.powered = True
elif self.bit < 39: # Short message receieved.
self.bad_SM += 1 # Bump short message count.
self.no_response = 0
else: # Full message received.
self.no_response = 0
def sensor_info(self):
return self.temp, self.rhum, self.bad_Trigger, self.bad_SM
def is_last_tigger(self):
return self.bad_Trigger
def temperature(self):
"""Return current temperature."""
return self.temp
def humidity(self):
"""Return current relative humidity."""
return self.rhum
def staleness(self):
"""Return time since measurement made."""
if self.tov is not None:
return time.time() - self.tov
else:
return -999
def bad_checksum(self):
"""Return count of messages received with bad checksums."""
return self.bad_CS
def short_message(self):
"""Return count of short messages."""
return self.bad_SM
def missing_message(self):
"""Return count of missing messages."""
return self.bad_MM
def sensor_resets(self):
"""Return count of power cycles because of sensor hangs."""
return self.bad_SR
def trigger(self):
"""Trigger a new relative humidity and temperature reading."""
if self.powered:
if self.LED is not None:
self.pi.write(self.LED, 1)
self.pi.write(self.gpio, pigpio.LOW)
time.sleep(0.017) # 17 ms
self.pi.set_mode(self.gpio, pigpio.INPUT)
self.pi.set_watchdog(self.gpio, 200)
def cancel(self):
"""Cancel the DHT22 sensor."""
self.pi.set_watchdog(self.gpio, 0)
if self.cb != None:
self.cb.cancel()
self.cb = None
def orignal_sample():
pass
# Intervals of about 2 seconds or less will eventually hang the DHT22.
INTERVAL = 3
pi = pigpio.pi()
s = sensor(pi, 4, LED=None, power=None)
r = 0
next_reading = time.time()
while True:
r += 1
s.trigger()
time.sleep(0.2)
print("r={} H={} T={} stale={:3.2f} bad_checksum={} SMS={} Missing={} resets={}".format(
r, s.humidity(), s.temperature(), s.staleness(),
s.bad_checksum(), s.short_message(), s.missing_message(),
s.sensor_resets()))
next_reading += INTERVAL
time.sleep(next_reading - time.time()) # Overall INTERVAL second polling.
s.cancel()
pi.stop()
def init_mylcd():
mylcd = lcd()
mylcd.lcd_display_string("Emma Be Happy".center(20), 2)
mylcd.lcd_display_string("DHT22 Version 1.0".center(20), 3)
return mylcd
def backlight_control(fname="backlighton.txt"):
t=datetime.datetime.now()
if t.hour>22 or t.hour<8: return 1
if os.path.exists(fname): return 1
else: return 0
def get_log_file_name(tlast, tnew, outdir=r".", fname_prefix=r"dht22-"):
t_lastdate_num = int(time.strftime("%Y%m%d", time.localtime(tlast)))
t_newdate_num = int(time.strftime("%Y%m%d", time.localtime(tnew)))
# if not os.path.exists(outdir): os.mkdir(outdir)
fnout = os.path.join(os.getcwdu(), outdir, fname_prefix + time.strftime("%y-%m-%d") + ".txt")
# print fnout, t_newdate_num,t_lastdate_num
if (t_newdate_num > t_lastdate_num) and not (os.path.exists(fnout)): open(fnout, 'w').close()
os.system("""sudo chown pi %s"""%fnout)
return fnout
def update_lcd(mylcd, t, h):
mytimestr = time.strftime("%m-%d %H:%M %a")
mylcd.lcd_display_string(mytimestr.center(20), 1)
mylcd.lcd_display_string(("%.1fF %.1fC %.1f%%" % (t * 9 / 5.0 + 32, t, h)).center(20), 4)
def get_weather_api():
minneapolis_url = r'http://api.openweathermap.org/data/2.5/weather?id=5037649&units=metric'
try:
response = urllib2.urlopen(minneapolis_url,timeout=5)
data = json.load(response)
except urllib2.URLError, e:
print "urlopen error at %s, %s"%(time.strftime("%m-%d %H:%M:%S"),e)
return None
except socket.timeout,e:
print "urlopen error at %s, %s"%(time.strftime("%m-%d %H:%M:%S"),e)
return None
return data
wunderground_counter=0
def get_wunderground():
global wunderground_counter
myapi="8e1199ad75005651"
features="conditions"
settings="lang:EN"
query="55414"
format="json"
minneapolis_url="""http://api.wunderground.com/api/%s/%s/%s/q/%s.%s"""%(myapi,features,settings,query,format)
try:
wunderground_counter=wunderground_counter+1
print "Calling wunderground %5d at %s"%(wunderground_counter, time.strftime("%m-%d %H:%M:%S"))
response = urllib2.urlopen(minneapolis_url,timeout=5)
data = json.load(response)
except Exception,e:
print "get_wunderground error %s"%e
return None
#print data['current_observation']['feelslike_c']
return data
def get_sunset_time_str(date_info=None,latitude=44.97,longitude=-93.26,tz_info='US/Central'):
import pytz,datetime,astral
a=astral.Astral()
a.solar_depression='civil'
if date_info is None: date_info=datetime.date.today()
utc_datetime=a.sunset_utc(date_info,latitude,longitude)
lc_datetime=utc_datetime.astimezone(pytz.timezone(tz_info))
outstr="%02d%02d"%(lc_datetime.hour,lc_datetime.minute)
return outstr
def main1():
# Some init values
outdir = r"pipylog_" + os.path.splitext(__file__)[0]
if not os.path.exists(outdir): os.mkdir(outdir)
updateIntervalSec, runningTimeHour = 60, 24
retryInvervalSec = 3
totalLoopNum, errorLoopNum = 0, 0
main_t0 = time.time()
# init instances
pi = pigpio.pi()
try:
s = sensor(pi, 18, LED=None, power=None)
except Exception, e:
print "%s"%(e)
mylcd = init_mylcd()
mylcd.lcd_backlighton(backlight_control())
dht_running = True
initDone = False
# Sensor first few trials
init_t0 = -time.time()
init_loop = 0
while not initDone:
loop_t0 = time.time()
s.trigger()
t, h, badtrigger, badsm = s.sensor_info()
if (h != -999):
initDone = True
else: # time.sleep(loop_t0+3-time.time())
init_loop += 1
time.sleep(loop_t0 + retryInvervalSec - time.time())
print "Init sensor %d loops in in %.1f seconds" % (init_loop, time.time() + init_t0)
print "Output directory %s" % (os.path.abspath(outdir))
loop_t0=0
n_15min=0
n_5min=0
wunderground_data,weather_data=None,None
while (dht_running):
loop_t_last = loop_t0
loop_t0 = time.time()
try:
s.trigger()
totalLoopNum += 1
print totalLoopNum,errorLoopNum
t, h, badtrigger, badsm = s.sensor_info()
# print totalLoopNum,t,h,badtrigger, badsm
if badtrigger:
errorLoopNum += 1
t_badtrigger_waitsec = max(0, loop_t0 + retryInvervalSec - time.time())
if t_badtrigger_waitsec > 0: time.sleep(t_badtrigger_waitsec)
continue
mylcd.lcd_backlighton(backlight_control())
fnout = get_log_file_name(loop_t_last, loop_t0, outdir)
with open(fnout, 'a') as fhout:
fhout.write(
"%.2f , %s , %4.1f , %4.1f\n" % (\
loop_t0, time.strftime("%H%M%S", time.localtime(loop_t0)), t, h))
# first row
str1 = time.strftime("%H:%M %m-%d %a ",time.localtime(time.time()+60))+get_sunset_time_str()
mylcd.lcd_display_string(str1.center(20), 1)
# second row
str2 = "Emma Be Happy %d" % (totalLoopNum)
print str2
mylcd.lcd_display_string(str2.center(20), 2)
# third row from internet
if n_15min<math.floor(loop_t0 / (60*15.0)):
n_15min=math.floor(loop_t0 / (60*15.0))
wunderground_data=get_wunderground()
if wunderground_data is not None:
str3 = "%.0f/%.0fC %s %s"%(\
float(wunderground_data['current_observation']['feelslike_c']),\
float(wunderground_data['current_observation']['temp_c']),\
wunderground_data['current_observation']['weather'].split(" ")[-1],\
wunderground_data['current_observation']['relative_humidity'])
mylcd.lcd_display_string(str3.center(20), 3)
# fourth row
mylcd.lcd_display_string(("%.1fF %.1fC %.1f%%" % (t * 9 / 5.0 + 32, t, h)).center(20), 4)
twaitsec = max(0, loop_t0 + updateIntervalSec - time.time())
if twaitsec > 0: time.sleep(twaitsec)
except KeyboardInterrupt:
dht_running = False
except Exception, e:
# dht_running=False
print "%s"%e
time.sleep(1)
continue
print "\n" * 2
print "%s terminated" % (os.path.abspath(__file__))
print "Up time: %.1f sec, %d loops from %s " % (
time.time() -main_t0, totalLoopNum, time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(main_t0)))
print "Log file at %s" % ( fnout)
if errorLoopNum > 0: print "Error loops %d/%d" % (errorLoopNum, totalLoopNum)
def start_daemon():
p1 = subprocess.Popen(["ps","axo","pid,ppid,pgrp,tty,tpgid,sess,comm"], stdout=subprocess.PIPE)
p2 = subprocess.Popen(["awk", "$2==1"], stdin=p1.stdout, stdout=subprocess.PIPE)
p3 = subprocess.Popen(["awk", "$1==$3"], stdin=p2.stdout, stdout=subprocess.PIPE)
pdata,perr=p3.communicate()
pigpiod_found=False
for idx,item in enumerate(pdata.split("\n")):
pname=(item.strip()).split(' ')[-1]
if pname == "pigpiod":
pigpiod_found=True
line=item.strip()
break
if pigpiod_found: print line
else:
os.system("sudo pigpiod")
print "\nstarting pigpiod..."
time.sleep(3)
if __name__ == "__main__":
pass
# orignal_sample()
start_daemon()
main1()
#print get_weather_api()
| mit | -1,763,873,955,601,170,400 | 30.802839 | 118 | 0.560581 | false |
VanceKingSaxbeA/MarketsEngine | src/googlequotemachine.py | 1 | 3986 | /*Owner & Copyrights: Vance King Saxbe. A.*/""" Copyright (c) <2014> Author Vance King Saxbe. A, and contributors Power Dominion Enterprise, Precieux Consulting and other contributors. Modelled, Architected and designed by Vance King Saxbe. A. with the geeks from GoldSax Consulting and GoldSax Technologies email @[email protected]. Development teams from Power Dominion Enterprise, Precieux Consulting. Project sponsored by GoldSax Foundation, GoldSax Group and executed by GoldSax Manager."""from src.googlefinancequote import *
import sqlite3 as lite
import string
import gc
import time
import math
from src.dbbackup import *
import _thread
from src.goldsaxanalytics import *
def actionking(lck, tempf, stocklist, dbase, attmt,actionlist,cycle,timeatpresent,timetotake):
if tempf == []:
timeatpresent = time.clock()
if (time.clock() - timeatpresent) > timetotake:
return 0
lck.acquire()
lck.release()
f = pullprocess(stocklist)
sorter = []
con = lite.connect(dbase)
for fuck in f:
for suck in tempf:
if (fuck[0] == suck[0]):
try:
Jack = float(fuck[3].replace(",",""))
Jill = float(suck[3].replace(",",""))
except ValueError:
break
if (abs(Jack-Jill)> 0.01):
sorter.append(fuck[0])
stmt = "INSERT INTO "+fuck[0]+"table(ONNN, ATTT, PRIC) VALUES ('"+fuck[1]+"', '"+fuck[2]+"', "+fuck[3].replace(",","")+");"
cur = con.cursor()
try:
cur.execute(stmt)
con.commit()
except lite.OperationalError:
time.sleep(0.05)
try:
cur.execute(stmt)
con.commit()
except lite.OperationalError:
time.sleep(0.05)
try:
cur.execute(stmt)
con.commit()
except lite.OperationalError:
con.commit()
con.close()
if sorter != []:
attmt = 0
"""
a_lock = _thread.allocate_lock()
with a_lock:
for item in sorter:
_thread.start_new_thread(goldsaxanalytics.fetch,(item,dbase,a_lock))
"""
if tempf != [] and sorter == [] and attmt == 4:
gc.collect()
return null
if tempf != [] and sorter == [] and attmt == 3:
time.sleep(60)
gc.collect()
attmt = 4
if tempf != [] and sorter == [] and attmt == 2:
time.sleep(30)
gc.collect()
attmt = 3
if tempf != [] and sorter == [] and attmt == 1:
time.sleep(10)
gc.collect()
attmt = 2
if tempf != [] and sorter == []:
time.sleep(5)
attmt = 1
gc.collect()
time.sleep(0.0001)
gc.collect()
cycle = cycle + 1
return actionking(lck,f, stocklist,dbase, attmt,actionlist,cycle,timeatpresent,timetotake)
def pullprocess(ass):
sds = googlefinancequote.getquote(ass)
return sds
/*email to provide support at [email protected], [email protected], For donations please write to [email protected]*/ | mit | -2,020,211,338,537,667,600 | 38.87 | 530 | 0.466633 | false |
garbear/EventGhost | plugins/Barco/__init__.py | 1 | 19632 | from __future__ import with_statement
import eg
eg.RegisterPlugin(
name = "Barco CRT Projector",
description = "Controls Barco CRT projectors via RS232.",
kind = "external",
author = "Bitmonster",
version = "1.0.0",
canMultiLoad = True,
createMacrosOnAdd = True,
icon = (
"iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAIAAACQkWg2AAAABnRSTlMA/wD/AP83WBt9"
"AAAACXBIWXMAAAsTAAALEwEAmpwYAAABTElEQVR4nJWRO0sDQRSFz53Z7GNWo0VMYgiI"
"D9BERJSAP8PCyl9g5T8QC/HniI3Y2qnYZyEoeYAYo8YgaFbzmBmLXdYUksepprjfnHPP"
"Ja01JpEBwD85lV5p5ChfXxPHRwxAv+jp9idfXZS1KlteUI0nnlvSsgtS5Fo042LK7l3f"
"yqIXOhCIFzZJOM7hgayWrf09isf56oosVaClUdgCZ2w2Lh+fQ0BDq9YHs0xZrvZu7ozt"
"DVV/o2mXzSVUsyUrNUB1zi5iO4UQANA9v4yySu9hyCYsiDROPzoCNMZqliJgIk0cyQBw"
"lcvXFQEgoiGHz+TzuwFQTqXvv/yRDt/JVOgwmMh1hSsEAFeI1+a7ktI0Y1ojcjb+0gEA"
"lFSZZAKMc86FbfWlchw7ZpqNxkvwrwEgm0kPNtv+6QQPIgLQ9n34vm1b2fk0gGFb/qtf"
"bUt6K1gxHQUAAAAASUVORK5CYII="
),
)
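# One entry per generated action: (class name, display name, description,
# command bytes).  The bytes are CMD followed by optional DAT1..DAT4 payload;
# entries using CMD 0x51 set a state explicitly instead of toggling it.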
ACTIONS = (
("Enter", "Enter", None, (0x07, )),
("Exit", "Exit", None, (0x08, )),
("Up", "Cursor Up", None, (0x21, )),
("Down", "Cursor Down", None, (0x22, )),
("Left", "Cursor Left", None, (0x24, )),
("Right", "Cursor Right", None, (0x23, )),
("AdjustToggle", "Adjust Toggle", None, (0x09, )),
("AdjustOn", "Adjust On", None, (0x51, 0x09, 0x00, 0x00, 0x01)),
("AdjustOff", "Adjust Off", None, (0x51, 0x09, 0x00, 0x00, 0x00)),
("TextToggle", "Text Toggle", None, (0x0d, )),
("TextOn", "Text On", None, (0x51, 0x06, 0x00, 0x00, 0x01)),
("TextOff", "Text Off", None, (0x51, 0x06, 0x00, 0x00, 0x00)),
("PauseToggle", "Pause Toggle", None, (0x0f, )),
("PauseOff", "Pause Off", None, (0x51, 0x01, 0x00, 0x00, 0x00)),
("PauseOn", "Pause On", None, (0x51, 0x01, 0x00, 0x00, 0x01)),
("PowerToggle", "Power Toggle", None, (0x0e, )),
("PowerOff", "Power Off (Standby)", None, (0x51, 0x0a, 0x00, 0x00, 0x00)),
("PowerOn", "Power On", None, (0x51, 0x0a, 0x00, 0x00, 0x01)),
("Numpad0", "Numpad 0", None, (0x10, )),
("Numpad1", "Numpad 1", None, (0x11, )),
("Numpad2", "Numpad 2", None, (0x12, )),
("Numpad3", "Numpad 3", None, (0x13, )),
("Numpad4", "Numpad 4", None, (0x14, )),
("Numpad5", "Numpad 5", None, (0x15, )),
("Numpad6", "Numpad 6", None, (0x16, )),
("Numpad7", "Numpad 7", None, (0x17, )),
("Numpad8", "Numpad 8", None, (0x18, )),
("Numpad9", "Numpad 9", None, (0x19, )),
("ContrastUp", "Contrast Up", None, (0x28, )),
("ContrastDown", "Contrast Down", None, (0x29, )),
("BrightnessUp", "Brightness Up", None, (0x2a, )),
("BrightnessDown", "Brightness Down", None, (0x2b, )),
("SaturationUp", "Colour Saturation Up", None, (0x2c, )),
("SaturationDown", "Colour Saturation Down", None, (0x2d, )),
("TintUp", "Colour Tint Up", None, (0x22, )),
("TintDown", "Colour Tint Down", None, (0x2f, )),
("SharpnessUp", "Sharpness Up", None, (0x36, )),
("SharpnessDown", "Sharpness Down", None, (0x37, )),
)
import wx
from time import sleep, clock
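# Protocol framing bytes: STX opens every frame; the projector answers
# each command with ACK (accepted) or NAK (rejected).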
STX = 0x02
ACK = chr(0x06)
NAK = chr(0x15)
BAUDRATES = [110, 150, 300, 600, 1200, 2400, 4800, 9600]
ALL_BYTE_VALUES = frozenset(range(256))
class ActionBase(eg.ActionBase):
def __call__(self):
with self.plugin.serialThread as serial:
self.SendCommand(serial, *self.value)
def SendCommand(self, serial, cmd, dat1=0, dat2=0, dat3=0, dat4=0, block=None):
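        """
        Frame one command (STX, offset, ADR, CMD, DAT1..DAT4, checksum
        [, data block, block checksum]) and send it; every checksum is
        the byte sum modulo 256.  Raises if the projector does not ACK.
        """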
data = [self.plugin.address, cmd, dat1, dat2, dat3, dat4]
checksum = sum(data) % 256
data.append(checksum)
if block is not None:
data2 = [ord(x) for x in block]
checksum2 = sum(data2) % 256
data += data2
data.append(checksum2)
offset = 0
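        # STX must remain unique as the frame start marker.  If the payload
        # contains STX, find the smallest value k that does not occur in the
        # payload and shift every byte by (STX - k) % 256: only a byte equal
        # to k could shift onto STX, and k is guaranteed to be absent.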
if STX in data:
offset = 1
while offset in data:
offset += 1
offset = (STX - offset) % 256
data = [STX, offset] + [(x + offset) % 256 for x in data]
s = "".join([chr(x) for x in data])
if eg.debugLevel:
print " ".join(["%02X" % x for x in data])
serial.Write(s)
res = serial.Read(1, 0.5)
if res != ACK:
raise self.Exceptions.DeviceNotFound("Got no ACK!")
    def GetResponse(self, serial, expectedCmd):
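        """Read a 7-byte reply (ADR, CMD, DAT1..DAT4, checksum), validate it and return the four data bytes."""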
answer = serial.Read(7, 1.0)
if len(answer) < 7:
raise self.Exceptions.DeviceNotFound("Not enough bytes received!")
if eg.debugLevel:
print " ".join(["%02X" % ord(x) for x in answer])
answer = [ord(c) for c in answer]
adr, cmd, dat1, dat2, dat3, dat4, chks = answer
if adr != self.plugin.address:
raise self.Exceptions.DeviceNotFound("Wrong address received!")
        if cmd != expectedCmd:
raise self.Exceptions.DeviceNotFound("Wrong command received!")
if chks != sum(answer[:6]) % 256:
raise self.Exceptions.DeviceNotFound("Wrong checksum received!")
return dat1, dat2, dat3, dat4
class SendCustom(ActionBase):
def __call__(self, cmd, dat1, dat2, dat3, dat4):
with self.plugin.serialThread as serial:
self.SendCommand(serial, cmd, dat1, dat2, dat3, dat4)
def GetLabel(self, *args):
return " ".join("%02X" % arg for arg in args)
def Configure(self, cmd=0, dat1=0, dat2=0, dat3=0, dat4=0):
panel = eg.ConfigPanel()
values = cmd, dat1, dat2, dat3, dat4
ctrls = [
panel.SpinIntCtrl(values[i], min=0, max=255)
for i in range(5)
]
hexCtrl = panel.StaticText("")
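        # Show a live hex preview of the five bytes while they are edited.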
def UpdateValue(event):
res = ["%02X" % ctrl.GetValue() for ctrl in ctrls]
hexCtrl.SetLabel(" ".join(res))
event.Skip()
UpdateValue(wx.CommandEvent())
for ctrl in ctrls:
ctrl.Bind(wx.EVT_TEXT, UpdateValue)
panel.AddLine("CMD:", ctrls[0])
panel.AddLine(
"DAT1:", ctrls[1], None,
"DAT2:", ctrls[2], None,
"DAT3:", ctrls[3], None,
"DAT4:", ctrls[4]
)
panel.AddLine()
panel.AddLine("Hex string:", hexCtrl)
while panel.Affirmed():
panel.SetResult(*(ctrl.GetValue() for ctrl in ctrls))
class SetText(ActionBase, eg.ActionWithStringParameter):
def __call__(self, s):
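        # Command 0x70 expects a fixed 208-byte text block; pad with NUL bytes.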
s = s + (chr(0) * (208 - len(s)))
with self.plugin.serialThread as serial:
self.SendCommand(serial, 0x70, 0x01, 0x01, 0x01, 0x0c, s)
class ReadTime(ActionBase):
def __call__(self):
with self.plugin.serialThread as serial:
self.SendCommand(serial, 0x60)
dat1, dat2, dat3, dat4 = self.GetResponse(serial, 0x60)
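            # Running time comes back as a 16-bit hour counter (DAT1 = high byte).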
result = dat1 * 256 + dat2
print "Hours:", result
return result
class ReadStatus(ActionBase):
def __call__(self):
with self.plugin.serialThread as serial:
self.SendCommand(serial, 0x4b)
dat1, dat2, dat3, dat4 = self.GetResponse(serial, 0x4b)
print "Fast/Slow:", bool(dat1 & (1 << 7))
print "Green Convergence:", bool(dat1 & (1 << 6))
print "Cursor Position:", dat2
print "PC mode:", bool(dat3 & (1 << 3))
print "Text mode:", bool(dat3 & (1 << 2))
print "Pause:", bool(dat3 & (1 << 1))
print "Standby:", bool(dat3 & (1 << 0))
extraInfo = bool(dat3 & (1 << 7))
if extraInfo:
print "Magnetic focus:", bool(dat3 & (1 << 6))
print "Convergence is stripped:", bool(dat3 & (1 << 7))
print "Orbiting installed:", bool(dat4 & (1 << 0))
print "Soft edge installed:", bool(dat4 & (1 << 1))
print "Contrast modulation installed:", bool(dat4 & (1 << 2))
print "NS is mounted on the convergence:", bool(dat4 & (1 << 3))
print "Controller with ASIC:", bool(dat4 & (1 << 4))
print "IRIS is installed:", bool(dat4 & (1 << 5))
print "Dynamic stigmators:", bool(dat4 & (1 << 6))
class ReadVersion(ActionBase):
def __call__(self):
with self.plugin.serialThread as serial:
self.SendCommand(serial, 0x4c)
dat1, dat2, dat3, dat4 = self.GetResponse(serial, 0x4c)
print "Identifier:", chr(dat1)
print "Version: %d%d.%d%d" % (dat2 / 16, dat2 & 0x0f, dat3 / 16, dat3 & 0x0f)
print "Model:", dat4
class ReadSerialNumber(ActionBase):
def __call__(self):
with self.plugin.serialThread as serial:
self.SendCommand(serial, 0x4d)
dat1, dat2, dat3, dat4 = self.GetResponse(serial, 0x4d)
digits = (
chr(48 + (dat1 & 0x0f)),
chr(48 + (dat2 / 16)),
chr(48 + (dat2 & 0x0f)),
chr(48 + (dat3 / 16)),
chr(48 + (dat3 & 0x0f)),
chr(48 + (dat4 / 16)),
chr(48 + (dat4 & 0x0f))
)
s = "".join(digits)
print "Serial Number:", s
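        # BCD decoding sketch: a hypothetical response dat1..dat4 =
        # 0x01 0x23 0x45 0x67 decodes nibble by nibble to '1234567'.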
return
class GetInfo(ActionBase):
def __call__(self):
with self.plugin.serialThread as serial:
self.SendCommand(serial, 0x4b)
dat1, dat2, dat3, dat4 = self.GetResponse(serial, 0x4b)
print "Fast/Slow:", bool(dat1 & (1 << 7))
print "Green Convergence:", bool(dat1 & (1 << 6))
print "Cursor Position:", dat2
print "PC mode:", bool(dat3 & (1 << 3))
print "Text mode:", bool(dat3 & (1 << 2))
print "Pause:", bool(dat3 & (1 << 1))
print "Standby:", bool(dat3 & (1 << 0))
extraInfo = bool(dat3 & (1 << 7))
if extraInfo:
print "Magnetic focus:", bool(dat3 & (1 << 6))
print "Convergence is stripped:", bool(dat3 & (1 << 7))
print "Orbiting installed:", bool(dat4 & (1 << 0))
print "Soft edge installed:", bool(dat4 & (1 << 1))
print "Contrast modulation installed:", bool(dat4 & (1 << 2))
print "NS is mounted on the convergence:", bool(dat4 & (1 << 3))
print "Controller with ASIC:", bool(dat4 & (1 << 4))
print "IRIS is installed:", bool(dat4 & (1 << 5))
print "Dynamic stigmators:", bool(dat4 & (1 << 6))
self.SendCommand(serial, 0x4d)
dat1, dat2, dat3, dat4 = self.GetResponse(serial, 0x4d)
digits = (
chr(48 + (dat1 & 0x0f)),
chr(48 + (dat2 / 16)),
chr(48 + (dat2 & 0x0f)),
chr(48 + (dat3 / 16)),
chr(48 + (dat3 & 0x0f)),
chr(48 + (dat4 / 16)),
chr(48 + (dat4 & 0x0f))
)
s = "".join(digits)
print "Serial Number:", s
self.SendCommand(serial, 0x60)
dat1, dat2, dat3, dat4 = self.GetResponse(serial, 0x60)
result = dat1 * 256 + dat2
print "Hours:", result
self.SendCommand(serial, 0x4c)
dat1, dat2, dat3, dat4 = self.GetResponse(serial, 0x4c)
print "Identifier:", chr(dat1)
print "Version: %d%d.%d%d" % (dat2 / 16, dat2 & 0x0f, dat3 / 16, dat3 & 0x0f)
print "Model:", dat4
self.SendCommand(serial, 0x4a)
dat1, dat2, dat3, dat4 = self.GetResponse(serial, 0x4a)
print "Horizontal period: %dns" % (dat1 * 250)
print "Vertical frequency: %d Hz" % dat4
if dat2 & 0x80:
print "Source block number: %d not closed" % (dat2 & 0x7f)
else:
print "Source block number: %d" % (dat2 & 0x7f)
SOURCES = {
0: "Video",
1: "SVHS",
2: "TTL",
3: "RGsB",
4: "RGBS",
5: "Internal",
8: "Forced video",
9: "Forced SVHS",
0xA: "Forced TTL",
0xB: "Forced RGsB",
0xC: "Forced RGBS",
}
print "Source: " + SOURCES.get(dat3 / 16, "Unknown")
            INSTALLATIONS = ["Rear / Ceiling", "Front / Table", "Front / Ceiling", "Rear / Table"]
print "Installation: " + INSTALLATIONS[dat3 & 0x03]
print "HDTV:", dat3 & 0x04
class RequestShape(ActionBase):
def __call__(self, shape=0, x=0, y=0, colours=0x07):
with self.plugin.serialThread as serial:
self.SendCommand(serial, 0x78, shape, y * 16 + x, colours)
def Configure(self, shape=0, x=0, y=0, colours=0x07):
        choices = [ # shape codes kept for reference; the dialog below uses spin controls
("Internal convergence pattern", 0x00),
("Horizontal line in center of zones", 0x01),
("Vertical line in center of zones", 0x02),
("Crosshatch in zone XY", 0x05),
("Convergence contour around zone XY", 0x06),
("Erase shape, switch colour", 0x07),
("Vertical bars, switch colour", 0x08),
("Horizontal bars, switch colour", 0x09),
]
panel = eg.ConfigPanel()
shapeCtrl = panel.SpinIntCtrl(shape, max=255)
xCtrl = panel.SpinIntCtrl(x, max=9)
yCtrl = panel.SpinIntCtrl(y, max=9)
redCtrl = panel.CheckBox(colours & 0x01, "Red")
greenCtrl = panel.CheckBox(colours & 0x02, "Green")
blueCtrl = panel.CheckBox(colours & 0x04, "Blue")
panel.AddLine("Shape:", shapeCtrl)
panel.AddLine("X coordinate:", xCtrl)
panel.AddLine("Y coordinate:", yCtrl)
panel.AddLine("Colours:", redCtrl)
panel.AddLine(None, greenCtrl)
panel.AddLine(None, blueCtrl)
while panel.Affirmed():
colours = int(redCtrl.GetValue()) * 0x01
colours |= int(greenCtrl.GetValue()) * 0x02
colours |= int(blueCtrl.GetValue()) * 0x04
panel.SetResult(
shapeCtrl.GetValue(),
xCtrl.GetValue(),
yCtrl.GetValue(),
colours,
)
class LockIr(ActionBase):
name = "Lock IR"
description = (
"Programs the projector to filter out certain infrared commands."
)
def __call__(self, flags=0x7f):
with self.plugin.serialThread as serial:
self.SendCommand(serial, 0x50, flags)
def Configure(self, flags=0x7f):
panel = eg.ConfigPanel()
choices = [
"Stand by",
"Pause",
"Text",
"Adjust keys (Adj, Enter, Exit, cursors)",
"Numeric keys",
"Picture control keys",
"Sound control keys",
]
panel.AddLine("Allowed IR-commands:")
ctrls = []
for i, choice in enumerate(choices):
ctrl = panel.CheckBox(flags & (1 << i), choice)
ctrls.append(ctrl)
panel.AddLine(None, ctrl)
while panel.Affirmed():
flags = 0
for i, ctrl in enumerate(ctrls):
flags |= (1 << i) * int(ctrl.GetValue())
panel.SetResult(flags)
class ReadPotentiometer(ActionBase):
def __call__(self, kind, x=0, y=0):
with self.plugin.serialThread as serial:
self.SendCommand(serial, 0x7a, kind, y * 16 + x)
dat1, dat2, dat3, dat4 = self.GetResponse(serial, 0x7a)
print "Value:", dat1, dat2, dat3, dat4
return dat3
def Configure(self, kind=0, x=0, y=0):
panel = eg.ConfigPanel()
kindCtrl = panel.SpinIntCtrl(kind, max=255)
xCtrl = panel.SpinIntCtrl(x, max=9)
yCtrl = panel.SpinIntCtrl(y, max=9)
panel.AddLine("Potentiometer:", kindCtrl)
panel.AddLine("X coordinate:", xCtrl)
panel.AddLine("Y coordinate:", yCtrl)
while panel.Affirmed():
panel.SetResult(
kindCtrl.GetValue(),
xCtrl.GetValue(),
yCtrl.GetValue(),
)
class WritePotentiometer(ActionBase):
def __call__(self, kind, x=0, y=0, value=128, flags=3):
print kind, x, y, value, flags
with self.plugin.serialThread as serial:
self.SendCommand(serial, 0x79, kind, y * 16 + x, value, flags)
def Configure(self, kind=0, x=0, y=0, value=128, flags=3):
panel = eg.ConfigPanel()
kindCtrl = panel.SpinIntCtrl(kind, max=255)
xCtrl = panel.SpinIntCtrl(x, max=9)
yCtrl = panel.SpinIntCtrl(y, max=9)
valueCtrl = panel.SpinIntCtrl(value, max=255)
deltaCb = panel.CheckBox(not (flags & 1), "Value is delta")
storeCb = panel.CheckBox(not (flags & 2), "Store in EEPROM")
panel.AddLine("Potentiometer:", kindCtrl)
panel.AddLine("X coordinate:", xCtrl)
panel.AddLine("Y coordinate:", yCtrl)
panel.AddLine("Value/Delta:", valueCtrl)
panel.AddLine(deltaCb)
panel.AddLine(storeCb)
while panel.Affirmed():
panel.SetResult(
kindCtrl.GetValue(),
xCtrl.GetValue(),
yCtrl.GetValue(),
valueCtrl.GetValue(),
int(not deltaCb.GetValue()) + int(not storeCb.GetValue()) * 2
)
class Barco(eg.PluginBase):
def __init__(self):
self.AddActionsFromList(ACTIONS, ActionBase)
group = self.AddGroup("Unfinished")
group.AddAction(SetText)
group.AddAction(RequestShape)
group.AddAction(LockIr)
group.AddAction(ReadSerialNumber)
group.AddAction(ReadVersion)
group.AddAction(ReadStatus)
group.AddAction(ReadTime)
group.AddAction(GetInfo)
group.AddAction(ReadPotentiometer)
group.AddAction(WritePotentiometer)
group.AddAction(SendCustom)
@eg.LogIt
def __start__(self, port=0, address=0, baudrate=9600):
self.port = port
self.address = address
self.serialThread = eg.SerialThread()
self.serialThread.SetReadEventCallback(self.OnReceive)
self.serialThread.Open(port, baudrate)
self.serialThread.SetRts()
self.serialThread.Start()
def __stop__(self):
self.serialThread.Close()
def OnReceive(self, serial):
data = serial.Read(512)
print "Barco: " + " ".join(["%02X" % ord(c) for c in data])
def Configure(self, port=0, address=0, baudrate=9600):
panel = eg.ConfigPanel()
portCtrl = panel.SerialPortChoice(port)
choices = [str(baudrate) for baudrate in BAUDRATES]
baudrateCtrl = panel.Choice(BAUDRATES.index(baudrate), choices=choices)
addrCtrl = panel.SpinIntCtrl(address, min=0, max=255)
panel.AddLine("Serial port:", portCtrl)
panel.AddLine("Baudrate:", baudrateCtrl)
panel.AddLine("Projector address:", addrCtrl)
while panel.Affirmed():
panel.SetResult(
portCtrl.GetValue(),
addrCtrl.GetValue(),
BAUDRATES[baudrateCtrl.GetValue()],
)
| gpl-2.0 | -198,317,870,930,125,540 | 35.537285 | 96 | 0.532141 | false |
w4nderlust/lingua-politicae | scraping/scrape_facebook.py | 1 | 1718 | import os
import facebook
import json
import requests
# get temporary access token here: https://developers.facebook.com/tools/explorer/
from globals import POLITICIANS_INFO_FILE_PATH, FACEBOOK_POSTS_DIRECTORY
access_token = 'EAACEdEose0cBAAKiNQ3ZB3kpnGu7GqkWq4mUHQBb4BuKmae6FHH3jSTIqZBeuqU7hhVv3WiAdxWMLbwx1h9ptmzRWMwufknjSkG2ORXPo8WNuI6IeUGFcrZBciUWE4tD7rXKYGlVdLZB4ZCfQ4hmQdUag39FpdWkxxe9i3gBKcMxwq5kwOvv2CcZAFjr28ls4ZD'
graph = facebook.GraphAPI(access_token)
try:
with open(POLITICIANS_INFO_FILE_PATH) as data_file:
users = json.load(data_file)
except ValueError:
users = []
print("A problem occurred when parsing politicians_info.json")
def get_posts(user):
profile = graph.get_object(user['facebook'])
output_file = os.path.join(FACEBOOK_POSTS_DIRECTORY, user["facebook"] + '_facebook.json')
results = []
print("getting {0}".format(user['facebook']))
posts = graph.get_connections(profile['id'], 'feed?limit=100')
pages = 0
while pages < 10:
try:
# Perform some action on each post in the collection we receive from
# Facebook.
for post in posts['data']:
results.append(post)
# Attempt to make a request to the next page of data, if it exists.
posts = requests.get(posts['paging']['next']).json()
pages += 1
except KeyError:
# When there are no more pages (['paging']['next']), break from the
# loop and end the script.
break
print("posts {0}".format(len(results)))
with open(output_file, 'w') as outfile:
json.dump(results, outfile)
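# A Graph API feed page is shaped roughly like this (illustrative, not a real
# response):
#   {"data": [{...post...}, ...],
#    "paging": {"next": "https://graph.facebook.com/..."}}
# get_posts() keeps following paging['next'] until the key disappears or ten
# pages have been fetched.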
for user in users:
get_posts(user)
print('all done')
| apache-2.0 | -5,462,487,004,753,837,000 | 29.678571 | 213 | 0.672293 | false |
tulip-control/tulip-control | contrib/fmu/robotfmu.py | 1 | 3360 | #!/usr/bin/env python
"""Demonstration of FMU export from a controller synthesized using TuLiP
This is a modified copy from the TuLiP sources of
examples/robot_planning/continuous.py that exports an FMU. The key
changes are
* caching of the abstraction and controller in AbstractPwa.p and FSM.p, and
* calling exportFMU() and `make test_controller` (near end of this file).
"""
import os.path
import os
import pickle
import sys
import numpy as np
from tulip import spec, synth, hybrid
from polytope import box2poly
from tulip.abstract import prop2part, discretize
from exportFMU import exportFMU
BUILDDIR = "build/"
def specify_discretize_synthesize():
"""Return PWA partition and controller, dump them to pickle files."""
# Problem parameters
input_bound = 1.0
uncertainty = 0.01
# Continuous state space
cont_state_space = box2poly([[0., 3.], [0., 2.]])
# Continuous dynamics
A = np.array([[1.0, 0.], [0., 1.0]])
B = np.array([[0.1, 0.], [0., 0.1]])
E = np.array([[1., 0.], [0., 1.]])
# Available control, possible disturbances
U = input_bound * np.array([[-1., 1.], [-1., 1.]])
W = uncertainty * np.array([[-1., 1.], [-1., 1.]])
# Convert to polyhedral representation
U = box2poly(U)
W = box2poly(W)
# Construct the LTI system describing the dynamics
sys_dyn = hybrid.LtiSysDyn(A, B, E, None, U, W, cont_state_space)
# Define atomic propositions for relevant regions of state space
cont_props = {}
cont_props['home'] = box2poly([[0., 1.], [0., 1.]])
cont_props['lot'] = box2poly([[2., 3.], [1., 2.]])
# Compute proposition preserving partition of the continuous state space
cont_partition = prop2part(cont_state_space, cont_props)
pwa = discretize(
cont_partition, sys_dyn, closed_loop=True,
N=8, min_cell_volume=0.1, plotit=False)
"""Specifications"""
# Environment variables and assumptions
env_vars = {'park'}
env_init = set()
env_prog = '!park'
env_safe = set()
# System variables and requirements
sys_vars = {'X0reach'}
sys_init = {'X0reach'}
sys_prog = {'home'} # []<>home
sys_safe = {'(X(X0reach) <-> lot) || (X0reach && !park)'}
sys_prog |= {'X0reach'}
# Create the specification
specs = spec.GRSpec(env_vars, sys_vars, env_init, sys_init,
env_safe, sys_safe, env_prog, sys_prog)
specs.qinit = r'\A \E'
specs.moore = False
specs.plus_one = False
"""Synthesize"""
ctrl = synth.synthesize(
specs, sys=pwa.ts, ignore_sys_init=True, solver='gr1c')
# store the result for future use
if len(BUILDDIR) > 0 and not os.path.exists(BUILDDIR):
os.mkdir(BUILDDIR)
pickle.dump(ctrl, open(BUILDDIR + 'FSM.p', 'wb'))
pickle.dump(pwa, open(BUILDDIR + 'AbstractPwa.p', 'wb'))
return pwa, ctrl
def pickle_files_exist():
return (
os.path.isfile(BUILDDIR + 'AbstractPwa.p') and
os.path.isfile(BUILDDIR + 'FSM.p'))
if __name__ == '__main__':
if pickle_files_exist():
pwa = pickle.load(open(BUILDDIR + "AbstractPwa.p", "rb"))
ctrl = pickle.load(open(BUILDDIR + "FSM.p", "rb"))
else:
pwa, ctrl = specify_discretize_synthesize()
x0 = np.array([1.5, 1.5])
d0 = 18
exportFMU(ctrl, pwa, x0, d0)
os.system("make test_controller")
| bsd-3-clause | 7,800,883,244,847,692,000 | 29 | 76 | 0.622917 | false |
akx/shoop | shoop_tests/core/test_product_packages.py | 1 | 2238 | # -*- coding: utf-8 -*-
# This file is part of Shoop.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
import pytest
import six
from django.test import RequestFactory
from shoop.core.models import (
AnonymousContact, OrderLineType, ProductMode, Shop
)
from shoop.core.order_creator import OrderCreator, SourceLine
from shoop.testing.factories import (
create_product, get_default_shop, get_default_supplier,
get_initial_order_status
)
from shoop.testing.utils import apply_request_middleware
from shoop_tests.utils.basketish_order_source import BasketishOrderSource
@pytest.mark.django_db
def test_package():
shop = get_default_shop()
supplier = get_default_supplier()
package_product = create_product("PackageParent", shop=shop, supplier=supplier)
assert not package_product.get_package_child_to_quantity_map()
children = [create_product("PackageChild-%d" % x, shop=shop, supplier=supplier) for x in range(4)]
package_def = {child: 1 + i for (i, child) in enumerate(children)}
package_product.make_package(package_def)
assert package_product.is_package_parent()
package_product.save()
sp = package_product.get_shop_instance(shop)
assert not list(sp.get_orderability_errors(supplier=supplier, quantity=1, customer=AnonymousContact()))
with pytest.raises(ValueError): # Test re-packaging fails
package_product.make_package(package_def)
# Check that OrderCreator can deal with packages
source = BasketishOrderSource(get_default_shop())
source.add_line(
type=OrderLineType.PRODUCT,
product=package_product,
supplier=get_default_supplier(),
quantity=10,
base_unit_price=source.create_price(10),
)
source.status = get_initial_order_status()
request = apply_request_middleware(RequestFactory().get("/"))
creator = OrderCreator(request)
order = creator.create_order(source)
pids_to_quantities = order.get_product_ids_and_quantities()
for child, quantity in six.iteritems(package_def):
assert pids_to_quantities[child.pk] == 10 * quantity
| agpl-3.0 | 2,530,028,607,514,225,000 | 36.3 | 107 | 0.725648 | false |
google-research/kubric | kubric/__init__.py | 1 | 3004 | # Copyright 2021 The Kubric Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Root of the kubric module."""
from kubric.core.scene import Scene
from kubric.core.assets import Asset
from kubric.core.assets import UndefinedAsset
from kubric.core.cameras import Camera
from kubric.core.cameras import UndefinedCamera
from kubric.core.cameras import PerspectiveCamera
from kubric.core.cameras import OrthographicCamera
from kubric.core.color import Color
from kubric.core.color import get_color
from kubric.core.lights import Light
from kubric.core.lights import UndefinedLight
from kubric.core.lights import DirectionalLight
from kubric.core.lights import PointLight
from kubric.core.lights import RectAreaLight
from kubric.core.materials import Material
from kubric.core.materials import UndefinedMaterial
from kubric.core.materials import PrincipledBSDFMaterial
from kubric.core.materials import FlatMaterial
from kubric.core.objects import Object3D
from kubric.core.objects import PhysicalObject
from kubric.core.objects import Sphere
from kubric.core.objects import Cube
from kubric.core.objects import FileBasedObject
from kubric.core.traits import Vector3D
from kubric.core.traits import Scale
from kubric.core.traits import Quaternion
from kubric.core.traits import RGB
from kubric.core.traits import RGBA
from kubric.core.traits import AssetInstance
from kubric.custom_types import AddAssetFunction
from kubric.custom_types import PathLike
from kubric import assets
from kubric.assets import AssetSource
from kubric.assets import TextureSource
from kubric.randomness import random_hue_color
from kubric.randomness import random_rotation
from kubric.randomness import rotation_sampler
from kubric.randomness import position_sampler
from kubric.randomness import resample_while
from kubric.randomness import move_until_no_overlap
from kubric.utils import ArgumentParser
from kubric.utils import setup_logging
from kubric.utils import log_my_flags
from kubric.utils import setup_directories
from kubric.utils import get_scene_metadata
from kubric.utils import get_instance_info
from kubric.utils import get_camera_info
from kubric.utils import process_collisions
from kubric.utils import save_as_pkl
from kubric.utils import save_as_json
from kubric.utils import done
from kubric.utils import str2path
from kubric.version import __version__
# TODO: remove and add a test that checks pathlib should NOT be imported?
from tensorflow_datasets.core.utils.generic_path import as_path
| apache-2.0 | 4,263,002,817,207,333,000 | 34.761905 | 74 | 0.828229 | false |
pyfa-org/Pyfa | service/port/eft.py | 1 | 34060 | # =============================================================================
# Copyright (C) 2014 Ryan Holmes
#
# This file is part of pyfa.
#
# pyfa is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pyfa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
# =============================================================================
import re
from logbook import Logger
from eos.const import FittingModuleState, FittingSlot
from eos.db.gamedata.queries import getDynamicItem
from eos.saveddata.booster import Booster
from eos.saveddata.cargo import Cargo
from eos.saveddata.citadel import Citadel
from eos.saveddata.drone import Drone
from eos.saveddata.fighter import Fighter
from eos.saveddata.fit import Fit
from eos.saveddata.implant import Implant
from eos.saveddata.module import Module
from eos.saveddata.ship import Ship
from gui.fitCommands.helpers import activeStateLimit
from service.const import PortEftOptions
from service.fit import Fit as svcFit
from service.market import Market
from service.port.muta import parseMutant, renderMutant
from service.port.shared import IPortUser, fetchItem, processing_notify
pyfalog = Logger(__name__)
MODULE_CATS = ('Module', 'Subsystem', 'Structure Module')
SLOT_ORDER = (FittingSlot.LOW, FittingSlot.MED, FittingSlot.HIGH, FittingSlot.RIG, FittingSlot.SUBSYSTEM, FittingSlot.SERVICE)
OFFLINE_SUFFIX = '/OFFLINE'
NAME_CHARS = r'[^,/\[\]]' # Characters allowed in names
def exportEft(fit, options, callback):
    # An EFT-formatted export is split into several sections; sections are
    # separated from one another by 2 blank lines. A section might have
    # several sub-sections, which are separated by 1 blank line
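    # The assembled text ends up shaped like this (illustrative skeleton):
    #   [Ship Type, Fit Name]
    #   <module racks, low slots through services, one sub-section per rack>
    #   <drones / fighters>  <implants / boosters>  <cargo>  <mutation details>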
sections = []
header = '[{}, {}]'.format(fit.ship.item.typeName, fit.name)
# Section 1: modules, rigs, subsystems, services
modsBySlotType = {}
for module in fit.modules:
modsBySlotType.setdefault(module.slot, []).append(module)
modSection = []
mutants = {} # Format: {reference number: module}
mutantReference = 1
for slotType in SLOT_ORDER:
rackLines = []
modules = modsBySlotType.get(slotType, ())
for module in modules:
if module.item:
# if module was mutated, use base item name for export
if module.isMutated:
modName = module.baseItem.typeName
else:
modName = module.item.typeName
if module.isMutated and options[PortEftOptions.MUTATIONS]:
mutants[mutantReference] = module
mutationSuffix = ' [{}]'.format(mutantReference)
mutantReference += 1
else:
mutationSuffix = ''
modOfflineSuffix = ' {}'.format(OFFLINE_SUFFIX) if module.state == FittingModuleState.OFFLINE else ''
if module.charge and options[PortEftOptions.LOADED_CHARGES]:
rackLines.append('{}, {}{}{}'.format(
modName, module.charge.typeName, modOfflineSuffix, mutationSuffix))
else:
rackLines.append('{}{}{}'.format(modName, modOfflineSuffix, mutationSuffix))
else:
rackLines.append('[Empty {} slot]'.format(
FittingSlot(slotType).name.capitalize() if slotType is not None else ''))
if rackLines:
modSection.append('\n'.join(rackLines))
if modSection:
sections.append('\n\n'.join(modSection))
# Section 2: drones, fighters
minionSection = []
droneExport = exportDrones(fit.drones)
if droneExport:
minionSection.append(droneExport)
fighterExport = exportFighters(fit.fighters)
if fighterExport:
minionSection.append(fighterExport)
if minionSection:
sections.append('\n\n'.join(minionSection))
# Section 3: implants, boosters
charSection = []
if options[PortEftOptions.IMPLANTS]:
implantExport = exportImplants(fit.implants)
if implantExport:
charSection.append(implantExport)
if options[PortEftOptions.BOOSTERS]:
boosterExport = exportBoosters(fit.boosters)
if boosterExport:
charSection.append(boosterExport)
if charSection:
sections.append('\n\n'.join(charSection))
# Section 4: cargo
if options[PortEftOptions.CARGO]:
cargoExport = exportCargo(fit.cargo)
if cargoExport:
sections.append(cargoExport)
# Section 5: mutated modules' details
mutationLines = []
if mutants and options[PortEftOptions.MUTATIONS]:
for mutantReference in sorted(mutants):
mutant = mutants[mutantReference]
mutationLines.append(renderMutant(mutant, firstPrefix='[{}] '.format(mutantReference), prefix=' '))
if mutationLines:
sections.append('\n'.join(mutationLines))
text = '{}\n\n{}'.format(header, '\n\n\n'.join(sections))
if callback:
callback(text)
else:
return text
def exportDrones(drones):
droneLines = []
for drone in sorted(drones, key=lambda d: d.item.typeName):
droneLines.append('{} x{}'.format(drone.item.typeName, drone.amount))
return '\n'.join(droneLines)
def exportFighters(fighters):
fighterLines = []
for fighter in sorted(fighters, key=lambda f: f.item.typeName):
fighterLines.append('{} x{}'.format(fighter.item.typeName, fighter.amount))
return '\n'.join(fighterLines)
def exportImplants(implants):
implantLines = []
for implant in sorted(implants, key=lambda i: i.slot or 0):
implantLines.append(implant.item.typeName)
return '\n'.join(implantLines)
def exportBoosters(boosters):
boosterLines = []
for booster in sorted(boosters, key=lambda b: b.slot or 0):
boosterLines.append(booster.item.typeName)
return '\n'.join(boosterLines)
def exportCargo(cargos):
cargoLines = []
for cargo in sorted(cargos, key=lambda c: (c.item.group.category.name, c.item.group.name, c.item.typeName)):
cargoLines.append('{} x{}'.format(cargo.item.typeName, cargo.amount))
return '\n'.join(cargoLines)
def importEft(lines):
lines = _importPrepare(lines)
try:
fit = _importCreateFit(lines)
except EftImportError:
return
aFit = AbstractFit()
aFit.mutations = _importGetMutationData(lines)
    stubPattern = r'^\[.+?\]$'
    modulePattern = r'^(?P<typeName>{0}+?)(,\s*(?P<chargeName>{0}+?))?(?P<offline>\s*{1})?(\s*\[(?P<mutation>\d+?)\])?$'.format(NAME_CHARS, OFFLINE_SUFFIX)
    droneCargoPattern = r'^(?P<typeName>{}+?) x(?P<amount>\d+?)$'.format(NAME_CHARS)
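    # Lines each pattern is meant to match (hypothetical item names):
    #   stubPattern:       '[Empty High slot]'
    #   modulePattern:     'Some Launcher II, Some Missile /OFFLINE [2]'
    #   droneCargoPattern: 'Some Drone II x5'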
sections = []
for section in _importSectionIter(lines):
for line in section.lines:
# Stub line
if re.match(stubPattern, line):
section.itemSpecs.append(None)
continue
# Items with quantity specifier
m = re.match(droneCargoPattern, line)
if m:
try:
itemSpec = MultiItemSpec(m.group('typeName'))
# Items which cannot be fetched are considered as stubs
except EftImportError:
section.itemSpecs.append(None)
else:
itemSpec.amount = int(m.group('amount'))
section.itemSpecs.append(itemSpec)
continue
# All other items
m = re.match(modulePattern, line)
if m:
try:
itemSpec = RegularItemSpec(m.group('typeName'), chargeName=m.group('chargeName'))
# Items which cannot be fetched are considered as stubs
except EftImportError:
section.itemSpecs.append(None)
else:
if m.group('offline'):
itemSpec.offline = True
if m.group('mutation'):
itemSpec.mutationIdx = int(m.group('mutation'))
section.itemSpecs.append(itemSpec)
continue
_clearTail(section.itemSpecs)
sections.append(section)
hasDroneBay = any(s.isDroneBay for s in sections)
hasFighterBay = any(s.isFighterBay for s in sections)
for section in sections:
if section.isModuleRack:
aFit.addModules(section.itemSpecs)
elif section.isImplantRack:
for itemSpec in section.itemSpecs:
aFit.addImplant(itemSpec)
elif section.isDroneBay:
for itemSpec in section.itemSpecs:
aFit.addDrone(itemSpec)
elif section.isFighterBay:
for itemSpec in section.itemSpecs:
aFit.addFighter(itemSpec)
elif section.isCargoHold:
for itemSpec in section.itemSpecs:
aFit.addCargo(itemSpec)
# Mix between different kinds of item specs (can happen when some
# blank lines are removed)
else:
for itemSpec in section.itemSpecs:
if itemSpec is None:
continue
if itemSpec.isModule:
aFit.addModule(itemSpec)
elif itemSpec.isImplant:
aFit.addImplant(itemSpec)
elif itemSpec.isDrone and not hasDroneBay:
aFit.addDrone(itemSpec)
elif itemSpec.isFighter and not hasFighterBay:
aFit.addFighter(itemSpec)
elif itemSpec.isCargo:
aFit.addCargo(itemSpec)
# Subsystems first because they modify slot amount
for i, m in enumerate(aFit.subsystems):
if m is None:
dummy = Module.buildEmpty(aFit.getSlotByContainer(aFit.subsystems))
dummy.owner = fit
fit.modules.replaceRackPosition(i, dummy)
elif m.fits(fit):
m.owner = fit
fit.modules.replaceRackPosition(i, m)
sFit = svcFit.getInstance()
sFit.recalc(fit)
sFit.fill(fit)
# Other stuff
for modRack in (
aFit.rigs,
aFit.services,
aFit.modulesHigh,
aFit.modulesMed,
aFit.modulesLow,
):
for i, m in enumerate(modRack):
if m is None:
dummy = Module.buildEmpty(aFit.getSlotByContainer(modRack))
dummy.owner = fit
fit.modules.replaceRackPosition(i, dummy)
elif m.fits(fit):
m.owner = fit
if not m.isValidState(m.state):
pyfalog.warning('service.port.eft.importEft: module {} cannot have state {}', m, m.state)
fit.modules.replaceRackPosition(i, m)
for implant in aFit.implants:
fit.implants.append(implant)
for booster in aFit.boosters:
fit.boosters.append(booster)
for drone in aFit.drones.values():
fit.drones.append(drone)
for fighter in aFit.fighters:
fit.fighters.append(fighter)
for cargo in aFit.cargo.values():
fit.cargo.append(cargo)
return fit
def importEftCfg(shipname, lines, iportuser):
"""Handle import from EFT config store file"""
# Check if we have such ship in database, bail if we don't
sMkt = Market.getInstance()
try:
sMkt.getItem(shipname)
except (KeyboardInterrupt, SystemExit):
raise
except:
return [] # empty list is expected
fits = [] # List for fits
fitIndices = [] # List for starting line numbers for each fit
for line in lines:
# Detect fit header
if line[:1] == "[" and line[-1:] == "]":
# Line index where current fit starts
startPos = lines.index(line)
fitIndices.append(startPos)
for i, startPos in enumerate(fitIndices):
# End position is last file line if we're trying to get it for last fit,
# or start position of next fit minus 1
endPos = len(lines) if i == len(fitIndices) - 1 else fitIndices[i + 1]
# Finally, get lines for current fitting
fitLines = lines[startPos:endPos]
try:
# Create fit object
fitobj = Fit()
# Strip square brackets and pull out a fit name
fitobj.name = fitLines[0][1:-1]
# Assign ship to fitting
try:
fitobj.ship = Ship(sMkt.getItem(shipname))
except ValueError:
fitobj.ship = Citadel(sMkt.getItem(shipname))
moduleList = []
for x in range(1, len(fitLines)):
line = fitLines[x]
if not line:
continue
# Parse line into some data we will need
misc = re.match("(Drones|Implant|Booster)_(Active|Inactive)=(.+)", line)
cargo = re.match("Cargohold=(.+)", line)
# 2017/03/27 NOTE: store description from EFT
description = re.match("Description=(.+)", line)
if misc:
entityType = misc.group(1)
entityState = misc.group(2)
entityData = misc.group(3)
if entityType == "Drones":
droneData = re.match("(.+),([0-9]+)", entityData)
# Get drone name and attempt to detect drone number
droneName = droneData.group(1) if droneData else entityData
droneAmount = int(droneData.group(2)) if droneData else 1
# Bail if we can't get item or it's not from drone category
try:
droneItem = sMkt.getItem(droneName, eager="group.category")
except (KeyboardInterrupt, SystemExit):
raise
except:
pyfalog.warning("Cannot get item.")
continue
if droneItem.category.name == "Drone":
# Add drone to the fitting
d = Drone(droneItem)
d.amount = droneAmount
if entityState == "Active":
d.amountActive = droneAmount
elif entityState == "Inactive":
d.amountActive = 0
fitobj.drones.append(d)
elif droneItem.category.name == "Fighter": # EFT saves fighter as drones
ft = Fighter(droneItem)
ft.amount = int(droneAmount) if ft.amount <= ft.fighterSquadronMaxSize else ft.fighterSquadronMaxSize
fitobj.fighters.append(ft)
else:
continue
elif entityType == "Implant":
# Bail if we can't get item or it's not from implant category
try:
implantItem = sMkt.getItem(entityData, eager="group.category")
except (KeyboardInterrupt, SystemExit):
raise
except:
pyfalog.warning("Cannot get item.")
continue
if implantItem.category.name != "Implant":
continue
# Add implant to the fitting
imp = Implant(implantItem)
if entityState == "Active":
imp.active = True
elif entityState == "Inactive":
imp.active = False
fitobj.implants.append(imp)
elif entityType == "Booster":
# Bail if we can't get item or it's not from implant category
try:
boosterItem = sMkt.getItem(entityData, eager="group.category")
except (KeyboardInterrupt, SystemExit):
raise
except:
pyfalog.warning("Cannot get item.")
continue
# All boosters have implant category
if boosterItem.category.name != "Implant":
continue
# Add booster to the fitting
b = Booster(boosterItem)
if entityState == "Active":
b.active = True
elif entityState == "Inactive":
b.active = False
fitobj.boosters.append(b)
# If we don't have any prefixes, then it's a module
elif cargo:
cargoData = re.match("(.+),([0-9]+)", cargo.group(1))
cargoName = cargoData.group(1) if cargoData else cargo.group(1)
cargoAmount = int(cargoData.group(2)) if cargoData else 1
# Bail if we can't get item
try:
item = sMkt.getItem(cargoName)
except (KeyboardInterrupt, SystemExit):
raise
except:
pyfalog.warning("Cannot get item.")
continue
# Add Cargo to the fitting
c = Cargo(item)
c.amount = cargoAmount
fitobj.cargo.append(c)
# 2017/03/27 NOTE: store description from EFT
elif description:
fitobj.notes = description.group(1).replace("|", "\n")
else:
withCharge = re.match("(.+),(.+)", line)
modName = withCharge.group(1) if withCharge else line
chargeName = withCharge.group(2) if withCharge else None
# If we can't get module item, skip it
try:
modItem = sMkt.getItem(modName)
except (KeyboardInterrupt, SystemExit):
raise
except:
pyfalog.warning("Cannot get item.")
continue
# Create module
m = Module(modItem)
# Add subsystems before modules to make sure T3 cruisers have subsystems installed
if modItem.category.name == "Subsystem":
if m.fits(fitobj):
fitobj.modules.append(m)
else:
m.owner = fitobj
# Activate mod if it is activable
if m.isValidState(FittingModuleState.ACTIVE):
m.state = activeStateLimit(m.item)
# Add charge to mod if applicable, on any errors just don't add anything
if chargeName:
try:
chargeItem = sMkt.getItem(chargeName, eager="group.category")
if chargeItem.category.name == "Charge":
m.charge = chargeItem
except (KeyboardInterrupt, SystemExit):
raise
except:
pyfalog.warning("Cannot get item.")
pass
# Append module to fit
moduleList.append(m)
# Recalc to get slot numbers correct for T3 cruisers
sFit = svcFit.getInstance()
sFit.recalc(fitobj)
sFit.fill(fitobj)
for module in moduleList:
if module.fits(fitobj):
fitobj.modules.append(module)
# Append fit to list of fits
fits.append(fitobj)
if iportuser: # NOTE: Send current processing status
processing_notify(
iportuser, IPortUser.PROCESS_IMPORT | IPortUser.ID_UPDATE,
"%s:\n%s" % (fitobj.ship.name, fitobj.name)
)
except (KeyboardInterrupt, SystemExit):
raise
# Skip fit silently if we get an exception
except Exception as e:
pyfalog.error("Caught exception on fit.")
pyfalog.error(e)
pass
return fits
def _importPrepare(lines):
for i in range(len(lines)):
lines[i] = lines[i].strip()
while lines and not lines[0]:
del lines[0]
while lines and not lines[-1]:
del lines[-1]
return lines
mutantHeaderPattern = re.compile(r'^\[(?P<ref>\d+)\](?P<tail>.*)')
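# Matches a mutated-module header line such as '[1] Some Mutated Module'
# (hypothetical name): group 'ref' captures 1, group 'tail' the rest.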
def _importGetMutationData(lines):
    # Format: {ref: [lines]}
mutaLinesMap = {}
currentMutaRef = None
currentMutaLines = []
consumedIndices = set()
def completeMutaLines():
if currentMutaRef is not None and currentMutaLines:
mutaLinesMap[currentMutaRef] = currentMutaLines
for i, line in enumerate(lines):
m = mutantHeaderPattern.match(line)
# Start and reset at header line
if m:
completeMutaLines()
currentMutaRef = int(m.group('ref'))
currentMutaLines = []
currentMutaLines.append(m.group('tail'))
consumedIndices.add(i)
# Reset at blank line
elif not line:
completeMutaLines()
currentMutaRef = None
currentMutaLines = []
elif currentMutaRef is not None:
currentMutaLines.append(line)
consumedIndices.add(i)
else:
completeMutaLines()
# Clear mutant info from source
for i in sorted(consumedIndices, reverse=True):
del lines[i]
# Run parsing
data = {}
for ref, mutaLines in mutaLinesMap.items():
_, mutaType, mutaAttrs = parseMutant(mutaLines)
data[ref] = (mutaType, mutaAttrs)
return data
def _importSectionIter(lines):
section = Section()
for line in lines:
if not line:
if section.lines:
yield section
section = Section()
else:
section.lines.append(line)
if section.lines:
yield section
def _importCreateFit(lines):
"""Create fit and set top-level entity (ship or citadel)."""
fit = Fit()
header = lines.pop(0)
    m = re.match(r'\[(?P<shipType>[^,]+),\s*(?P<fitName>.+)\]', header)
if not m:
pyfalog.warning('service.port.eft.importEft: corrupted fit header')
raise EftImportError
shipType = m.group('shipType').strip()
fitName = m.group('fitName').strip()
try:
ship = fetchItem(shipType)
try:
fit.ship = Ship(ship)
except ValueError:
fit.ship = Citadel(ship)
fit.name = fitName
except (KeyboardInterrupt, SystemExit):
raise
except:
pyfalog.warning('service.port.eft.importEft: exception caught when parsing header')
raise EftImportError
return fit
def _clearTail(lst):
while lst and lst[-1] is None:
del lst[-1]
class EftImportError(Exception):
"""Exception class emitted and consumed by EFT importer internally."""
...
class Section:
def __init__(self):
self.lines = []
self.itemSpecs = []
self.__itemDataCats = None
@property
def itemDataCats(self):
if self.__itemDataCats is None:
cats = set()
for itemSpec in self.itemSpecs:
if itemSpec is None:
continue
cats.add(itemSpec.item.category.name)
self.__itemDataCats = tuple(sorted(cats))
return self.__itemDataCats
@property
def isModuleRack(self):
return all(i is None or i.isModule for i in self.itemSpecs)
@property
def isImplantRack(self):
return all(i is not None and i.isImplant for i in self.itemSpecs)
@property
def isDroneBay(self):
return all(i is not None and i.isDrone for i in self.itemSpecs)
@property
def isFighterBay(self):
return all(i is not None and i.isFighter for i in self.itemSpecs)
@property
def isCargoHold(self):
return (
all(i is not None and i.isCargo for i in self.itemSpecs) and
not self.isDroneBay and not self.isFighterBay)
class BaseItemSpec:
def __init__(self, typeName):
item = fetchItem(typeName, eagerCat=True)
if item is None:
raise EftImportError
self.typeName = typeName
self.item = item
@property
def isModule(self):
return False
@property
def isImplant(self):
return False
@property
def isDrone(self):
return False
@property
def isFighter(self):
return False
@property
def isCargo(self):
return False
class RegularItemSpec(BaseItemSpec):
def __init__(self, typeName, chargeName=None):
super().__init__(typeName)
self.charge = self.__fetchCharge(chargeName)
self.offline = False
self.mutationIdx = None
def __fetchCharge(self, chargeName):
if chargeName:
charge = fetchItem(chargeName, eagerCat=True)
if not charge or charge.category.name != 'Charge':
charge = None
else:
charge = None
return charge
@property
def isModule(self):
return self.item.category.name in MODULE_CATS
@property
def isImplant(self):
return (
self.item.category.name == 'Implant' and (
'implantness' in self.item.attributes or
'boosterness' in self.item.attributes))
class MultiItemSpec(BaseItemSpec):
def __init__(self, typeName):
super().__init__(typeName)
self.amount = 0
@property
def isDrone(self):
return self.item.category.name == 'Drone'
@property
def isFighter(self):
return self.item.category.name == 'Fighter'
@property
def isCargo(self):
return True
class AbstractFit:
def __init__(self):
# Modules
self.modulesHigh = []
self.modulesMed = []
self.modulesLow = []
self.rigs = []
self.subsystems = []
self.services = []
# Non-modules
self.implants = []
self.boosters = []
self.drones = {} # Format: {item: Drone}
self.fighters = []
self.cargo = {} # Format: {item: Cargo}
# Other stuff
self.mutations = {} # Format: {reference: (mutaplamid item, {attr ID: attr value})}
@property
def __slotContainerMap(self):
return {
FittingSlot.HIGH: self.modulesHigh,
FittingSlot.MED: self.modulesMed,
FittingSlot.LOW: self.modulesLow,
FittingSlot.RIG: self.rigs,
FittingSlot.SUBSYSTEM: self.subsystems,
FittingSlot.SERVICE: self.services}
def getContainerBySlot(self, slotType):
return self.__slotContainerMap.get(slotType)
def getSlotByContainer(self, container):
slotType = None
for k, v in self.__slotContainerMap.items():
if v is container:
slotType = k
break
return slotType
def addModules(self, itemSpecs):
modules = []
slotTypes = set()
for itemSpec in itemSpecs:
if itemSpec is None:
modules.append(None)
continue
m = self.__makeModule(itemSpec)
if m is None:
modules.append(None)
continue
modules.append(m)
slotTypes.add(m.slot)
_clearTail(modules)
# If all the modules have same slot type, put them to appropriate
# container with stubs
if len(slotTypes) == 1:
slotType = tuple(slotTypes)[0]
self.getContainerBySlot(slotType).extend(modules)
# Otherwise, put just modules
else:
for m in modules:
if m is None:
continue
self.getContainerBySlot(m.slot).append(m)
def addModule(self, itemSpec):
if itemSpec is None:
return
m = self.__makeModule(itemSpec)
if m is not None:
self.getContainerBySlot(m.slot).append(m)
def __makeModule(self, itemSpec):
# Mutate item if needed
m = None
if itemSpec.mutationIdx in self.mutations:
mutaItem, mutaAttrs = self.mutations[itemSpec.mutationIdx]
mutaplasmid = getDynamicItem(mutaItem.ID)
if mutaplasmid:
try:
m = Module(mutaplasmid.resultingItem, itemSpec.item, mutaplasmid)
except ValueError:
pass
else:
for attrID, mutator in m.mutators.items():
if attrID in mutaAttrs:
mutator.value = mutaAttrs[attrID]
# If we still don't have item (item is not mutated or we
# failed to construct mutated item), try to make regular item
if m is None:
try:
m = Module(itemSpec.item)
except ValueError:
return None
if itemSpec.charge is not None and m.isValidCharge(itemSpec.charge):
m.charge = itemSpec.charge
if itemSpec.offline and m.isValidState(FittingModuleState.OFFLINE):
m.state = FittingModuleState.OFFLINE
elif m.isValidState(FittingModuleState.ACTIVE):
m.state = activeStateLimit(m.item)
return m
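    # Sketch of the mutation path above (hypothetical data): an itemSpec with
    # mutationIdx 1 looks up the (mutaplasmid item, {attrID: value}) pair
    # collected by _importGetMutationData, builds the mutated Module from it,
    # then overwrites each matching mutator value.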
def addImplant(self, itemSpec):
if itemSpec is None:
return
if 'implantness' in itemSpec.item.attributes:
self.implants.append(Implant(itemSpec.item))
elif 'boosterness' in itemSpec.item.attributes:
self.boosters.append(Booster(itemSpec.item))
else:
pyfalog.error('Failed to import implant: {}', itemSpec.typeName)
def addDrone(self, itemSpec):
if itemSpec is None:
return
if itemSpec.item not in self.drones:
self.drones[itemSpec.item] = Drone(itemSpec.item)
self.drones[itemSpec.item].amount += itemSpec.amount
def addFighter(self, itemSpec):
if itemSpec is None:
return
fighter = Fighter(itemSpec.item)
fighter.amount = itemSpec.amount
self.fighters.append(fighter)
def addCargo(self, itemSpec):
if itemSpec is None:
return
if itemSpec.item not in self.cargo:
self.cargo[itemSpec.item] = Cargo(itemSpec.item)
self.cargo[itemSpec.item].amount += itemSpec.amount
def _lineIter(text):
"""Iterate over non-blank lines."""
for line in text.splitlines():
line = line.strip()
if line:
yield line
def parseAdditions(text):
items = []
sMkt = Market.getInstance()
    pattern = r'^(?P<typeName>{}+?)( x(?P<amount>\d+?))?$'.format(NAME_CHARS)
for line in _lineIter(text):
m = re.match(pattern, line)
if not m:
continue
item = sMkt.getItem(m.group('typeName'))
if item is None:
continue
amount = m.group('amount')
amount = 1 if amount is None else int(amount)
items.append((item, amount))
return items
def isValidDroneImport(text):
    pattern = r'x\d+$'
for line in _lineIter(text):
if not re.search(pattern, line):
return False, ()
itemData = parseAdditions(text)
if not itemData:
return False, ()
for item, amount in itemData:
if not item.isDrone:
return False, ()
return True, itemData
def isValidFighterImport(text):
    pattern = r'x\d+$'
for line in _lineIter(text):
if not re.search(pattern, line):
return False, ()
itemData = parseAdditions(text)
if not itemData:
return False, ()
for item, amount in itemData:
if not item.isFighter:
return False, ()
return True, itemData
def isValidCargoImport(text):
    pattern = r'x\d+$'
for line in _lineIter(text):
if not re.search(pattern, line):
return False, ()
itemData = parseAdditions(text)
if not itemData:
return False, ()
for item, amount in itemData:
if item.isAbyssal:
return False, ()
return True, itemData
def isValidImplantImport(text):
    pattern = r'x\d+$'
for line in _lineIter(text):
if re.search(pattern, line):
return False, ()
itemData = parseAdditions(text)
if not itemData:
return False, ()
for item, amount in itemData:
if not item.isImplant:
return False, ()
return True, itemData
def isValidBoosterImport(text):
    pattern = r'x\d+$'
for line in _lineIter(text):
if re.search(pattern, line):
return False, ()
itemData = parseAdditions(text)
if not itemData:
return False, ()
for item, amount in itemData:
if not item.isBooster:
return False, ()
return True, itemData
| gpl-3.0 | 3,213,832,208,081,448,000 | 34.113402 | 154 | 0.555255 | false |
jfmorcillo/mss | mss/agent/managers/module.py | 1 | 25781 | # -*- coding: UTF-8 -*-
#
# (c) 2010 Mandriva, http://www.mandriva.com/
#
# This file is part of Mandriva Server Setup
#
# MSS is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# MSS is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with MSS; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
import os
import uuid
import copy
import re
import glob
import sys
import logging
import platform
import json
import urllib
import urllib2
import time
import xmlrpclib
from mss.agent.config import Config
from mss.agent.lib.utils import Singleton
from mss.agent.lib.db import get_session, OptionTable, LogTypeTable, LogTable, ModuleTable
from mss.agent.managers.process import ProcessManager
from mss.agent.managers.translation import TranslationManager
from mss.agent.classes.media import remove_medias_cmd
_ = TranslationManager().translate
logger = logging.getLogger(__name__)
def expose(f):
"Decorator to set exposed flag on a function."
f.exposed = True
return f
def is_exposed(f):
"Test whether another function should be publicly exposed."
return getattr(f, 'exposed', False)
class ModuleManager:
"""
Class for managing modules
"""
__metaclass__ = Singleton
def _dispatch(self, method, params):
func = getattr(self, method)
if not is_exposed(func):
raise Exception('Method "%s" is not supported' % method)
return func(*params)
def __init__(self):
if platform.machine() == 'x86_64':
self.arch = 'x86_64'
else:
self.arch = 'i586'
# Setup BDD access
self.session = get_session(Config().db_file)
self._token = False
self._mode = None
self.modules = {}
self.sections_modules = {}
self.sections = {}
self.packages = []
# Get machine-id
with open('/etc/machine-id', 'r') as f:
machine_id = f.read().strip()
logger.info("Machine id is %s" % machine_id)
self.set_option("machine-id", machine_id)
# Translation manager
TranslationManager().set_catalog('agent', os.path.join(os.path.dirname(__file__), '..'))
# Load packages
self.load_packages()
def load(self):
""" Load data in the agent """
self.modules = {}
self.sections_modules = {}
self.sections = {}
self.load_sections()
logger.debug("Sections loaded.")
self.load_modules()
logger.debug("Modules loaded.")
self.init_modules()
logger.debug("Modules init done.")
def setup_python_path(self):
"""
Setup the python path to load modules
"""
local_path = Config().localDir
cache_path = Config().cacheDir
try:
sys.path.remove(local_path)
except ValueError:
pass
try:
sys.path.remove(cache_path)
except ValueError:
pass
sys.path.insert(0, local_path)
if self._mode == "api":
sys.path.insert(0, cache_path)
def load_modules(self):
""" load modules """
logger.debug("Using local modules")
modules_list = self.get_local_modules()
if self._mode == "api":
logger.debug("Using API modules")
modules_list += self.get_api_modules()
self.setup_python_path()
from mss.agent.classes.module import Module
for module_desc in modules_list:
if "module" in module_desc:
if "path" not in module_desc["module"]:
module_desc["module"]["path"] = os.path.join(Config().cacheDir,
module_desc["slug"])
self.modules[module_desc['slug']] = Module(module_desc)
section = self.modules[module_desc['slug']].section
if section not in self.sections_modules:
self.sections_modules[section] = []
if not module_desc["slug"] in self.sections_modules[section]:
self.sections_modules[section].append(module_desc["slug"])
def init_modules(self):
for slug, module in self.modules.items():
if hasattr(module, "init"):
module.init()
def get_local_modules(self):
paths = []
result = []
for item in glob.glob(os.path.join(Config().localDir,
"*", "__init__.py")):
module = item.split("/")[-2]
path = os.path.join(Config().localDir, module)
paths.append(path)
for path in paths:
try:
with open(os.path.join(path, "desc.json")) as f:
desc = json.load(f)
except (ValueError, IOError):
logger.exception("Failed to load %s" % (path))
else:
if "module" not in desc:
if desc['standalone'] is True:
raise Exception('Missing section "module" in module %s' % desc['slug'])
else:
desc["module"] = {}
desc["module"]["path"] = path
result.append(desc)
return result
def get_api_modules(self):
""" return list of modules from the API """
cache_path = os.path.join(Config().cacheDir, "addons.json")
try:
mtime = os.path.getmtime(cache_path)
except OSError:
mtime = 0
# Cache 6 hours
if int(time.time()) - mtime > Config().cache:
logger.debug("Getting new version of %s" % cache_path)
result, code = self.request(Config().addonsUrl)
if code == 200:
with open(cache_path, "w") as f:
json.dump(result, f)
modules_list = result
else:
logger.error("Failed to retrieve modules from the API.")
else:
with open(cache_path) as f:
modules_list = json.load(f)
return modules_list
def load_sections(self):
""" load sections """
logger.debug("Using local sections")
sections = self.get_local_sections()
if self._mode == "api":
logger.debug("Using API sections")
api_sections = self.get_api_sections()
for section in sections:
for api_section in api_sections:
if section['slug'] == api_section['slug']:
section.update(api_section)
self.sections = sections
def get_local_sections(self):
""" return local section list """
path = os.path.join(Config().localDir, "sections.json")
with open(path) as f:
sections = json.load(f)
return sections
def get_api_sections(self):
""" return section list from API """
cache_path = os.path.join(Config().cacheDir, "sections.json")
try:
mtime = os.path.getmtime(cache_path)
except OSError:
mtime = 0
# Cache 6 hours
if int(time.time()) - mtime > Config().cache:
logger.debug("Getting new version of %s" % cache_path)
result, code = self.request(Config().sectionsUrl)
if code == 200:
with open(cache_path, "w") as f:
json.dump(result, f)
sections = result
else:
logger.error("Failed to retrieve sections from the API.")
logger.error("Using local sections.")
sections = self.get_local_sections()
else:
with open(cache_path) as f:
sections = json.load(f)
return sections
@expose
def set_lang(self, lang):
""" change lang during execution """
TranslationManager().set_lang(lang)
@expose
def get_lang(self):
""" return current language """
return TranslationManager().get_lang()
@expose
def set_option(self, slug, value):
""" add an option in the DB """
option = OptionTable(slug, value)
self.session.merge(option)
self.session.commit()
return value
@expose
def get_option(self, slug):
""" get an option from the BDD """
logger.debug("Get option %s" % slug)
option = self.session.query(OptionTable).get(slug)
if option:
result = json.loads(option.value)
else:
result = False
logger.debug("Result: %s" % result)
return result
@expose
def load_packages(self):
logger.info("Load packages...")
ProcessManager().load_packages(self.set_packages)
def set_packages(self, module, code, output):
if code == 0:
packages = output.split('#')
if not packages:
logger.error("No packages found.")
else:
self.packages = packages
logger.info("Loading packages done.")
else:
logger.error("Can't load packages.")
@expose
def check_net(self):
ProcessManager().check_net()
@expose
def update_medias(self):
ProcessManager().update_medias()
@expose
def reboot(self):
ProcessManager().reboot()
def get_conflicts(self, conflicts, module):
""" return a module list of current conflicts
with module """
        if module not in self.modules:
            # unknown slug: no loaded module to inspect, so nothing to add
            # (the old self._hAddons fallback referenced an attribute that
            # is never defined in this class and could only raise)
            return conflicts
        module = self.modules[module]
        _conflicts = module.conflicts
        _dependencies = module.dependencies
        _configured = module.configured
for m in _conflicts:
try:
if m not in conflicts and _configured:
conflicts.append(m)
logger.debug("Conflict with : %s" % m)
conflicts = self.get_conflicts(conflicts, m)
except KeyError:
pass
for m in _dependencies:
conflicts = self.get_conflicts(conflicts, m)
return conflicts
@expose
def get_modules(self):
""" return all available modules details """
logger.info("Get all available modules")
result = [module.details for slug, module in self.modules.items()]
logger.debug("Result: %s" % str(result))
return result
@expose
def get_modules_details(self, modules):
""" return modules info """
logger.info("Get modules details: %s" % str(modules))
result = [self.modules[slug].details for slug in modules if slug in self.modules]
logger.debug("Result: %s" % str(result))
return result
@expose
def get_module_details(self, module):
""" return module info """
logger.info("Get module detail: %s" % module)
if module in self.modules:
result = self.modules[module].details
else:
logger.error("Module %s doesn't exists" % module)
result = False
logger.debug("Result: %s" % str(result))
return result
@expose
def get_packages(self, module):
""" returns package list for module """
if module in self.modules:
return self.modules[module].packages
return False
@expose
def preinstall_modules(self, install_modules):
"""
get dependencies for modules to install
return modules infos
"""
# force module re-installation
        # (not used for now)
#force_modules = []
#for m in modules:
#if m.startswith("force-"):
#force_modules.append(m.replace("force-", ""))
#modules = [m.replace("force-", "") for m in modules]
logger.info("Pre-install modules: %s" % ", ".join(install_modules))
# store old modules list
# get dependencies for modules
modules = self.check_dependencies(install_modules, [])
modules = self.order_dependencies(modules)
# get difference for dep list
deps = list(set(modules).difference(install_modules))
# get modules info (modules + dependencies)
modules = self.get_modules_details(modules)
to_install = []
for m in modules:
# don't install already configured deps
if m['slug'] in deps and not m['configured']:
m['dep'] = True
to_install.append(m)
elif m['slug'] not in deps and m['can_configure']:
m['dep'] = False
to_install.append(m)
logger.debug("Result: %s" % to_install)
return to_install
def order_dependencies(self, modules, cnt=1):
for module in modules:
# if the module has dependencies and is not indexed
if module[1] and module[2] == -1:
# for each dep of current module
set_index = True
for m1 in module[1]:
# for each module
for m2 in modules:
# if the dep is not indexed (not >=0)
if m1 == m2[0] and not m2[2] >= 0:
set_index = False
# set the current module index to cnt
# if all dependencies are indexed
if set_index:
module[2] = cnt
        # make 10 passes to determine indexes
        # FIXME! this limits the max number of modules in the list
        if cnt < 10:
cnt += 1
modules = self.order_dependencies(modules, cnt)
        # calculate module list from indexes
else:
result = []
for i in range(cnt):
for module in modules:
if module[2] == i:
if not module[0] in result:
result.append(module[0])
modules = result
return modules
def check_dependencies(self, modules, dependencies):
""" get dependencies for modules
create a list with the form : [ [ module, [dependencies], index ],... ]
"""
for module in modules:
deps = self.get_dependencies(module)
if deps:
# set the index a -1 to calculate index
dependencies.append([module, deps, -1])
dependencies = self.check_dependencies(deps, dependencies)
else:
# set the index at 0 as the module has no dependencies
dependencies.append([module, None, 0])
return dependencies
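    # Illustrative resolution (hypothetical slugs): with mail -> [dns] and
    # dns -> [], check_dependencies(["mail"], []) returns
    #   [["mail", ["dns"], -1], ["dns", None, 0]]
    # and order_dependencies() flattens that to ["dns", "mail"], so
    # dependencies always come first.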
def get_dependencies(self, module):
""" get dependencies for module """
if module in self.modules:
return [d for d in self.modules[module].dependencies if d in self.modules]
return []
@expose
def download_modules(self, modules):
""" download modules from the API """
for module in modules:
self.download_module(module)
@expose
def download_module(self, module):
logger.debug("Download module: %s" % module)
self.modules[module].download()
@expose
def get_repositories(self, modules):
""" get repositories for modules """
logger.debug("Get packages repositories for modules: %s" % ", ".join(modules))
repositories = []
for module in modules:
repositories += self.modules[module].repositories
logger.debug("Result: %s" % repositories)
return repositories
@expose
def add_repository(self, module_slug, repo_slug, login=None, passwd=None):
""" add repository of a module """
repositories = self.modules[module_slug].repositories
for repository in repositories:
if repository.slug == repo_slug:
if repository.clean:
p = ProcessManager().launch("repository", _("Removing medias"), remove_medias_cmd())
p.join()
logger.info("Add repository: %s" % repository.name)
ProcessManager().add_repository(repository.get_command(login, passwd))
@expose
def install_modules(self, modules):
""" install modules packages """
logger.info("Install modules: %s" % str(modules))
packages = []
for module in modules:
packages += self.modules[module].packages
if packages:
logger.debug("Install packages: %s" % str(packages))
ProcessManager().install_packages(packages)
return True
else:
logger.info("No packages to install")
return False
@expose
def get_config(self, modules):
""" get modules config """
logger.info("Get config for modules: %s" % ", ".join(modules))
config = []
for module in modules:
if module in self.modules:
config.append(self.modules[module].get_config())
else:
logger.error("Module %s is not available" % module)
logger.debug("Result: %s" % str(config))
return config
@expose
def valid_config(self, modules, modules_config):
""" validate user configuration for modules """
logger.info("Valid config for modules: %s" % ", ".join(modules))
logger.debug("Configuration is: %s" % str(modules_config))
config = []
for module in modules:
module_config = self.modules[module].valid_config(modules_config)
config.append(module_config)
logger.debug("Result: %s" % str(config))
return config
@expose
def run_config(self, module):
""" run configuration for module """
logger.debug("Run configuration for %s" % str(module))
path, script, args = self.modules[module].info_config()
logger.debug("Run script: %s, args: %s" % (str(script), str(args)))
logger.debug("Path is: %s" % path)
return ProcessManager().run_script(script, args, path, module, self.end_config)
@expose
def end_config(self, module, code, output):
"""
Callback after run script
"""
if code == 0 and not self.modules[module].configured:
logger.debug("Set %s as configured" % str(module))
self.modules[module].configured = True
# try to store the config log
try:
log_type = self.session.query(LogTypeTable).filter(LogTypeTable.name == "config").first()
if not log_type:
log_type = LogTypeTable("config")
self.session.add(log_type)
self.session.commit()
module_obj = self.session.query(ModuleTable).filter(ModuleTable.name == module).first()
config_log = LogTable(log_type.id, module_obj.id, self.get_state("config", module))
logger.debug("Saving %s configuration log in the DB" % str(module))
self.session.add(config_log)
self.session.commit()
except:
pass
return 0
def clean_output(self, string):
# remove ANSI codes
string = re.sub('\x1b[^m]*m', '', string)
return string
@expose
def get_state(self, type, module="agent"):
""" return execution output """
code, output = ProcessManager().p_state(type, module)
# format output
tmp = output.splitlines()
if not tmp:
output = [{'code': 0, 'text': u''}]
else:
output = []
for line in tmp:
try:
if int(line[0]) in range(9):
text_code = line[0]
text = line[1:]
else:
text_code = 0
text = line
output.append({'code': text_code, 'text': self.clean_output(text)})
# no code at line start
except ValueError:
text_code = 0
text = line
output.append({'code': text_code, 'text': self.clean_output(text)})
# no char in line
except IndexError:
pass
logger.debug("Get state: %s - %s" % (code, output))
return (code, output)
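    # Example of the parsing above (log text is made up): a raw line
    # "3Disk space is low" becomes {'code': '3', 'text': 'Disk space is low'},
    # while a line that does not start with a digit in 0..8 is kept whole
    # under code 0.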
@expose
def get_status(self):
""" return current agent status """
status = []
statuses = ProcessManager().pm_state()
for sts in statuses:
status.append(_(sts, "agent"))
return ', '.join(status)
@expose
def get_sections(self):
""" return list of sections """
sections = copy.deepcopy(self.sections)
for section in sections:
section["name"] = _(section["name"], "agent")
return sections
@expose
def get_section(self, section):
""" return modules belonging to section
organized by category
"""
logger.info("Getting section %s modules" % section)
result = []
if section in self.sections_modules:
modules_list = self.sections_modules[section]
for module_slug in modules_list:
if self.modules[module_slug].standalone:
category = self.modules[module_slug].category
details = self.modules[module_slug].details
exist = False
for cat in result:
if category["slug"] == cat["slug"]:
exist = True
break
if not exist:
result.append(category)
for i, cat in enumerate(result[:]):
if category["slug"] == cat["slug"]:
if "modules" not in cat:
result[i]["modules"] = []
result[i]["modules"].append(details)
break
logger.debug("Result: %s" % str(result))
return result
@expose
def authenticate(self, user, password):
""" Authenticate mss-www to the agent """
if not user or not password:
return False
# Logout the current user
self.logout()
# Local auth with PAM
if user == "root":
logger.debug("PAM authentication")
from mss.agent.lib import pam
result = pam.authenticate(user, password, service="passwd")
if result:
logger.debug("Logged with PAM.")
# Generate an uuid for this session
self._token = str(uuid.uuid4())
self._mode = "local"
self.load()
return self._token
logger.error("Login failed against PAM.")
return False
# API auth
else:
logger.debug("ServicePlace authentication")
url = Config().tokenUrl
result, code = self.request(url, {'username': user, 'password': password.encode('utf-8')})
if code == 200:
if 'token' in result:
logger.debug("Logged with the ServicePlace !")
self._token = result['token']
self._mode = "api"
self.load()
return self._token
logger.error("Login failed against the ServicePlace.")
return False
def check_token(self, token):
if not self._token:
return False
if not token:
return False
return token == self._token
@expose
def logout(self):
self._token = False
self._mode = None
logger.info("User logged out")
def request(self, url, params=None):
"""
Used to query the ServicePlace API
Handles token and language headers
"""
if params:
params = urllib.urlencode(params)
request = urllib2.Request(url, params)
if self._token:
request.add_header('Authorization', 'Token ' + self._token)
request.add_header('Accept-Language', TranslationManager().get_lang().split('_')[0] + ',en')
try:
response = urllib2.urlopen(request)
if response.info().gettype() == "application/json":
result = json.loads(response.read())
else:
result = response.read()
code = response.getcode()
except urllib2.HTTPError as e:
code = e.code
result = ""
if code in (404, 500):
raise xmlrpclib.Fault(code, _("Connection failed with the ServicePlace.", "agent"))
except urllib2.URLError as e:
logger.exception("URL error")
raise xmlrpclib.Fault(777, str(e.reason))
logger.debug("Return code %s" % code)
return (result, code)
| gpl-3.0 | -4,889,416,704,443,938,000 | 34.028533 | 105 | 0.541135 | false |
lexxito/monitoring | ceilometer/tests/api/v2/test_app.py | 1 | 10718 | # -*- encoding: utf-8 -*-
#
# Copyright 2013 IBM Corp.
# Copyright © 2013 Julien Danjou
#
# Author: Julien Danjou <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Test basic ceilometer-api app
"""
import json
import os
import mock
import wsme
from ceilometer.api import acl
from ceilometer.api import app
from ceilometer.openstack.common import fileutils
from ceilometer.openstack.common.fixture import config
from ceilometer.openstack.common import gettextutils
from ceilometer import service
from ceilometer.tests.api.v2 import FunctionalTest
from ceilometer.tests import base
from ceilometer.tests import db as tests_db
class TestApp(base.BaseTestCase):
def setUp(self):
super(TestApp, self).setUp()
self.CONF = self.useFixture(config.Config()).conf
def test_keystone_middleware_conf(self):
self.CONF.set_override("auth_protocol", "foottp",
group=acl.OPT_GROUP_NAME)
self.CONF.set_override("auth_version", "v2.0",
group=acl.OPT_GROUP_NAME)
self.CONF.set_override("pipeline_cfg_file",
self.path_get("etc/ceilometer/pipeline.yaml"))
self.CONF.set_override('connection', "log://", group="database")
self.CONF.set_override("auth_uri", None, group=acl.OPT_GROUP_NAME)
api_app = app.setup_app()
self.assertTrue(api_app.auth_uri.startswith('foottp'))
def test_keystone_middleware_parse_conffile(self):
pipeline_conf = self.path_get("etc/ceilometer/pipeline.yaml")
content = "[DEFAULT]\n"\
"pipeline_cfg_file = {0}\n"\
"[{1}]\n"\
"auth_protocol = barttp\n"\
"auth_version = v2.0\n".format(pipeline_conf,
acl.OPT_GROUP_NAME)
tmpfile = fileutils.write_to_tempfile(content=content,
prefix='ceilometer',
suffix='.conf')
service.prepare_service(['ceilometer-api',
'--config-file=%s' % tmpfile])
self.CONF.set_override('connection', "log://", group="database")
api_app = app.setup_app()
self.assertTrue(api_app.auth_uri.startswith('barttp'))
os.unlink(tmpfile)
class TestPecanApp(FunctionalTest):
database_connection = tests_db.MongoDBFakeConnectionUrl()
def test_pecan_extension_guessing_unset(self):
# check Pecan does not assume .jpg is an extension
response = self.app.get(self.PATH_PREFIX + '/meters/meter.jpg')
self.assertEqual(response.content_type, 'application/json')
class TestApiMiddleware(FunctionalTest):
# This doesn't really matter
database_connection = tests_db.MongoDBFakeConnectionUrl()
no_lang_translated_error = 'No lang translated error'
en_US_translated_error = 'en-US translated error'
def _fake_get_localized_message(self, message, user_locale):
if user_locale is None:
return self.no_lang_translated_error
else:
return self.en_US_translated_error
def test_json_parsable_error_middleware_404(self):
response = self.get_json('/invalid_path',
expect_errors=True,
headers={"Accept":
"application/json"}
)
self.assertEqual(response.status_int, 404)
self.assertEqual(response.content_type, "application/json")
self.assertTrue(response.json['error_message'])
response = self.get_json('/invalid_path',
expect_errors=True,
headers={"Accept":
"application/json,application/xml"}
)
self.assertEqual(response.status_int, 404)
self.assertEqual(response.content_type, "application/json")
self.assertTrue(response.json['error_message'])
response = self.get_json('/invalid_path',
expect_errors=True,
headers={"Accept":
"application/xml;q=0.8, \
application/json"}
)
self.assertEqual(response.status_int, 404)
self.assertEqual(response.content_type, "application/json")
self.assertTrue(response.json['error_message'])
response = self.get_json('/invalid_path',
expect_errors=True
)
self.assertEqual(response.status_int, 404)
self.assertEqual(response.content_type, "application/json")
self.assertTrue(response.json['error_message'])
response = self.get_json('/invalid_path',
expect_errors=True,
headers={"Accept":
"text/html,*/*"}
)
self.assertEqual(response.status_int, 404)
self.assertEqual(response.content_type, "application/json")
self.assertTrue(response.json['error_message'])
def test_json_parsable_error_middleware_translation_400(self):
# Ensure translated messages get placed properly into json faults
with mock.patch.object(gettextutils, 'get_localized_message',
side_effect=self._fake_get_localized_message):
response = self.post_json('/alarms', params={'name': 'foobar',
'type': 'threshold'},
expect_errors=True,
headers={"Accept":
"application/json"}
)
self.assertEqual(response.status_int, 400)
self.assertEqual(response.content_type, "application/json")
self.assertTrue(response.json['error_message'])
self.assertEqual(response.json['error_message']['faultstring'],
self.no_lang_translated_error)
def test_xml_parsable_error_middleware_404(self):
response = self.get_json('/invalid_path',
expect_errors=True,
headers={"Accept":
"application/xml,*/*"}
)
self.assertEqual(response.status_int, 404)
self.assertEqual(response.content_type, "application/xml")
self.assertEqual(response.xml.tag, 'error_message')
response = self.get_json('/invalid_path',
expect_errors=True,
headers={"Accept":
"application/json;q=0.8 \
,application/xml"}
)
self.assertEqual(response.status_int, 404)
self.assertEqual(response.content_type, "application/xml")
self.assertEqual(response.xml.tag, 'error_message')
def test_xml_parsable_error_middleware_translation_400(self):
# Ensure translated messages get placed properly into xml faults
with mock.patch.object(gettextutils, 'get_localized_message',
side_effect=self._fake_get_localized_message):
response = self.post_json('/alarms', params={'name': 'foobar',
'type': 'threshold'},
expect_errors=True,
headers={"Accept":
"application/xml,*/*"}
)
self.assertEqual(response.status_int, 400)
self.assertEqual(response.content_type, "application/xml")
self.assertEqual(response.xml.tag, 'error_message')
fault = response.xml.findall('./error/faultstring')
for fault_string in fault:
self.assertEqual(fault_string.text, self.no_lang_translated_error)
def test_best_match_language(self):
# Ensure that we are actually invoking language negotiation
with mock.patch.object(gettextutils, 'get_localized_message',
side_effect=self._fake_get_localized_message):
response = self.post_json('/alarms', params={'name': 'foobar',
'type': 'threshold'},
expect_errors=True,
headers={"Accept":
"application/xml,*/*",
"Accept-Language":
"en-US"}
)
self.assertEqual(response.status_int, 400)
self.assertEqual(response.content_type, "application/xml")
self.assertEqual(response.xml.tag, 'error_message')
fault = response.xml.findall('./error/faultstring')
for fault_string in fault:
self.assertEqual(fault_string.text, self.en_US_translated_error)
def test_translated_then_untranslated_error(self):
resp = self.get_json('/alarms/alarm-id-3', expect_errors=True)
self.assertEqual(resp.status_code, 404)
self.assertEqual(json.loads(resp.body)['error_message']
['faultstring'], "Alarm alarm-id-3 Not Found")
with mock.patch('ceilometer.api.controllers.v2.EntityNotFound') \
as CustomErrorClass:
CustomErrorClass.return_value = wsme.exc.ClientSideError(
"untranslated_error", status_code=404)
resp = self.get_json('/alarms/alarm-id-5', expect_errors=True)
self.assertEqual(resp.status_code, 404)
self.assertEqual(json.loads(resp.body)['error_message']
['faultstring'], "untranslated_error")
| apache-2.0 | -7,755,225,670,747,542,000 | 46.211454 | 78 | 0.547355 | false |
danforthcenter/plantcv | plantcv/plantcv/visualize/obj_size_ecdf.py | 1 | 1554 | # Plot Empirical Cumulative Distribution Function for Object Size
import os
import cv2
import pandas as pd
from plantcv.plantcv import params
from plantcv.plantcv._debug import _debug
from statsmodels.distributions.empirical_distribution import ECDF
from plotnine import ggplot, aes, geom_point, labels, scale_x_log10
def obj_size_ecdf(mask, title=None):
"""
Plot empirical cumulative distribution for object size based on binary mask.
Inputs:
mask = binary mask
title = a custom title for the plot (default=None)
Returns:
fig_ecdf = empirical cumulative distribution function plot
:param mask: numpy.ndarray
:param title: str
:return fig_ecdf: plotnine.ggplot.ggplot
"""
objects, _ = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)[-2:]
areas = [cv2.contourArea(cnt) for cnt in objects]
# Remove objects with areas < 1px
areas = [i for i in areas if i >= 1.0]
ecdf = ECDF(areas, side='right')
ecdf_df = pd.DataFrame({'object area': ecdf.x[1:], 'cumulative probability': ecdf.y[1:]})
# create ecdf plot and apply log-scale for x-axis (areas)
fig_ecdf = (ggplot(data=ecdf_df, mapping=aes(x='object area', y='cumulative probability'))
+ geom_point(size=.1)
+ scale_x_log10())
if title is not None:
fig_ecdf = fig_ecdf + labels.ggtitle(title)
# Plot or print the ecdf
_debug(visual=fig_ecdf,
filename=os.path.join(params.debug_outdir, str(params.device) + '_area_ecdf.png'))
return fig_ecdf
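# Minimal usage sketch (assumes PlantCV is installed; the file name below is
# illustrative -- any single-channel binary mask works):
#
#     import cv2
#     from plantcv.plantcv.visualize import obj_size_ecdf
#
#     mask = cv2.imread("binary_mask.png", cv2.IMREAD_GRAYSCALE)
#     fig = obj_size_ecdf(mask, title="Object size ECDF")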
| mit | -1,732,821,960,471,904,500 | 33.533333 | 94 | 0.677606 | false |
shoopio/shoop | shuup_tests/browser/admin/test_picotable.py | 2 | 6430 | # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2019, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
import os
import time
import pytest
from django.core.urlresolvers import reverse
from shuup.testing.browser_utils import (
click_element, move_to_element, wait_until_appeared,
wait_until_appeared_xpath, wait_until_condition
)
from shuup.testing.factories import (
create_product, create_random_person, get_default_shop,
get_default_supplier
)
from shuup.testing.browser_utils import initialize_admin_browser_test
pytestmark = pytest.mark.skipif(os.environ.get("SHUUP_BROWSER_TESTS", "0") != "1", reason="No browser tests run.")
def create_contacts(shop):
for i in range(0, 200):
contact = create_random_person()
contact.save()
def create_products(shop):
supplier = get_default_supplier()
for i in range(0, 200):
sku = "sku-%d" % i
create_product(sku, shop, supplier, default_price=i)
# used in settings
list_view_settings = {
"contact": {
"page_header": "Contacts",
"default_column_count": 7,
"addable_fields": [(1, "Account Manager")],
"creator": create_contacts,
"test_pagination": True
},
"shop_product": {
"page_header": "Shop Products",
"default_column_count": 7,
"addable_fields": [(13, "Gtin"), (6, "Default Price")],
"creator": create_products,
"test_pagination": False
},
"permission_group": {
"page_header": "Permission Groups",
"default_column_count": 1,
"addable_fields": [(2, "Permissions"), (1, "Id")], # use reverse order due idx
"creator": None,
"test_pagination": False
}
}
@pytest.mark.browser
@pytest.mark.django_db
@pytest.mark.parametrize("visit_type", list_view_settings.keys())
def test_list_views(browser, admin_user, live_server, settings, visit_type):
shop = get_default_shop()
creator = list_view_settings[visit_type].get("creator", None)
if creator and callable(creator):
creator(shop)
initialize_admin_browser_test(browser, live_server, settings)
_visit_list_view(browser, live_server, visit_type, creator)
if list_view_settings[visit_type].get("test_pagination", False):
_test_pagination(browser)
_set_settings(browser, visit_type, creator)
def _visit_list_view(browser, live_server, list_view_name, creator):
url = reverse("shuup_admin:%s.list" % list_view_name)
browser.visit("%s%s" % (live_server, url))
wait_until_condition(browser, lambda x: x.is_text_present(list_view_settings[list_view_name]["page_header"]))
_check_picotable_item_info(browser, creator)
def _test_pagination(browser):
ellipses = u"\u22ef"
items = _get_pagination_content(browser)
_assert_pagination_content(items, ["Previous", "1", "2", "3", ellipses, "11", "Next"])
_goto_page(browser, 3)
items = _get_pagination_content(browser)
_assert_pagination_content(items, ["Previous", "1", "2", "3", "4", "5", ellipses, "11", "Next"])
_goto_page(browser, 5)
items = _get_pagination_content(browser)
_assert_pagination_content(items, ["Previous", "1", ellipses, "3", "4", "5", "6", "7", ellipses, "11", "Next"])
_goto_page(browser, 7)
items = _get_pagination_content(browser)
_assert_pagination_content(items, ["Previous", "1", ellipses, "5", "6", "7", "8", "9", ellipses, "11", "Next"])
_goto_page(browser, 9)
items = _get_pagination_content(browser)
_assert_pagination_content(items, ["Previous", "1", ellipses, "7", "8", "9", "10", "11", "Next"])
_goto_page(browser, 11)
items = _get_pagination_content(browser)
_assert_pagination_content(items, ["Previous", "1", ellipses, "9", "10", "11", "Next"])
def _get_pagination_content(browser):
pagination = browser.find_by_css(".pagination")[0]
return pagination.find_by_tag("a")
def _assert_pagination_content(items, content):
assert [item.text for item in items] == content
def _goto_page(browser, page_number):
click_element(browser, "a[rel='%s']" % page_number)
element = "li.active a[rel='%s']" % page_number
wait_until_appeared(browser, element)
move_to_element(browser, element)
def _click_item(items, value):
index = [item.text for item in items].index(value)
items[index].click()
    time.sleep(0.5)  # wait half a second for Mithril to render
def _set_settings(browser, setting_type, creator):
used_settings = list_view_settings[setting_type]
default_column_count = used_settings["default_column_count"]
addable_fields = used_settings["addable_fields"]
# not selected by default
for idx, text in addable_fields:
assert not browser.is_text_present(text)
browser.find_by_css(".shuup-toolbar .btn.btn-inverse").first.click()
# select settings
for idx, (index_key, text) in enumerate(addable_fields):
expected_index = default_column_count + 1 + idx
assert browser.is_text_present(text)
browser.find_by_xpath("//ul[@id='source-sortable']/li[%d]/button" % index_key).first.click()
wait_until_appeared_xpath(browser, "//ul[@id='target-sortable']/li[%d]/button" % expected_index)
# save settings
move_to_element(browser, ".shuup-toolbar .btn.btn-success")
browser.find_by_css(".shuup-toolbar .btn.btn-success").first.click()
_check_picotable_item_info(browser, creator)
if creator:
for idx, text in addable_fields:
wait_until_condition(browser, lambda x: x.is_text_present(text))
# go back to settings
browser.find_by_css(".shuup-toolbar .btn.btn-inverse").first.click()
wait_until_appeared_xpath(browser, "//a[contains(text(),'Reset Defaults')]")
# reset to defaults
browser.find_by_xpath("//a[contains(text(),'Reset Defaults')]").click()
# wait
_check_picotable_item_info(browser, creator)
# not selected by default
if creator:
for idx, text in addable_fields:
assert not browser.is_text_present(text)
def _check_picotable_item_info(browser, creator):
if creator:
wait_until_appeared(browser, ".picotable-item-info")
else:
wait_until_condition(browser, condition=lambda x: x.is_text_present("There are no permission groups to show"))
| agpl-3.0 | -8,168,418,631,158,132,000 | 33.756757 | 118 | 0.655832 | false |
blab/nextstrain-augur | tests/python2/test_fitness_model.py | 1 | 10095 | """
Tests for the `fitness_model` module.
"""
import Bio.Align.AlignInfo
import Bio.Phylo
import Bio.SeqIO
import datetime
import numpy as np
import pytest
import sys
import os
# we assume (and assert) that this script is running from the tests/ directory
sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..'))
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
from base.fitness_model import fitness_model
from base.frequencies import KdeFrequencies
from base.process import process
#
# Fixtures
#
# Set precalculated fitness model parameters which are the mean and standard
# deviation for the model.
MODEL_PARAMS = [1.0, 0.05]
@pytest.fixture
def simple_tree():
"""Returns a tree with three sequences: a root and two direct descendents with
one modification each.
"""
# Build simple tree.
tree = Bio.Phylo.read(StringIO("(A,B);"), "newick")
# Build sequences for tree nodes. One leaf has a Koel and epitope site
# mutation. The other leaf has a signal peptide mutation.
root = sequence()
leaf_a = modify_sequence_at_site(root, 145 + 16 - 1)
leaf_b = modify_sequence_at_site(root, 14)
# Assign sequences to nodes.
sequences = (root, leaf_a, leaf_b)
dates = (2012.5, 2013.25, 2014.8)
index = 0
for node in tree.find_clades(order="preorder"):
node.clade = index
node.aa = sequences[index]
node.attr = {"num_date": dates[index]}
index += 1
return tree
@pytest.fixture
def real_tree(multiple_sequence_alignment):
"""Returns a tree built with FastTree from a small set of nucleotide sequences
for H3N2.
"""
# Load the tree.
tree = Bio.Phylo.read("tests/data/fitness_model/H3N2_tree.newick", "newick")
# Make a lookup table of name to sequence.
sequences_by_name = dict([(alignment.name, str(alignment.seq))
for alignment in multiple_sequence_alignment])
# Assign sequences to the tree.
index = 0
for node in tree.find_clades():
if node.name is not None:
node.sequence = np.fromstring(sequences_by_name[node.name], "S1")
# Since sequence names look like "A/Singapore/TT0495/2017",
# convert the last element to a floating point value for
# simplicity.
node.attr = {"num_date": float(node.name.split("/")[-1])}
else:
# Build a "dumb" consensus from the alignment for the
# ancestral node and assign an arbitrary date in the
# past.
summary = Bio.Align.AlignInfo.SummaryInfo(multiple_sequence_alignment)
node.sequence = np.fromstring(str(summary.dumb_consensus(threshold=0.5, ambiguous="N")), "S1")
node.attr = {"num_date": 2014.8}
node.clade = index
index += 1
return tree
@pytest.fixture
def simple_fitness_model(simple_tree):
time_interval = (
datetime.date(2015, 1, 1),
datetime.date(2012, 1, 1)
)
start_date, end_date = process.get_time_interval_as_floats(time_interval)
return fitness_model(
tree=simple_tree,
frequencies=KdeFrequencies(
start_date=start_date,
end_date=end_date,
include_internal_nodes=True
),
predictor_input=["random"],
pivot_spacing=1.0 / 12,
time_interval=time_interval,
epitope_masks_fname="builds/flu/metadata/ha_masks.tsv",
epitope_mask_version="wolf"
)
@pytest.fixture
def real_fitness_model(real_tree, multiple_sequence_alignment):
time_interval = (
datetime.date(2017, 6, 1),
datetime.date(2014, 6, 1)
)
start_date, end_date = process.get_time_interval_as_floats(time_interval)
model = fitness_model(
tree=real_tree,
frequencies=KdeFrequencies(
start_date=start_date,
end_date=end_date,
include_internal_nodes=True
),
predictor_input=["random"],
pivot_spacing=1.0 / 12,
time_interval=time_interval,
epitope_masks_fname="builds/flu/metadata/ha_masks.tsv",
epitope_mask_version="wolf"
)
model.nuc_aln = multiple_sequence_alignment
model.nuc_alphabet = 'ACGT-N'
model.min_mutation_frequency = 0.01
return model
@pytest.fixture
def precalculated_fitness_model(simple_tree):
"""Provides a simple fitness model with precalculated model parameters such that
the model skips learning new parameters.
"""
time_interval = (
datetime.date(2015, 1, 1),
datetime.date(2012, 1, 1)
)
start_date, end_date = process.get_time_interval_as_floats(time_interval)
return fitness_model(
tree=simple_tree,
frequencies=KdeFrequencies(
start_date=start_date,
end_date=end_date,
include_internal_nodes=True
),
predictor_input={"random": MODEL_PARAMS},
pivot_spacing=1.0 / 12,
time_interval=time_interval,
epitope_masks_fname="builds/flu/metadata/ha_masks.tsv",
epitope_mask_version="wolf"
)
@pytest.fixture
def sequence():
"""Returns an amino acid sequence for an ancestral H3N2 virus (Hong Kong 1968).
"""
with open("tests/data/fitness_model/AAK51718.fasta", "r") as handle:
record = list(Bio.SeqIO.parse(handle, "fasta"))[0]
aa = str(record.seq)
return aa
@pytest.fixture
def multiple_sequence_alignment():
"""Returns a multiple sequence alignment containing a small test set of H3N2
sequences.
"""
msa = Bio.AlignIO.read("tests/data/fitness_model/H3N2_alignment.cleaned.fasta", "fasta")
return msa
#
# Utility functions
#
def modify_sequence_at_site(sequence, site):
"""Returns the given sequence with a modified base at the given site.
"""
other_sequence_list = list(sequence)
other_sequence_list[site] = "Z"
return "".join(other_sequence_list)
#
# Tests
#
class TestFitnessModel(object):
def test_prep_nodes(self, simple_fitness_model):
assert not hasattr(simple_fitness_model, "nodes")
assert not any([hasattr(node, "tips") for node in simple_fitness_model.tree.find_clades()])
simple_fitness_model.prep_nodes()
assert hasattr(simple_fitness_model, "nodes")
assert hasattr(simple_fitness_model, "rootnode")
assert hasattr(simple_fitness_model.rootnode, "pivots")
assert all([hasattr(node, "tips") for node in simple_fitness_model.tree.find_clades()])
def test_calc_node_frequencies(self, simple_fitness_model):
simple_fitness_model.prep_nodes()
assert not hasattr(simple_fitness_model, "freq_arrays")
simple_fitness_model.calc_node_frequencies()
assert hasattr(simple_fitness_model, "freq_arrays")
assert len(simple_fitness_model.freq_arrays) > 0
def test_calc_all_predictors(self, simple_fitness_model):
simple_fitness_model.prep_nodes()
simple_fitness_model.calc_node_frequencies()
assert not hasattr(simple_fitness_model, "predictor_arrays")
simple_fitness_model.calc_all_predictors()
assert hasattr(simple_fitness_model, "predictor_arrays")
assert len(simple_fitness_model.predictor_arrays) > 0
def test_standardize_predictors(self, simple_fitness_model):
simple_fitness_model.prep_nodes()
simple_fitness_model.calc_node_frequencies()
simple_fitness_model.calc_all_predictors()
assert not hasattr(simple_fitness_model, "predictor_means")
simple_fitness_model.standardize_predictors()
assert hasattr(simple_fitness_model, "predictor_means")
def test_select_clades_for_fitting(self, simple_fitness_model):
simple_fitness_model.prep_nodes()
simple_fitness_model.calc_node_frequencies()
simple_fitness_model.calc_all_predictors()
simple_fitness_model.standardize_predictors()
assert not hasattr(simple_fitness_model, "fit_clades")
simple_fitness_model.select_clades_for_fitting()
assert hasattr(simple_fitness_model, "fit_clades")
assert len(simple_fitness_model.fit_clades) > 0
def test_learn_parameters(self, real_fitness_model):
real_fitness_model.prep_nodes()
real_fitness_model.calc_node_frequencies()
real_fitness_model.calc_all_predictors()
real_fitness_model.standardize_predictors()
real_fitness_model.select_clades_for_fitting()
assert not hasattr(real_fitness_model, "last_fit")
real_fitness_model.learn_parameters(niter=1, fit_func="clade")
assert hasattr(real_fitness_model, "last_fit")
def test_assign_fitness(self, real_fitness_model):
real_fitness_model.prep_nodes()
real_fitness_model.calc_node_frequencies()
real_fitness_model.calc_all_predictors()
real_fitness_model.standardize_predictors()
real_fitness_model.select_clades_for_fitting()
real_fitness_model.learn_parameters(niter=1, fit_func="clade")
assert not any([hasattr(node, "fitness") for node in real_fitness_model.tree.get_terminals()])
real_fitness_model.assign_fitness()
assert all([hasattr(node, "fitness") for node in real_fitness_model.tree.get_terminals()])
def test_assign_fitness_with_precalculated_params(self, precalculated_fitness_model):
# The fitness model should have model parameters assigned by the user.
assert np.array_equal(precalculated_fitness_model.model_params, np.array([MODEL_PARAMS[0]]))
precalculated_fitness_model.predict()
# After prediction, the model parameters should be unchanged as the
# learning step should be skipped.
assert np.array_equal(precalculated_fitness_model.model_params, np.array([MODEL_PARAMS[0]]))
# Recalculate fitness model parameters which should be different from those given.
precalculated_fitness_model.learn_parameters(niter=1, fit_func="clade")
assert not np.array_equal(precalculated_fitness_model.model_params, np.array([MODEL_PARAMS[0]]))
| agpl-3.0 | -5,951,141,255,891,297,000 | 35.709091 | 106 | 0.665478 | false |
lalitkumarj/NEXT-psych | next/apps/TupleBanditsPureExploration/Dashboard.py | 1 | 3313 | """
TupleBanditsPureExplorationDashboard
author: Nick Glattard, [email protected]
last updated: 4/24/2015
######################################
TupleBanditsPureExplorationDashboard
"""
import json
import numpy
import numpy.random
import matplotlib.pyplot as plt
from datetime import datetime
from datetime import timedelta
from next.utils import utils
from next.apps.AppDashboard import AppDashboard
class TupleBanditsPureExplorationDashboard(AppDashboard):
def __init__(self,db,ell):
AppDashboard.__init__(self,db,ell)
def get_app_supported_stats(self):
"""
        Returns a list of dictionaries describing the identifier (stat_id) and
        the necessary parameter inputs to be used when calling getStats
Expected output (list of dicts, each with fields):
        (string) stat_id : the identifier of the statistic
(string) description : docstring of describing outputs
(list of string) necessary_params : list where each string describes the type of param input like 'alg_label' or 'task'
"""
stat_list = self.get_supported_stats()
stat = {}
stat['stat_id'] = 'most_current_ranking'
stat['description'] = self.most_current_ranking.__doc__
stat['necessary_params'] = ['alg_label']
stat_list.append(stat)
return stat_list
def most_current_ranking(self,app_id,exp_uid,alg_label):
"""
        Description: Returns a ranking of arms in the form of a list of dictionaries, which is convenient for downstream applications
Expected input:
(string) alg_label : must be a valid alg_label contained in alg_list list of dicts
        The 'headers' contains a list of dictionaries corresponding to each column of the table with fields 'label' and 'field' where 'label' is the label of the column to be put on top of the table, and 'field' is the name of the field in 'data' that the column corresponds to
Expected output (in dict):
plot_type : 'columnar_table'
headers : [ {'label':'Rank','field':'rank'}, {'label':'Target','field':'index'} ]
(list of dicts with fields) data (each dict is a row, each field is the column for that row):
(int) index : index of target
(int) ranking : rank (0 to number of targets - 1) representing belief of being best arm
"""
alg_list,didSucceed,message = self.db.get(app_id+':experiments',exp_uid,'alg_list')
for algorithm in alg_list:
if algorithm['alg_label'] == alg_label:
alg_id = algorithm['alg_id']
alg_uid = algorithm['alg_uid']
list_of_log_dict,didSucceed,message = self.ell.get_logs_with_filter(app_id+':ALG-EVALUATION',{'alg_uid':alg_uid})
list_of_log_dict = sorted(list_of_log_dict, key=lambda k: k['num_reported_answers'] )
print didSucceed, message
item = list_of_log_dict[-1]
return_dict = {}
return_dict['headers'] = [{'label':'Rank','field':'rank'},{'label':'Target','field':'index'},{'label':'Score','field':'score'},{'label':'Precision','field':'precision'}]
return_dict['data'] = item['targets']
return_dict['plot_type'] = 'columnar_table'
return return_dict
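# Illustrative shape of the dict returned above (values are made up):
#
#     {'plot_type': 'columnar_table',
#      'headers': [{'label': 'Rank', 'field': 'rank'}, ...],
#      'data': [{'rank': 0, 'index': 3, 'score': 0.91, 'precision': 0.02}, ...]}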
| apache-2.0 | -895,578,038,832,103,800 | 37.976471 | 278 | 0.635376 | false |
clubcapra/Ibex | src/capra_ui/GpsPointManager/controllers/DialogAddPoint.py | 1 | 1045 | __author__ = 'jstcyr'
from PyQt4 import QtGui, QtCore
from ..views import DialogAddPointUi
from ..models.Coordinates import Coordinates
from ..utilities import CoordinatesUtils
class DialogAddPoint(QtGui.QDialog, DialogAddPointUi.Ui_Dialog_add_point):
def __init__(self, parent=None):
super(DialogAddPoint, self).__init__(parent)
self.setupUi(self)
self.parent = parent
self.buttonBox.accepted.connect(self.saveButtonClicked)
def saveButtonClicked(self):
try:
lat = CoordinatesUtils.ConvertToDecimalDegrees(self.lineEdit_latitude.text())
            lon = CoordinatesUtils.ConvertToDecimalDegrees(self.lineEdit_longitude.text())
            coords = Coordinates(self.lineEdit_id.text(), lat, lon)
self.parent.coordinates.append(coords)
self.parent.refreshCoordinatesList()
except ValueError as e:
print e
self.setVisible(True)
QtGui.QMessageBox.critical(self, QtCore.QString("Error"), QtCore.QString(e.message)) | gpl-3.0 | 5,127,037,757,480,092,000 | 39.230769 | 96 | 0.686124 | false |
genialis/resolwe | resolwe/flow/migrations/0023_process_entity_2.py | 1 | 1150 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.14 on 2018-10-01 03:15
from __future__ import unicode_literals
from django.db import migrations
def migrate_flow_collection(apps, schema_editor):
"""Migrate 'flow_collection' field to 'entity_type'."""
Process = apps.get_model("flow", "Process")
DescriptorSchema = apps.get_model("flow", "DescriptorSchema")
for process in Process.objects.all():
process.entity_type = process.flow_collection
process.entity_descriptor_schema = process.flow_collection
if (
process.entity_descriptor_schema is not None
and not DescriptorSchema.objects.filter(
slug=process.entity_descriptor_schema
).exists()
):
raise LookupError(
"Descriptow schema '{}' referenced in 'entity_descriptor_schema' not "
"found.".format(process.entity_descriptor_schema)
)
process.save()
class Migration(migrations.Migration):
dependencies = [
("flow", "0022_process_entity_1"),
]
operations = [migrations.RunPython(migrate_flow_collection)]
| apache-2.0 | 5,028,491,642,079,892,000 | 30.081081 | 86 | 0.633913 | false |
pitunti/alfaPitunti | plugin.video.alfa/channels/peliculashindu.py | 1 | 5105 | # -*- coding: utf-8 -*-
import re
import urlparse
from core import httptools
from core import scrapertools
from core import servertools
from core.item import Item
from platformcode import config, logger
host = "http://www.peliculashindu.com/"
def mainlist(item):
logger.info()
itemlist = list()
itemlist.append(
Item(channel=item.channel, action="lista", title="Top Películas", url=urlparse.urljoin(host, "top")))
itemlist.append(Item(channel=item.channel, action="lista", title="Novedades", url=host))
itemlist.append(Item(channel=item.channel, action="explorar", title="Género", url=urlparse.urljoin(host, "genero")))
itemlist.append(Item(channel=item.channel, action="explorar", title="Listado Alfabético",
url=urlparse.urljoin(host, "alfabetico")))
# itemlist.append(Item(channel=item.channel, action="explorar", title="Listado por año", url=urlparse.urljoin(host, "año")))
itemlist.append(Item(channel=item.channel, action="lista", title="Otras Películas (No Bollywood)",
url=urlparse.urljoin(host, "estrenos")))
itemlist.append(Item(channel=item.channel, title="Buscar", action="search", url=urlparse.urljoin(host, "buscar-")))
return itemlist
def explorar(item):
logger.info()
itemlist = list()
url1 = str(item.url)
data = httptools.downloadpage(host).data
data = re.sub(r"\n|\r|\t|\s{2}| ", "", data)
# logger.info("loca :"+url1+" aaa"+data)
if 'genero' in url1:
patron = '<div class="d"><h3>Pel.+?neros<\/h3>(.+?)<\/h3>'
if 'alfabetico' in url1:
patron = '<\/li><\/ul><h3>Pel.+?tico<\/h3>(.+?)<\/h3>'
if 'año' in url1:
patron = '<ul class="anio"><li>(.+?)<\/ul>'
data_explorar = scrapertools.find_single_match(data, patron)
patron_explorar = '<a href="([^"]+)">([^"]+)<\/a>'
matches = scrapertools.find_multiple_matches(data_explorar, patron_explorar)
for scrapedurl, scrapedtitle in matches:
if 'Acci' in scrapedtitle:
scrapedtitle = 'Acción'
if 'Anima' in scrapedtitle:
scrapedtitle = 'Animación'
if 'Fanta' in scrapedtitle:
scrapedtitle = 'Fantasía'
if 'Hist' in scrapedtitle:
scrapedtitle = 'Histórico'
if 'lico Guerra' in scrapedtitle:
scrapedtitle = 'Bélico Guerra'
if 'Ciencia' in scrapedtitle:
scrapedtitle = 'Ciencia Ficción'
itemlist.append(item.clone(action='lista', title=scrapedtitle, url=scrapedurl))
return itemlist
def search(item, texto):
logger.info()
texto = texto.replace(" ", "-")
item.url = item.url + texto
# logger.info("item="+item.url)
if texto != '':
return lista(item)
def lista(item):
logger.info()
itemlist = []
data = httptools.downloadpage(item.url).data
    data = re.sub(r"\n|\r|\t|\s{2}| ", "", data)  # remove tabs, double spaces, line breaks, etc.
url1 = str(item.url)
if 'http://www.peliculashindu.com/' in url1:
url1 = url1.replace("http://www.peliculashindu.com/", "")
if url1 != 'estrenos':
data = scrapertools.find_single_match(data, '<div id="cuerpo"><div class="iz">.+>Otras')
# data= scrapertools.find_single_match(data,'<div id="cuerpo"><div class="iz">.+>Otras')
patron = '<a href="([^"]+)"><img src="([^"]+)" alt="([^"]+)"' # scrapedurl, scrapedthumbnail, scrapedtitle
matches = scrapertools.find_multiple_matches(data, patron)
for scrapedurl, scrapedthumbnail, scrapedtitle in matches: # scrapedthumbnail, scrapedtitle in matches:
itemlist.append(item.clone(title=scrapedtitle, url=scrapedurl, thumbnail=scrapedthumbnail, action="findvideos",
show=scrapedtitle))
    # Pagination
patron_pag = '<a href="([^"]+)" title="Siguiente .+?">'
paginasig = scrapertools.find_single_match(data, patron_pag)
next_page_url = item.url + paginasig
if paginasig != "":
item.url = next_page_url
itemlist.append(Item(channel=item.channel, action="lista", title=">> Página siguiente", url=next_page_url,
thumbnail='https://s32.postimg.org/4zppxf5j9/siguiente.png'))
return itemlist
def findvideos(item):
logger.info()
itemlist = []
data = httptools.downloadpage(item.url).data
data = re.sub(r"\n|\r|\t|\s{2}| ", "", data)
itemlist.extend(servertools.find_video_items(data=data))
logger.info("holaa" + data)
patron_show = '<strong>Ver Pel.+?a([^<]+) online<\/strong>'
show = scrapertools.find_single_match(data, patron_show)
logger.info("holaa" + show)
for videoitem in itemlist:
videoitem.channel = item.channel
if config.get_videolibrary_support() and len(itemlist) > 0:
itemlist.append(
Item(channel=item.channel, title='[COLOR yellow]Añadir esta pelicula a la videoteca[/COLOR]', url=item.url,
action="add_pelicula_to_library", extra="findvideos", contentTitle=show))
return itemlist
| gpl-3.0 | 7,154,396,034,331,516,000 | 39.704 | 128 | 0.630896 | false |
NCI-GDC/gdcdatamodel | docs/bin/schemata_to_graphviz.py | 1 | 1305 | import os
from gdcdatamodel import models as m
from graphviz import Digraph
def build_visualization():
print('Building schema documentation...')
# Load directory tree info
bin_dir = os.path.dirname(os.path.realpath(__file__))
root_dir = os.path.join(os.path.abspath(
os.path.join(bin_dir, os.pardir, os.pardir)))
# Create graph
dot = Digraph(
comment="High level graph representation of GDC data model", format='pdf')
dot.graph_attr['rankdir'] = 'RL'
dot.node_attr['fillcolor'] = 'lightblue'
dot.node_attr['style'] = 'filled'
# Add nodes
for node in m.Node.get_subclasses():
label = node.get_label()
print label
dot.node(label, label)
# Add edges
for edge in m.Edge.get_subclasses():
if edge.__dst_class__ == 'Case' and edge.label == 'relates_to':
# Skip case cache edges
continue
src = m.Node.get_subclass_named(edge.__src_class__)
dst = m.Node.get_subclass_named(edge.__dst_class__)
dot.edge(src.get_label(), dst.get_label(), edge.get_label())
gv_path = os.path.join(root_dir, 'docs', 'viz', 'gdc_data_model.gv')
dot.render(gv_path)
print('graphviz output to {}'.format(gv_path))
if __name__ == '__main__':
build_visualization()
| apache-2.0 | -1,928,741,707,726,198,000 | 29.348837 | 82 | 0.613027 | false |
Diacamma2/asso | diacamma/member/migrations/0002_change_activity.py | 1 | 1810 | # -*- coding: utf-8 -*-
'''
Initial django functions
@author: Laurent GAY
@organization: sd-libre.fr
@contact: [email protected]
@copyright: 2015 sd-libre.fr
@license: This file is part of Lucterios.
Lucterios is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Lucterios is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Lucterios. If not, see <http://www.gnu.org/licenses/>.
'''
from __future__ import unicode_literals
from django.db import migrations, models
from django.utils.translation import ugettext_lazy as _
from diacamma.member.models import Activity, License
def convert_values(*args):
# add default activity
if len(Activity.objects.all()) == 0:
default_act = Activity.objects.create(
name=_("default"), description=_("default"))
else:
default_act = Activity.objects.all()[0]
for lic in License.objects.filter(activity__isnull=True):
lic.activity = default_act
lic.update()
class Migration(migrations.Migration):
dependencies = [
('member', '0001_initial'),
]
operations = [
migrations.RunPython(convert_values),
migrations.AlterField(
model_name='license',
name='activity',
field=models.ForeignKey(
default=None, on_delete=models.deletion.PROTECT, to='member.Activity', verbose_name='activity'),
),
]
| gpl-3.0 | -43,555,210,291,383,250 | 29.677966 | 112 | 0.695028 | false |
bintoro/schematics | schematics/types/base.py | 1 | 27989 | import uuid
import re
import datetime
import decimal
import itertools
import functools
import random
import string
import six
from six import iteritems
from ..exceptions import (
StopValidation, ValidationError, ConversionError, MockCreationError
)
try:
from string import ascii_letters # PY3
except ImportError:
from string import letters as ascii_letters #PY2
try:
basestring #PY2
except NameError:
basestring = str #PY3
try:
unicode #PY2
except:
import codecs
unicode = str #PY3
def utf8_decode(s):
if six.PY3:
        s = str(s)  # TODO: is this the right thing to do?
else:
s = unicode(s, 'utf-8')
return s
def fill_template(template, min_length, max_length):
return template % random_string(
get_value_in(
min_length,
max_length,
padding=len(template) - 2,
required_length=1))
def force_unicode(obj, encoding='utf-8'):
if isinstance(obj, basestring):
if not isinstance(obj, unicode):
#obj = unicode(obj, encoding)
obj = utf8_decode(obj)
elif not obj is None:
#obj = unicode(obj)
obj = utf8_decode(obj)
return obj
def get_range_endpoints(min_length, max_length, padding=0, required_length=0):
if min_length is None and max_length is None:
min_length = 0
max_length = 16
elif min_length is None:
min_length = 0
elif max_length is None:
max_length = max(min_length * 2, 16)
if padding:
max_length = max_length - padding
min_length = max(min_length - padding, 0)
if max_length < required_length:
raise MockCreationError(
'This field is too short to hold the mock data')
min_length = max(min_length, required_length)
return min_length, max_length
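# Quick sketch of the helper above (results follow directly from the code):
#
#     get_range_endpoints(None, None)           # -> (0, 16)
#     get_range_endpoints(4, None)              # -> (4, 16), i.e. max(4 * 2, 16)
#     get_range_endpoints(None, 10, padding=2)  # -> (0, 8)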
def get_value_in(min_length, max_length, padding=0, required_length=0):
return random.randint(
*get_range_endpoints(min_length, max_length, padding, required_length))
def random_string(length, chars=ascii_letters + string.digits):
return ''.join(random.choice(chars) for _ in range(length))
_last_position_hint = -1
_next_position_hint = itertools.count()
class TypeMeta(type):
"""
Meta class for BaseType. Merges `MESSAGES` dict and accumulates
validator methods.
"""
def __new__(mcs, name, bases, attrs):
messages = {}
validators = []
for base in reversed(bases):
if hasattr(base, 'MESSAGES'):
messages.update(base.MESSAGES)
if hasattr(base, "_validators"):
validators.extend(base._validators)
if 'MESSAGES' in attrs:
messages.update(attrs['MESSAGES'])
attrs['MESSAGES'] = messages
for attr_name, attr in iteritems(attrs):
if attr_name.startswith("validate_"):
validators.append(attr)
attrs["_validators"] = validators
return type.__new__(mcs, name, bases, attrs)
class BaseType(TypeMeta('BaseTypeBase', (object, ), {})):
"""A base class for Types in a Schematics model. Instances of this
class may be added to subclasses of ``Model`` to define a model schema.
Validators that need to access variables on the instance
    can be defined by implementing methods whose names start with ``validate_``
and accept one parameter (in addition to ``self``)
:param required:
Invalidate field when value is None or is not supplied. Default:
False.
:param default:
When no data is provided default to this value. May be a callable.
Default: None.
:param serialized_name:
The name of this field defaults to the class attribute used in the
model. However if the field has another name in foreign data set this
argument. Serialized data will use this value for the key name too.
:param deserialize_from:
A name or list of named fields for which foreign data sets are
searched to provide a value for the given field. This only effects
inbound data.
:param choices:
A list of valid choices. This is the last step of the validator
chain.
:param validators:
A list of callables. Each callable receives the value after it has been
converted into a rich python type. Default: []
:param serialize_when_none:
Dictates if the field should appear in the serialized data even if the
value is None. Default: True
:param messages:
Override the error messages with a dict. You can also do this by
subclassing the Type and defining a `MESSAGES` dict attribute on the
class. A metaclass will merge all the `MESSAGES` and override the
resulting dict with instance level `messages` and assign to
`self.messages`.
"""
MESSAGES = {
'required': u"This field is required.",
'choices': u"Value must be one of {0}.",
}
def __init__(self, required=False, default=None, serialized_name=None,
choices=None, validators=None, deserialize_from=None,
serialize_when_none=None, messages=None):
super(BaseType, self).__init__()
self.required = required
self._default = default
self.serialized_name = serialized_name
if choices and not isinstance(choices, (list, tuple)):
raise TypeError('"choices" must be a list or tuple')
self.choices = choices
self.deserialize_from = deserialize_from
self.validators = [functools.partial(v, self) for v in self._validators]
if validators:
self.validators += validators
self.serialize_when_none = serialize_when_none
self.messages = dict(self.MESSAGES, **(messages or {}))
self._position_hint = next(_next_position_hint) # For ordering of fields
def __call__(self, value):
return self.to_native(value)
def _mock(self, context=None):
return None
@property
def default(self):
default = self._default
if callable(self._default):
default = self._default()
return default
def to_primitive(self, value, context=None):
"""Convert internal data to a value safe to serialize.
"""
return value
def to_native(self, value, context=None):
"""
Convert untrusted data to a richer Python construct.
"""
return value
def allow_none(self):
if hasattr(self, 'owner_model'):
return self.owner_model.allow_none(self)
else:
return self.serialize_when_none
def validate(self, value):
"""
Validate the field and return a clean value or raise a
``ValidationError`` with a list of errors raised by the validation
chain. Stop the validation process from continuing through the
validators by raising ``StopValidation`` instead of ``ValidationError``.
"""
errors = []
for validator in self.validators:
try:
validator(value)
except ValidationError as exc:
errors.extend(exc.messages)
if isinstance(exc, StopValidation):
break
if errors:
raise ValidationError(errors)
def validate_required(self, value):
if self.required and value is None:
raise ValidationError(self.messages['required'])
def validate_choices(self, value):
if self.choices is not None:
if value not in self.choices:
raise ValidationError(self.messages['choices']
.format(unicode(self.choices)))
def mock(self, context=None):
if not self.required and not random.choice([True, False]):
return self.default
if self.choices is not None:
return random.choice(self.choices)
return self._mock(context)
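# Illustrative sketch (not part of the library): TypeMeta collects every
# method whose name starts with ``validate_`` into ``_validators``, and
# ``validate()`` runs them in order. A hypothetical custom type:
#
#     class EvenIntType(BaseType):
#         def validate_even(self, value):
#             if value is not None and value % 2:
#                 raise ValidationError(u"Value must be even.")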
class UUIDType(BaseType):
"""A field that stores a valid UUID value.
"""
MESSAGES = {
'convert': u"Couldn't interpret '{0}' value as UUID.",
}
def _mock(self, context=None):
return uuid.uuid4()
def to_native(self, value, context=None):
if not isinstance(value, uuid.UUID):
try:
value = uuid.UUID(value)
except (AttributeError, TypeError, ValueError):
raise ConversionError(self.messages['convert'].format(value))
return value
def to_primitive(self, value, context=None):
return str(value)
class IPv4Type(BaseType):
""" A field that stores a valid IPv4 address """
def _mock(self, context=None):
return '.'.join(str(random.randrange(256)) for _ in range(4))
@classmethod
def valid_ip(cls, addr):
try:
addr = addr.strip().split(".")
except AttributeError:
return False
try:
return len(addr) == 4 and all(0 <= int(octet) < 256 for octet in addr)
except ValueError:
return False
def validate(self, value):
"""
        Make sure the value is an IPv4 address:
http://stackoverflow.com/questions/9948833/validate-ip-address-from-list
"""
if not IPv4Type.valid_ip(value):
error_msg = 'Invalid IPv4 address'
raise ValidationError(error_msg)
return True
class StringType(BaseType):
"""A unicode string field. Default minimum length is one. If you want to
accept empty strings, init with ``min_length`` 0.
"""
allow_casts = (int, str)
MESSAGES = {
'convert': u"Couldn't interpret '{0}' as string.",
'max_length': u"String value is too long.",
'min_length': u"String value is too short.",
'regex': u"String value did not match validation regex.",
}
def __init__(self, regex=None, max_length=None, min_length=None, **kwargs):
self.regex = regex
self.max_length = max_length
self.min_length = min_length
super(StringType, self).__init__(**kwargs)
def _mock(self, context=None):
return random_string(get_value_in(self.min_length, self.max_length))
def to_native(self, value, context=None):
if value is None:
return None
if not isinstance(value, unicode):
if isinstance(value, self.allow_casts):
if not isinstance(value, str):
value = str(value)
value = utf8_decode(value) #unicode(value, 'utf-8')
else:
raise ConversionError(self.messages['convert'].format(value))
return value
def validate_length(self, value):
len_of_value = len(value) if value else 0
if self.max_length is not None and len_of_value > self.max_length:
raise ValidationError(self.messages['max_length'])
if self.min_length is not None and len_of_value < self.min_length:
raise ValidationError(self.messages['min_length'])
def validate_regex(self, value):
if self.regex is not None and re.match(self.regex, value) is None:
raise ValidationError(self.messages['regex'])
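# Typical declarations (field names and messages are illustrative):
#
#     name = StringType(min_length=1, max_length=64)
#     code = StringType(regex=r'^[A-Z]{3}$',
#                       messages={'regex': u"Use exactly three capitals."})
#
# Note that ``validate_regex`` uses ``re.match``, so patterns are anchored at
# the start of the value but need an explicit ``$`` to anchor the end.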
class URLType(StringType):
"""A field that validates input as an URL.
If verify_exists=True is passed the validate function will make sure
the URL makes a valid connection.
"""
MESSAGES = {
'invalid_url': u"Not a well formed URL.",
'not_found': u"URL does not exist.",
}
URL_REGEX = re.compile(
        r'^https?://'  # scheme
        r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,2000}[A-Z0-9])?\.)+[A-Z]{2,63}\.?|'  # domain
        r'localhost|'  # localhost
        r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'  # or an IPv4 address
        r'(?::\d+)?'  # optional port
        r'(?:/?|[/?]\S+)$', re.IGNORECASE
)
def __init__(self, verify_exists=False, **kwargs):
self.verify_exists = verify_exists
super(URLType, self).__init__(**kwargs)
def _mock(self, context=None):
return fill_template('http://a%s.ZZ', self.min_length,
self.max_length)
def validate_url(self, value):
if not URLType.URL_REGEX.match(value):
raise StopValidation(self.messages['invalid_url'])
if self.verify_exists:
from six.moves import urllib
try:
                request = urllib.request.Request(value)
                urllib.request.urlopen(request)
except Exception:
raise StopValidation(self.messages['not_found'])
class EmailType(StringType):
"""A field that validates input as an E-Mail-Address.
"""
MESSAGES = {
'email': u"Not a well formed email address."
}
EMAIL_REGEX = re.compile(
# dot-atom
r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*"
# quoted-string
r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-011\013\014\016'
r'-\177])*"'
# domain
r')@(?:[A-Z0-9](?:[A-Z0-9-]{0,2000}[A-Z0-9])?\.)+[A-Z]{2,63}\.?$',
re.IGNORECASE
)
def _mock(self, context=None):
return fill_template('%[email protected]', self.min_length,
self.max_length)
def validate_email(self, value):
if not EmailType.EMAIL_REGEX.match(value):
raise StopValidation(self.messages['email'])
class NumberType(BaseType):
"""A number field.
"""
MESSAGES = {
'number_coerce': u"Value '{0}' is not {1}.",
'number_min': u"{0} value should be greater than {1}.",
'number_max': u"{0} value should be less than {1}.",
}
def __init__(self, number_class, number_type,
min_value=None, max_value=None, **kwargs):
self.number_class = number_class
self.number_type = number_type
self.min_value = min_value
self.max_value = max_value
super(NumberType, self).__init__(**kwargs)
def _mock(self, context=None):
return get_value_in(self.min_value, self.max_value)
def to_native(self, value, context=None):
try:
value = self.number_class(value)
except (TypeError, ValueError):
raise ConversionError(self.messages['number_coerce']
.format(value, self.number_type.lower()))
return value
def validate_is_a_number(self, value):
try:
self.number_class(value)
except (TypeError, ValueError):
raise ConversionError(self.messages['number_coerce']
.format(value, self.number_type.lower()))
def validate_range(self, value):
if self.min_value is not None and value < self.min_value:
raise ValidationError(self.messages['number_min']
.format(self.number_type, self.min_value))
if self.max_value is not None and value > self.max_value:
raise ValidationError(self.messages['number_max']
.format(self.number_type, self.max_value))
return value
class IntType(NumberType):
"""A field that validates input as an Integer
"""
def __init__(self, *args, **kwargs):
super(IntType, self).__init__(number_class=int,
number_type='Int',
*args, **kwargs)
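# Sketch of a bounded integer field (values are illustrative):
#
#     age = IntType(min_value=0, max_value=120)
#     age.to_native("42")   # -> 42
#     age.validate(150)     # ValidationError: "Int value should be less than 120."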
class LongType(NumberType):
"""A field that validates input as a Long
"""
def __init__(self, *args, **kwargs):
try:
number_class = long #PY2
except NameError:
number_class = int #PY3
super(LongType, self).__init__(number_class=number_class,
number_type='Long',
*args, **kwargs)
class FloatType(NumberType):
"""A field that validates input as a Float
"""
def __init__(self, *args, **kwargs):
super(FloatType, self).__init__(number_class=float,
number_type='Float',
*args, **kwargs)
class DecimalType(BaseType):
"""A fixed-point decimal number field.
"""
MESSAGES = {
'number_coerce': u"Number '{0}' failed to convert to a decimal.",
'number_min': u"Value should be greater than {0}.",
'number_max': u"Value should be less than {0}.",
}
def __init__(self, min_value=None, max_value=None, **kwargs):
self.min_value, self.max_value = min_value, max_value
super(DecimalType, self).__init__(**kwargs)
def _mock(self, context=None):
return get_value_in(self.min_value, self.max_value)
def to_primitive(self, value, context=None):
return unicode(value)
def to_native(self, value, context=None):
if not isinstance(value, decimal.Decimal):
if not isinstance(value, basestring):
value = unicode(value)
try:
value = decimal.Decimal(value)
except (TypeError, decimal.InvalidOperation):
raise ConversionError(self.messages['number_coerce'].format(value))
return value
def validate_range(self, value):
if self.min_value is not None and value < self.min_value:
error_msg = self.messages['number_min'].format(self.min_value)
raise ValidationError(error_msg)
if self.max_value is not None and value > self.max_value:
error_msg = self.messages['number_max'].format(self.max_value)
raise ValidationError(error_msg)
return value
class HashType(BaseType):
MESSAGES = {
'hash_length': u"Hash value is wrong length.",
'hash_hex': u"Hash value is not hexadecimal.",
}
def _mock(self, context=None):
return random_string(self.LENGTH, string.hexdigits)
def to_native(self, value, context=None):
if len(value) != self.LENGTH:
raise ValidationError(self.messages['hash_length'])
try:
int(value, 16)
except ValueError:
raise ConversionError(self.messages['hash_hex'])
return value
class MD5Type(HashType):
"""A field that validates input as resembling an MD5 hash.
"""
LENGTH = 32
class SHA1Type(HashType):
"""A field that validates input as resembling an SHA1 hash.
"""
LENGTH = 40
class BooleanType(BaseType):
"""A boolean field type. In addition to ``True`` and ``False``, coerces these
values:
+ For ``True``: "True", "true", "1"
+ For ``False``: "False", "false", "0"
"""
TRUE_VALUES = ('True', 'true', '1')
FALSE_VALUES = ('False', 'false', '0')
def _mock(self, context=None):
return random.choice([True, False])
def to_native(self, value, context=None):
if isinstance(value, basestring):
if value in self.TRUE_VALUES:
value = True
elif value in self.FALSE_VALUES:
value = False
if isinstance(value, int) and value in [0, 1]:
value = bool(value)
if not isinstance(value, bool):
raise ConversionError(u"Must be either true or false.")
return value
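# Coercion sketch (added for illustration), following the TRUE_VALUES and
# FALSE_VALUES tables above:
#
#     >>> BooleanType().to_native('1')
#     True
#     >>> BooleanType().to_native('false')
#     False
#     >>> BooleanType().to_native(0)
#     False
#     >>> BooleanType().to_native('yes')    # raises ConversionError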
class DateType(BaseType):
"""Defaults to converting to and from ISO8601 date values.
"""
SERIALIZED_FORMAT = '%Y-%m-%d'
MESSAGES = {
'parse': u"Could not parse {0}. Should be ISO8601 (YYYY-MM-DD).",
}
def __init__(self, **kwargs):
self.serialized_format = self.SERIALIZED_FORMAT
super(DateType, self).__init__(**kwargs)
def _mock(self, context=None):
return datetime.datetime(
year=random.randrange(600) + 1900,
month=random.randrange(12) + 1,
day=random.randrange(28) + 1,
)
def to_native(self, value, context=None):
if isinstance(value, datetime.date):
return value
try:
return datetime.datetime.strptime(value, self.serialized_format).date()
except (ValueError, TypeError):
raise ConversionError(self.messages['parse'].format(value))
def to_primitive(self, value, context=None):
return value.strftime(self.serialized_format)
class DateTimeType(BaseType):
"""Defaults to converting to and from ISO8601 datetime values.
:param formats:
A value or list of values suitable for ``datetime.datetime.strptime``
parsing. Default: `('%Y-%m-%dT%H:%M:%S.%f', '%Y-%m-%dT%H:%M:%S',
'%Y-%m-%dT%H:%M:%S.%fZ', '%Y-%m-%dT%H:%M:%SZ')`
:param serialized_format:
The output format suitable for Python ``strftime``. Default: ``'%Y-%m-%dT%H:%M:%S.%f'``
"""
DEFAULT_FORMATS = (
'%Y-%m-%dT%H:%M:%S.%f', '%Y-%m-%dT%H:%M:%S',
'%Y-%m-%dT%H:%M:%S.%fZ', '%Y-%m-%dT%H:%M:%SZ',
)
SERIALIZED_FORMAT = '%Y-%m-%dT%H:%M:%S.%f'
MESSAGES = {
'parse_formats': u'Could not parse {0}. Valid formats: {1}',
'parse': u"Could not parse {0}. Should be ISO8601.",
}
def __init__(self, formats=None, serialized_format=None, **kwargs):
"""
"""
if isinstance(formats, basestring):
formats = [formats]
if formats is None:
formats = self.DEFAULT_FORMATS
if serialized_format is None:
serialized_format = self.SERIALIZED_FORMAT
self.formats = formats
self.serialized_format = serialized_format
super(DateTimeType, self).__init__(**kwargs)
def _mock(self, context=None):
return datetime.datetime(
year=random.randrange(600) + 1900,
month=random.randrange(12) + 1,
day=random.randrange(28) + 1,
hour=random.randrange(24),
minute=random.randrange(60),
second=random.randrange(60),
microsecond=random.randrange(1000000),
)
def to_native(self, value, context=None):
if isinstance(value, datetime.datetime):
return value
for fmt in self.formats:
try:
return datetime.datetime.strptime(value, fmt)
except (ValueError, TypeError):
continue
if self.formats == self.DEFAULT_FORMATS:
message = self.messages['parse'].format(value)
else:
message = self.messages['parse_formats'].format(
value, ", ".join(self.formats)
)
raise ConversionError(message)
def to_primitive(self, value, context=None):
if callable(self.serialized_format):
return self.serialized_format(value)
return value.strftime(self.serialized_format)
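# Parsing sketch (added for illustration). With DEFAULT_FORMATS, both common
# ISO8601 variants are accepted by to_native:
#
#     >>> dt_field = DateTimeType()
#     >>> dt_field.to_native('2014-05-01T12:00:00.000000')
#     datetime.datetime(2014, 5, 1, 12, 0)
#     >>> dt_field.to_native('2014-05-01T12:00:00Z')
#     datetime.datetime(2014, 5, 1, 12, 0)
#     >>> dt_field.to_native('01/05/2014')    # raises ConversionError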
class GeoPointType(BaseType):
"""A list storing a latitude and longitude.
"""
def _mock(self, context=None):
return (random.randrange(-90, 90), random.randrange(-90, 90))
def to_native(self, value, context=None):
"""Make sure that a geo-value is of type (x, y)
"""
if not len(value) == 2:
raise ValueError('Value must be a two-dimensional point')
if isinstance(value, dict):
for val in value.values():
if not isinstance(val, (float, int)):
raise ValueError('Both values in point must be float or int')
elif isinstance(value, (list, tuple)):
if (not isinstance(value[0], (float, int)) or
not isinstance(value[1], (float, int))):
raise ValueError('Both values in point must be float or int')
else:
raise ValueError('GeoPointType can only accept tuples, lists, or dicts')
return value
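# Accepted shapes (added for illustration): any two-item pair of numbers
# passes, whatever the container:
#
#     >>> GeoPointType().to_native((45.0, -122.3))
#     (45.0, -122.3)
#     >>> GeoPointType().to_native([45, -122])
#     [45, -122]
#     >>> GeoPointType().to_native((45.0,))    # raises ValueError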
class MultilingualStringType(BaseType):
"""
A multilanguage string field, stored as a dict with {'locale': 'localized_value'}.
Minimum and maximum lengths apply to each of the localized values.
At least one of ``default_locale`` or ``context['locale']`` must be defined
when calling ``.to_primitive``.
"""
allow_casts = (int, str)
MESSAGES = {
'convert': u"Couldn't interpret value as string.",
'max_length': u"String value in locale {0} is too long.",
'min_length': u"String value in locale {0} is too short.",
'locale_not_found': u"No requested locale was available.",
'no_locale': u"No default or explicit locales were given.",
'regex_locale': u"Name of locale {0} did not match validation regex.",
'regex_localized': u"String value in locale {0} did not match validation regex.",
}
LOCALE_REGEX = r'^[a-z]{2}(:?_[A-Z]{2})?$'
def __init__(self, regex=None, max_length=None, min_length=None,
default_locale=None, locale_regex=LOCALE_REGEX, **kwargs):
self.regex = re.compile(regex) if regex else None
self.max_length = max_length
self.min_length = min_length
self.default_locale = default_locale
self.locale_regex = re.compile(locale_regex) if locale_regex else None
super(MultilingualStringType, self).__init__(**kwargs)
def _mock(self, context=None):
return random_string(get_value_in(self.min_length, self.max_length))
def to_native(self, value, context=None):
"""Make sure a MultilingualStringType value is a dict or None."""
if not (value is None or isinstance(value, dict)):
raise ValueError('Value must be a dict or None')
return value
def to_primitive(self, value, context=None):
"""
Use a combination of ``default_locale`` and ``context['locale']`` to return
the best localized string.
"""
if value is None:
return None
context_locale = None
if context is not None and 'locale' in context:
context_locale = context['locale']
# Build a list of all possible locales to try
possible_locales = []
for locale in (context_locale, self.default_locale):
if not locale:
continue
if isinstance(locale, basestring):
possible_locales.append(locale)
else:
possible_locales.extend(locale)
if not possible_locales:
raise ConversionError(self.messages['no_locale'])
for locale in possible_locales:
if locale in value:
localized = value[locale]
break
else:
raise ConversionError(self.messages['locale_not_found'])
if not isinstance(localized, unicode):
if isinstance(localized, self.allow_casts):
if not isinstance(localized, str):
localized = str(localized)
#localized = unicode(localized, 'utf-8')
localized = utf8_decode(localized)
else:
raise ConversionError(self.messages['convert'])
return localized
def validate_length(self, value):
for locale, localized in value.items():
len_of_value = len(localized) if localized else 0
if self.max_length is not None and len_of_value > self.max_length:
raise ValidationError(self.messages['max_length'].format(locale))
if self.min_length is not None and len_of_value < self.min_length:
raise ValidationError(self.messages['min_length'].format(locale))
def validate_regex(self, value):
if self.regex is None and self.locale_regex is None:
return
for locale, localized in value.items():
if self.regex is not None and self.regex.match(localized) is None:
raise ValidationError(
self.messages['regex_localized'].format(locale))
if self.locale_regex is not None and self.locale_regex.match(locale) is None:
raise ValidationError(
self.messages['regex_locale'].format(locale))
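# Locale resolution sketch (added for illustration): to_primitive prefers
# context['locale'] and falls back to default_locale.
#
#     >>> field = MultilingualStringType(default_locale='en_US')
#     >>> value = {'en_US': u'hello', 'fr_FR': u'bonjour'}
#     >>> field.to_primitive(value)
#     u'hello'
#     >>> field.to_primitive(value, context={'locale': 'fr_FR'})
#     u'bonjour'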
| bsd-3-clause | -4,595,678,198,879,457,300 | 30.342665 | 95 | 0.586016 | false |
zetaops/ulakbus | selenium_tests/test_admin_donem.py | 1 | 1461 | # -*- coding: utf-8 -*-
from test_settings import Settings
class TestCase(Settings):
def test_sidebar(self):
        # Applies the settings.
self.do_settings()
        # Clicks "Admin".
self.driver.find_element_by_css_selector('li.ng-binding:nth-child(4) > a:nth-child(1)').click()
        # Clicks "Donem" (semester).
self.driver.find_element_by_css_selector('li.ng-scope:nth-child(9) > a:nth-child(1)').click()
        # Because the backend settings were changed, it expects the user to log in again.
self.do_login()
        # Clicks "Admin".
self.driver.find_element_by_css_selector('li.ng-binding:nth-child(4) > a:nth-child(1)').click()
        # Clicks "Donem" (semester).
self.driver.find_element_by_css_selector('li.ng-scope:nth-child(9) > a:nth-child(1)').click()
        # Sends a value to "Donem".
self.driver.find_element_by_css_selector('#ad').send_keys('Guz Donemi')
        # Sends a value to "Baslangic Tarihi" (start date).
self.driver.find_element_by_css_selector('#baslangic_tarihi').send_keys('13.04.2009')
        # Sends a value to "Bitis Tarihi" (end date).
self.driver.find_element_by_css_selector('#bitis_tarihi').send_keys('11.06.2013')
        # Clicks the "guncel" (current) checkbox.
self.driver.find_element_by_css_selector('.checkbox > label:nth-child(1) > input:nth-child(1)').click()
        # Clicks "Kaydet ve Listele" (save and list).
self.driver.find_element_by_css_selector('button.btn:nth-child(2)').click()
| gpl-3.0 | -750,470,740,243,175,200 | 51.178571 | 111 | 0.646817 | false |
tony/pconf | setup.py | 1 | 1673 | #!/usr/bin/env python
# -*- coding: utf8 -*-
"""pconf lives at <https://github.com/tony/pconf>.
pconf
~~~~~
Python configuration management for humans.
"""
import os
import sys
from setuptools import setup, find_packages
with open('requirements.pip') as f:
install_reqs = [line for line in f.read().split('\n') if line]
tests_reqs = []
if sys.version_info < (2, 7):
install_reqs += ['argparse']
tests_reqs += ['unittest2']
import re
VERSIONFILE = "tony/__init__.py"
verstrline = open(VERSIONFILE, "rt").read()
VSRE = r"^__version__ = ['\"]([^'\"]*)['\"]"
mo = re.search(VSRE, verstrline, re.M)
if mo:
__version__ = mo.group(1)
else:
raise RuntimeError("Unable to find version string in %s." % (VERSIONFILE,))
setup(
name='tony',
version=__version__,
url='https://github.com/tony/pconf',
download_url='https://pypi.python.org/pypi/tony',
license='BSD',
author='Tony Narlock',
author_email='[email protected]',
description='China fit into a python package.',
long_description=open('README.rst').read(),
include_package_data=True,
install_requires=install_reqs,
tests_require=tests_reqs,
test_suite='tony.testsuite',
zip_safe=False,
packages=find_packages(exclude=["doc"]),
classifiers=[
'Development Status :: 3 - Alpha',
"License :: OSI Approved :: BSD License",
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
"Topic :: Utilities",
"Topic :: System :: Shells",
],
)
| bsd-3-clause | 466,822,481,173,191,000 | 26.42623 | 79 | 0.616856 | false |
jgowans/correlation_plotter | rfi_looker.py | 1 | 1835 | #!/usr/bin/env python
import os, time
import numpy as np
import matplotlib.pyplot as plt
import scipy.signal
results_directory = os.getenv('HOME') + "/rfi_capture_results/"
SAMPLE_FREQUENCY = 3600.0 # MHz and us
ADC_SCALE_VALUE = 707.94
# algorithm:
# open a .npy file (or do the disk buffer thing)
filename = raw_input("what file should be open? [most recent] ")
if filename == "": # default to the most recent file
filename = "/tmp/rfi_signal.npy"
else:
filename = results_directory + filename
signal = np.load(filename)
decimation_factor = int(len(signal)/2**20) + 1
print "decimation factor: " + str(decimation_factor)
if decimation_factor >= 2 :
signal_decimated = scipy.signal.decimate(signal, decimation_factor, n=1, ftype="fir")
else:
signal_decimated = signal
print "len : " + str(len(signal_decimated))
axis = np.linspace(0, decimation_factor * len(signal_decimated)/SAMPLE_FREQUENCY, len(signal_decimated), endpoint=False)
plt.plot(axis, signal_decimated, "b.")
plt.show()
# plot the signal decimated by a parameter (default: 1)
# ask the user to input a subplot time
start_time = float(raw_input("At what time (microseconds) does the signal start? "))
end_time = float(raw_input("At what time (microseconds) does the signal end? "))
start_sample = int( start_time * SAMPLE_FREQUENCY )
end_sample = int( end_time * SAMPLE_FREQUENCY )
subsignal = signal[start_sample:end_sample]
subsignal_axis = np.linspace(start_time, end_time, len(subsignal), endpoint=False)
spectrum = np.fft.rfft(subsignal)
spectrum_axis = np.linspace(0, SAMPLE_FREQUENCY/2, len(spectrum), endpoint=False)
plt.subplot(211)
plt.plot(subsignal_axis, subsignal)
plt.subplot(212)
plt.plot(spectrum_axis, 10*np.log10( np.abs(spectrum) / (ADC_SCALE_VALUE*len(spectrum) )))
plt.show()
# plot the subplot and the fft of the subplot
| mit | -6,651,409,812,825,622,000 | 35.7 | 120 | 0.72752 | false |
dblN/misc | utils.py | 1 | 3046 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
from keras.layers import Dense
from keras.preprocessing.image import apply_transform
import matplotlib.pyplot as plt
def take_glimpses(image, location, sizes):
glimpses = []
resize = sizes[0]
for size in sizes:
glimpse = tf.image.extract_glimpse(image, size=size, offsets=location,
normalized=True, centered=True, uniform_noise=False)
glimpses += [tf.image.resize_images(glimpse, resize)]
return glimpses
def glimpse_network(image, location, sizes, activation="relu",
glimpse_num_features=128, location_num_features=128, output_dim=256):
assert len(sizes) == 3
with tf.variable_scope("glimpse_network"):
glimpses = []
resize = sizes[0]
for size in sizes:
            glimpse = tf.image.extract_glimpse(image, size=size, offsets=location, uniform_noise=False,
                                               normalized=True, centered=True)
            # Resize every scale to the base glimpse size (size-list API),
            # matching the call in take_glimpses above.
            glimpses += [tf.image.resize_images(glimpse, resize)]
        # Stack the retina scales along the channel axis (TF >= 1.0 arg order).
        glimpse = tf.concat(glimpses, -1)
glimpse = tf.reshape(glimpse, (-1, np.prod(resize) * len(sizes)))
glimpse_feature = Dense(glimpse_num_features, activation=activation)(glimpse)
location_feature = Dense(location_num_features, activation=activation)(location)
feature = Dense(output_dim, activation=activation)(glimpse_feature + location_feature)
return feature, glimpses
def accuracy_score(y_preds, y_true):
return np.sum((y_preds == y_true).astype(np.float32)) / y_preds.shape[0]
def translate(batch_x, size=(128, 128)):
"""Make translated mnist"""
height = batch_x.shape[1]
width = batch_x.shape[2]
X = np.zeros((batch_x.shape[0],) + size + (1,), dtype=batch_x.dtype)
X[:, :height, :width, :] = batch_x
for i, x in enumerate(X[:]):
tx = np.random.uniform(-(size[1] - width), 0)
ty = np.random.uniform(-(size[0] - height), 0)
translation_matrix = np.asarray([
[1, 0, tx],
[0, 1, ty],
[0, 0, 1]
], dtype=batch_x.dtype)
X[i, :, :, :] = apply_transform(x, translation_matrix, channel_index=2, fill_mode="nearest", cval=0.)
return X
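# Usage sketch (added for illustration; assumes the old Keras apply_transform
# signature imported above): embedding 28x28 MNIST digits at random offsets on
# a 128x128 canvas.
#
#     >>> batch = np.zeros((32, 28, 28, 1), dtype=np.float32)  # stand-in batch
#     >>> big = translate(batch, size=(128, 128))
#     >>> big.shape
#     (32, 128, 128, 1)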
def plot_glimpse(images, locations, name="glimpse.png"):
image = images[0]
location = locations[:, 0, :]
fig = plt.figure()
plt.imshow(image, cmap=plt.get_cmap("gray"))
plt.plot(location[:, 0], location[:, 1])
for i, (x, y) in enumerate(location):
plt.annotate("t=%d" % i, xy=(x, y), xytext=(-10, 10),
textcoords="offset points", ha="right", va="bottom",
bbox=dict(boxstyle="round, pad=0.5", fc="white", alpha=0.5),
arrowprops=dict(arrowstyle="->", connectionstyle="arc3, rad=0"))
plt.savefig(name)
plt.gcf().clear()
plt.close("all")
| mit | -7,068,430,309,194,083,000 | 34.418605 | 109 | 0.602101 | false |
banacer/lab221 | building_control/Python/Pubsub.py | 1 | 1316 | import pika
import logging
import sys
# Broker connection settings. Despite the "mqtt" naming, pika speaks AMQP,
# so this host is expected to run a RabbitMQ (or other AMQP) broker.
__mqtt_host = '172.26.50.120'
__mqtt_port = 1883
def printit(ch, method, properties, body):
"""
    Prints the message body. It is the default callback method.
    :param ch: the channel (unused; pass None when calling directly)
    :param method: the delivery method frame (unused; pass None when calling directly)
    :param properties: the message properties (unused; pass None when calling directly)
    :param body: the message body
:return:
"""
print(" [x] %r" % body)
def sub(queue_name, callback=printit):
    """
    Subscribes to the queue and consumes messages; this call blocks.
:param queue_name: the queue to subscribe to
:param callback: optional callback function
:return:
"""
connection = pika.BlockingConnection(pika.ConnectionParameters(host=__mqtt_host))
channel = connection.channel()
channel.queue_declare(queue=queue_name)
channel.basic_consume(callback,queue=queue_name,no_ack=True)
logging.info(' [*] Waiting for messages. To exit press CTRL+C')
channel.start_consuming()
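# Usage sketch (added for illustration; assumes a RabbitMQ broker is reachable
# at __mqtt_host). sub() blocks, so publish from another process or thread:
#
#     >>> pub('building1', 'temperature:21.5')    # fire-and-forget publish
#     >>> sub('building1')                        # blocks, printing messages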
def pub(queue_name,message):
"""
publish to queue
:param queue_name: queue name
:param message: message
"""
connection = pika.BlockingConnection(pika.ConnectionParameters(host=__mqtt_host))
channel = connection.channel()
channel.queue_declare(queue=queue_name)
channel.basic_publish(exchange='',routing_key=queue_name,body=message)
logging.info(" [x] Sent %s" % message)
connection.close() | mit | 2,891,826,054,302,727,000 | 28.931818 | 85 | 0.681611 | false |
JosefAssad/stock-data-cacher | tests.py | 1 | 19610 | #!/usr/bin/env python
# Copyright 2012 Josef Assad
#
# This file is part of Stock Data Cacher.
#
# Stock Data Cacher is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Stock Data Cacher is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Stock Data Cacher. If not, see <http://www.gnu.org/licenses/>.
import pdb
import unittest
from sqlalchemy.orm import *
from sqlalchemy.orm.exc import NoResultFound
from sqlalchemy import *
import os
import datetime
import hashlib
from nose.tools import with_setup
from StockCollection import StockCollection
from model import Datapoint, Stock, Day
import settings
dp_A_20120323=[u'NYSE', u"A", u"2012-03-23,43.57,44.30,43.15,44.30,3369400,44.20"]
dp_A_20120326=[u'NYSE', u"A", u"2012-03-26,44.87,45.12,44.63,45.05,3467100,44.95"]
dp_A_20120327=[u'NYSE', u"A", u"2012-03-27,45.05,46.28,44.99,45.67,3804100,45.57"]
dp_A_20120328=[u'NYSE', u"A", u"2012-03-28,45.42,45.47,44.09,44.59,3089300,44.49"]
dp_AA_20120323=[u'NYSE', u"AA", u"2012-03-23,10.01,10.26,9.96,10.11,20016500,10.08"]
dp_AA_20120326=[u'NYSE', u"AA", u"2012-03-26,10.25,10.30,10.12,10.22,13772200,10.19"]
dp_AA_20120327=[u'NYSE', u"AA", u"2012-03-27,10.25,10.31,10.06,10.06,19193300,10.03"]
dp_AA_20120328=[u'NYSE', u"AA", u"2012-03-28,10.06,10.06,9.79,9.83,36435200,9.80"]
dp_AAN_20120323=[u'NYSE', u"AAN", u"2012-03-23,25.73,25.83,25.12,25.65,406500,25.65"]
dp_AAN_20120326=[u'NYSE', u"AAN", u"2012-03-26,25.92,26.33,25.90,26.17,537400,26.17"]
dp_AAN_20120327=[u'NYSE', u"AAN", u"2012-03-27,26.12,26.50,26.05,26.06,609900,26.06"]
class testValidation(unittest.TestCase):
def setUp(self):
self.settings = settings
self.stock_collection = StockCollection(self.settings)
def tearDown(self):
pass
def testSymbolValidation(self):
"""Testing validation of stock symbols
"""
result = self.stock_collection.validate_symbol(u"ABC")
assert result[1] == True and result[0] == u"ABC",\
'expected symbol ABC to generate True, instead it generated False'
result = self.stock_collection.validate_symbol(u"AB.C")
assert result[1] == False and result[0] == u"AB.C",\
'expected symbol AB.C to generate False, instead it generated True'
result = self.stock_collection.validate_symbol(u"AB-C")
assert result[1] == False and result[0] == u"AB-PC",\
'expected symbol ABC to generate True and AB-PC, instead it generated [%s, %s]' %\
(result[0], result[1])
class testSymbolLoading(object):
def setUp(self):
self.settings = settings
self.engine = create_engine(settings.db_url)
self.Session = sessionmaker(bind=self.engine)
self.stock_collection = StockCollection(self.settings)
self.stock_collection.wipe()
self.stock_collection.create_db()
def tearDown(self):
cache_file_paths = []
for stock in self.stock_collection.stocks:
cache_file_paths.append(self.stock_collection.get_cache_file_path(stock.symbol, stock.market))
meta = MetaData(bind=self.stock_collection.engine)
meta.reflect()
self.stock_collection.wipe()
for cache_file_path in cache_file_paths:
assert not os.path.exists(cache_file_path),\
'cache file %s was not removed' % cache_file_path
engine = create_engine(self.settings.db_url)
meta2 = MetaData(bind=engine)
meta2.reflect()
assert len(meta2.tables) == 0, 'tables were not deleted. %s remain.' % len(meta.tables)
def test_gen(self):
"""testing stock entity loading
"""
data = []
def create_test_symbols_file(market_name, full_file, rows):
outfile_name = 'data/' + market_name + '_test.txt'
outfile = open(outfile_name, 'w')
infile = open(full_file, 'r')
lines = iter(infile)
for foo in xrange(1, rows+2):
line = lines.next()
outfile.write(line)
outfile.close()
infile.close()
tempdata = {u'name':u'NYSE', u'file_full':u'data/NYSE_full.txt', u'file':u"data/NYSE_test.txt", u'stocks':[]}
tempdata['stocks'].append({u'market':u'NYSE', u'symbol':u'A', u'name':u'Agilent Technologies'})
tempdata['stocks'].append({u'market':u'NYSE', u'symbol':u'AA', u'name':u'Alcoa Inc.'})
tempdata['stocks'].append({u'market':u'NYSE', u'symbol':u'AAN', u'name':u'Aaron\'s Inc.'})
data.append(tempdata)
tempdata = {'name':u'NASDAQ', u'file_full':u'data/NASDAQ_full.txt', u'file':u"data/NASDAQ_test.txt", u'stocks':[]}
tempdata['stocks'].append({u'market': u'NASDAQ', u'symbol':u'AAC', u'name':u'Australia Acquisition'})
tempdata['stocks'].append({u'market': u'NASDAQ', u'symbol':u'AACC', u'name':u'Asset Acceptance Capital'})
tempdata['stocks'].append({u'market': u'NASDAQ', u'symbol':u'AACOU', u'name':u'Australia Acquisition Corp.'})
data.append(tempdata)
max_markets = len(data)
max_stocks = 3
for num_markets in xrange(1, max_markets+1):
markets = data[:num_markets]
market_files = []
for market in markets:
market_files.append({u'name':market[u'name'], u'file':market[u'file']})
for num_stocks in xrange(1, max_stocks+1):
for market in markets:
create_test_symbols_file(market[u'name'], market[u'file_full'], num_stocks)
expected_symbols = []
for d in data[:num_markets]:
for s in d['stocks'][:num_stocks]:
expected_symbols.append(s)
yield self.check_symbol_loading_works, market_files, expected_symbols
@with_setup(setUp, tearDown)
def check_symbol_loading_works(self, markets_list, expected_symbols):
session = self.Session()
stocks_raw = []
self.settings.symbols_files = markets_list
self.stock_collection.load_symbols(self.settings)
for es in expected_symbols:
stock = self.get_stock_from_db(es['market'], es['symbol'], es['name'])
assert stock, 'stock \'%s\' not found in db' % es['name']
assert os.path.exists(self.stock_collection.\
get_cache_file_path(es['symbol'], es['market'])),\
'cache file not found for stock %s' % es['name']
num_stocks = len(session.query(Stock).all())
expected_num_stocks = len(expected_symbols)
assert num_stocks == expected_num_stocks,\
'expected %s stock in db, found %s' % (expected_num_stocks, num_stocks)
def testLoad1Symbol1Market(self):
"""loading of 1 symbol, 1 market file
"""
session = self.Session()
stocks_raw = []
self.settings.symbols_files = [{u'name':u'NYSE', u'file':u"data/NYSE_test1.txt"}]
stocks_raw.append({u'market':u'NYSE', u'symbol':u'A', u'name':u'Agilent Technologies'})
self.stock_collection.load_symbols(self.settings)
for stock_raw in stocks_raw:
stock = self.get_stock_from_db(stock_raw['market'],\
stock_raw['symbol'], stock_raw['name'])
assert stock, 'stock \'%s\' not found in db' % stock_raw['name']
assert os.path.exists(self.stock_collection.\
get_cache_file_path(stock_raw['symbol'], stock_raw['market'])),\
'cache file not found for stock %s' % stock_raw['name']
num_stocks = len(session.query(Stock).all())
expected_num_stocks = len(stocks_raw)
assert num_stocks == expected_num_stocks,\
'expected %s stock in db, found %s' % (expected_num_stocks, num_stocks)
def get_stock_from_db(self, market, symbol, name=""):
session = self.Session()
try:
if not name:
stock = session.query(Stock).\
filter(Stock.market == market).\
filter(Stock.symbol == symbol).one()
return stock
else:
stock = session.query(Stock).\
filter(Stock.market == market).\
filter(Stock.symbol == symbol).\
filter(Stock.name == name).one()
return stock
except NoResultFound:
return False
except:
return False
def testLoad2Symbol1Market(self):
"""loading of 2 symbols, 1 market file
"""
session = self.Session()
stocks_raw = []
self.settings.symbols_files = [{u'name':u'NYSE', u'file':u"data/NYSE_test2.txt"}]
stocks_raw.append({u'market':u'NYSE', u'symbol':u'A', u'name':u'Agilent Technologies'})
stocks_raw.append({u'market':u'NYSE', u'symbol':u'AA', u'name':u'Alcoa Inc.'})
self.stock_collection.load_symbols(self.settings)
for stock_raw in stocks_raw:
stock = self.get_stock_from_db(stock_raw['market'], stock_raw['symbol'], stock_raw['name'])
assert stock, 'stock \'%s\' not found in db' % stock_raw['name']
assert os.path.exists(self.stock_collection.\
get_cache_file_path(stock_raw['symbol'], stock_raw['market'])),\
'cache file not found for stock %s' % stock_raw['name']
num_stocks = len(session.query(Stock).all())
expected_num_stocks = len(stocks_raw)
assert num_stocks == expected_num_stocks, 'expected %s stock in db, found %s' % (expected_num_stocks, num_stocks)
def testLoad1Symbol2Market(self):
"""loading of 2 market files 1 symbol each
"""
session = self.Session()
stocks_raw = []
self.settings.symbols_files = [{u'name':u'NYSE', u'file':u"data/NYSE_test1.txt"},
{u'name':u'NASDAQ', u'file':u"data/NASDAQ_test1.txt"}]
stocks_raw.append({u'market':u'NYSE', u'symbol':u'A', u'name':u'Agilent Technologies'})
stocks_raw.append({u'market':u'NASDAQ', u'symbol':u'AAC', u'name':u'Australia Acquisition'})
self.stock_collection.load_symbols(self.settings)
for stock_raw in stocks_raw:
stock = self.get_stock_from_db(stock_raw['market'], stock_raw['symbol'], stock_raw['name'])
assert stock, 'stock \'%s\' not found in db' % stock_raw['name']
assert os.path.exists(self.stock_collection.\
get_cache_file_path(stock_raw['symbol'], stock_raw['market'])),\
'cache file not found for stock %s' % stock_raw['name']
num_stocks = len(session.query(Stock).all())
expected_num_stocks = len(stocks_raw)
assert num_stocks == expected_num_stocks,\
'expected %s stock in db, found %s' % (expected_num_stocks, num_stocks)
def testLoad2Symbol2Market(self):
"""loading of 2 market files 2 symbols each
"""
session = self.Session()
stocks_raw = []
self.settings.symbols_files = [{u'name':u'NYSE', u'file':u"data/NYSE_test2.txt"},
{u'name':u'NASDAQ', u'file':u"data/NASDAQ_test2.txt"}]
stocks_raw.append({u'market':u'NYSE', u'symbol':u'A', u'name':u'Agilent Technologies'})
stocks_raw.append({u'market':u'NASDAQ', u'symbol':u'AAC', u'name':u'Australia Acquisition'})
stocks_raw.append({u'market':u'NYSE', u'symbol':u'AA', u'name':u'Alcoa Inc.'})
stocks_raw.append({u'market':u'NASDAQ', u'symbol':u'AACC', u'name':u'Asset Acceptance Capital'})
self.stock_collection.load_symbols(self.settings)
for stock_raw in stocks_raw:
stock = self.get_stock_from_db(stock_raw['market'], stock_raw['symbol'], stock_raw['name'])
assert stock, 'stock \'%s\' not found in db' % stock_raw['name']
assert os.path.exists(self.stock_collection.\
get_cache_file_path(stock_raw['symbol'], stock_raw['market'])),\
'cache file not found for stock %s' % stock_raw['name']
num_stocks = len(session.query(Stock).all())
expected_num_stocks = len(stocks_raw)
assert num_stocks == expected_num_stocks,\
'expected %s stock in db, found %s' % (expected_num_stocks, num_stocks)
class testCache(unittest.TestCase):
def setUp(self):
self.settings = settings
self.engine = create_engine(settings.db_url)
self.Session = sessionmaker(bind=self.engine)
self.stock_collection = StockCollection(self.settings)
self.stock_collection.wipe()
self.stock_collection.create_db()
def tearDown(self):
cache_file_paths = []
for stock in self.stock_collection.stocks:
cache_file_paths.append(self.stock_collection.get_cache_file_path(stock.symbol, stock.market))
meta = MetaData(bind=self.stock_collection.engine)
meta.reflect()
self.stock_collection.wipe()
for cache_file_path in cache_file_paths:
assert not os.path.exists(cache_file_path),\
'cache file %s was not removed' % cache_file_path
engine = create_engine(self.settings.db_url)
meta2 = MetaData(bind=engine)
meta2.reflect()
assert len(meta2.tables) == 0, 'tables were not deleted. %s remain.' % len(meta.tables)
def testUseCase1(self):
"""Testing use case 1
This use case consists of following steps:
1. Initialise stock collection
2. Add 1 stock to it.
3. Update the cache
4. Update the db
5. Wait 1 day then update cache and db again
6. Add 1 stock
7. Update cache and db
8. Wait 1 day, then update cache and db
"""
session = self.Session()
# 1. Initialise stock collection was done in setUp()
# 2. Add 1 stock to it.
self.settings.symbols_files = [{u'name':u'NYSE', u'file':u"data/NYSE_test1.txt"}]
self.stock_collection.load_symbols(self.settings)
assert len(session.query(Stock).\
filter(Stock.symbol == u"A").\
filter(Stock.name == u"Agilent Technologies").all()) == 1, \
'error adding stock to db'
stock = session.query(Stock).\
filter(Stock.symbol == u"A").\
filter(Stock.name == u"Agilent Technologies").one()
assert os.path.exists(self.stock_collection.\
get_cache_file_path(stock.symbol, stock.market)), 'foo'
# 3. Update the cache
self.stock_collection.settings.start_date = datetime.date(year=2012, month=3, day=23)
self.stock_collection.settings.today = datetime.date(year=2012, month=3, day=26)
self.stock_collection.update_cache()
stock_cache_file = self.stock_collection.get_cache_file_path(stock.symbol, stock.market)
cache_file = open(stock_cache_file)
cache_contents = cache_file.read()
cache_file.close()
assert hashlib.sha1(cache_contents).\
hexdigest() == "d304d9962bc0c95ced93fe9826ed12b965d398b5",\
"cache file has wrong sha1 hexdigest after initial data load"
# 4. update the db from cache
self.stock_collection.update_db()
num_dps = len(session.query(Datapoint).all())
assert num_dps == 2, 'expected 2 datapoints, found %s' % num_dps
assert self.dps_are_in_db([dp_A_20120323, dp_A_20120326], to_exclusion=True),\
'didn\'t find all the db entries we expected'
# 5. Wait 1 day then update cache and db again
self.stock_collection.settings.today = datetime.date(year=2012, month=3, day=27)
self.stock_collection.update_cache()
cache_file = open(stock_cache_file)
cache_contents = cache_file.read()
cache_file.close()
assert hashlib.sha1(cache_contents).\
hexdigest() == "033aaa5c736c9f44074dfd4d2657b0c44c406793",\
"cache file has wrong sha1 hexdigest after first cache update"
self.stock_collection.update_db()
num_dps = len(session.query(Datapoint).all())
assert num_dps == 3, 'expected 3 datapoints, found %s' % num_dps
assert self.dps_are_in_db([dp_A_20120323, dp_A_20120326, dp_A_20120327], to_exclusion=True),\
'didn\'t find all the db entries we expected'
# 6. Add 1 stock
self.settings.symbols_files = [{u'name':u'NYSE', u'file':u"data/NYSE_test2.txt"}]
self.stock_collection.load_symbols(self.settings)
# 7. Update cache and db
self.stock_collection.update_cache()
self.stock_collection.update_db()
num_dps = len(session.query(Datapoint).all())
assert num_dps == 6, 'expected 6 datapoints, found %s' % num_dps
expected_dps = [dp_A_20120323, dp_A_20120326, dp_A_20120327, dp_AA_20120323, dp_AA_20120326, dp_AA_20120327]
assert self.dps_are_in_db(expected_dps, to_exclusion=True),\
'didn\'t find all the db entries we expected'
# 8. Wait 1 day, then update cache and db
self.stock_collection.settings.today = datetime.date(year=2012, month=3, day=28)
self.stock_collection.update_cache()
self.stock_collection.update_db()
num_dps = len(session.query(Datapoint).all())
assert num_dps == 8, 'expected 8 datapoints, found %s' % num_dps
assert self.dps_are_in_db([dp_A_20120323, dp_A_20120326, dp_A_20120327, dp_A_20120328,
dp_AA_20120323, dp_AA_20120326, dp_AA_20120327, dp_AA_20120328],\
to_exclusion=True), 'didn\'t find all the db entries we expected'
def dps_are_in_db(self, dps, to_exclusion=False):
session = self.Session()
parsed_dps = []
existing_dps = []
for dp in dps:
parsed_dp = self.stock_collection.parse_csv_line(dp[2])
parsed_dp['market'] = dp[0]
parsed_dp['stock'] = dp[1]
parsed_dps.append(parsed_dp)
for existing_dp in session.query(Datapoint).all():
foo = {}
foo['market'] = existing_dp.stock.market
foo['stock'] = existing_dp.stock.symbol
foo['open_val'] = existing_dp.open_val
foo['high'] = existing_dp.high
foo['low'] = existing_dp.low
foo['close'] = existing_dp.close
foo['volume'] = existing_dp.volume
foo['adj_close'] = existing_dp.adj_close
foo['date'] = existing_dp.day.date
existing_dps.append(foo)
if to_exclusion:
for dp in parsed_dps:
if dp not in existing_dps: return False
if len(parsed_dps) != len(existing_dps): return False
return True
else:
for dp in parsed_dps:
if dp not in existing_dps: return False
return True
| gpl-3.0 | 7,874,646,917,034,667,000 | 47.902743 | 122 | 0.594238 | false |
bokeh/bokeh | tests/unit/bokeh/models/test_ranges.py | 1 | 9638 | #-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2021, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import annotations # isort:skip
import pytest ; pytest
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
import datetime as dt
# External imports
import mock
# Bokeh imports
from bokeh.core.validation import check_integrity, process_validation_issues
from _util_models import check_properties_existence
# Module under test
from bokeh.models import Range1d, DataRange1d, FactorRange # isort:skip
#-----------------------------------------------------------------------------
# Setup
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
class Test_Range1d:
def test_basic(self) -> None:
r = Range1d()
check_properties_existence(r, [
"start",
"end",
"reset_start",
"reset_end",
"bounds",
"min_interval",
"max_interval"],
)
def test_init_with_timedelta(self) -> None:
range1d = Range1d(start=-dt.timedelta(seconds=5), end=dt.timedelta(seconds=3))
assert range1d.start == -dt.timedelta(seconds=5)
assert range1d.end == dt.timedelta(seconds=3)
assert range1d.bounds is None
def test_init_with_datetime(self) -> None:
range1d = Range1d(start=dt.datetime(2016, 4, 28, 2, 20, 50), end=dt.datetime(2017, 4, 28, 2, 20, 50))
assert range1d.start == dt.datetime(2016, 4, 28, 2, 20, 50)
assert range1d.end == dt.datetime(2017, 4, 28, 2, 20, 50)
assert range1d.bounds is None
def test_init_with_float(self) -> None:
range1d = Range1d(start=-1.0, end=3.0)
assert range1d.start == -1.0
assert range1d.end == 3.0
assert range1d.bounds is None
def test_init_with_int(self) -> None:
range1d = Range1d(start=-1, end=3)
assert range1d.start == -1
assert range1d.end == 3
assert range1d.bounds is None
def test_init_with_positional_arguments(self) -> None:
range1d = Range1d(1, 2)
assert range1d.start == 1
assert range1d.end == 2
assert range1d.bounds is None
def test_init_with_keyword_arguments(self) -> None:
range1d = Range1d(start=1, end=2)
assert range1d.start == 1
assert range1d.end == 2
assert range1d.bounds is None
def test_cannot_initialize_with_both_keyword_and_positional_arguments(self) -> None:
with pytest.raises(ValueError):
Range1d(1, 2, start=1, end=2)
def test_cannot_initialize_with_three_positional_arguments(self) -> None:
with pytest.raises(ValueError):
Range1d(1, 2, 3)
def test_with_max_bound_smaller_than_min_bounded_raises_valueerror(self) -> None:
with pytest.raises(ValueError):
Range1d(1, 2, bounds=(1, 0))
with pytest.raises(ValueError):
Range1d(1, 2, bounds=[1, 0])
def test_bounds_with_text_rejected_as_the_correct_value_error(self) -> None:
with pytest.raises(ValueError) as e:
Range1d(1, 2, bounds="21") # The string is indexable, so this may not fail properly
assert "expected either None or" in e.value.args[0]
def test_bounds_with_three_item_tuple_raises_valueerror(self) -> None:
with pytest.raises(ValueError):
Range1d(1, 2, bounds=(0, 1, 2))
class Test_DataRange1d:
def test_basic(self) -> None:
r = DataRange1d()
check_properties_existence(r, [
"names",
"renderers",
"range_padding",
"range_padding_units",
"flipped",
"follow",
"follow_interval",
"default_span",
"start",
"end",
"bounds",
"min_interval",
"max_interval",
"only_visible"],
)
def test_init_with_no_arguments(self) -> None:
datarange1d = DataRange1d()
assert datarange1d.start is None
assert datarange1d.end is None
assert datarange1d.bounds is None
def test_init_with_timedelta(self) -> None:
datarange1d = DataRange1d(start=-dt.timedelta(seconds=5), end=dt.timedelta(seconds=3))
assert datarange1d.start == -dt.timedelta(seconds=5)
assert datarange1d.end == dt.timedelta(seconds=3)
assert datarange1d.bounds is None
def test_init_with_datetime(self) -> None:
datarange1d = DataRange1d(start=dt.datetime(2016, 4, 28, 2, 20, 50), end=dt.datetime(2017, 4, 28, 2, 20, 50))
assert datarange1d.start == dt.datetime(2016, 4, 28, 2, 20, 50)
assert datarange1d.end == dt.datetime(2017, 4, 28, 2, 20, 50)
assert datarange1d.bounds is None
def test_init_with_float(self) -> None:
datarange1d = DataRange1d(start=-1.0, end=3.0)
assert datarange1d.start == -1.0
assert datarange1d.end == 3.0
assert datarange1d.bounds is None
def test_init_with_int(self) -> None:
datarange1d = DataRange1d(start=-1, end=3)
assert datarange1d.start == -1
assert datarange1d.end == 3
assert datarange1d.bounds is None
def test_init_with_follow_sets_bounds_to_none(self) -> None:
datarange1d = DataRange1d(follow="start")
assert datarange1d.follow == "start"
assert datarange1d.bounds is None
def test_init_with_bad_bounds(self) -> None:
with pytest.raises(ValueError):
DataRange1d(1, 2, bounds=(1, 0))
with pytest.raises(ValueError):
DataRange1d(1, 2, bounds=[1, 0])
with pytest.raises(ValueError):
Range1d(1, 2, bounds="21")
class Test_FactorRange:
def test_basic(self) -> None:
r = FactorRange()
check_properties_existence(r, [
"factors",
"factor_padding",
"group_padding",
"subgroup_padding",
"range_padding",
"range_padding_units",
"start",
"end",
"bounds",
"min_interval",
"max_interval"],
)
    def test_init_defaults(self) -> None:
factor_range = FactorRange("a", "b")
assert factor_range.factors == ["a", "b"]
assert factor_range.range_padding == 0
assert factor_range.range_padding_units == "percent"
assert factor_range.factor_padding == 0
assert factor_range.group_padding == 1.4
assert factor_range.subgroup_padding == 0.8
assert factor_range.bounds == None
assert factor_range.min_interval == None
assert factor_range.max_interval == None
def test_init_with_positional_arguments(self) -> None:
factor_range = FactorRange("a", "b")
assert factor_range.factors == ["a", "b"]
factor_range = FactorRange(["a", "x"], ["b", "y"])
assert factor_range.factors == [["a", "x"], ["b", "y"]]
factor_range = FactorRange(["a", "x", "1'"], ["b", "y", "2"])
assert factor_range.factors == [["a", "x", "1'"], ["b", "y", "2"]]
def test_init_with_keyword_arguments(self) -> None:
factor_range = FactorRange(factors=["a", "b", "c", "d", "e"])
assert factor_range.factors == ["a", "b", "c", "d", "e"]
def test_cannot_initialize_with_both_keyword_and_positional_arguments(self) -> None:
with pytest.raises(ValueError):
FactorRange(["a", "b", "c"], factors=["a", "b", "c"])
def test_duplicate_factors_raises_validation_error(self) -> None:
r = FactorRange("foo", "bar", "foo")
with mock.patch('bokeh.core.validation.check.log') as mock_logger:
issues = check_integrity([r])
process_validation_issues(issues)
assert mock_logger.error.call_count == 1
r = FactorRange(factors=[("foo", "a"), ("foo", "b"), ("foo", "a")])
with mock.patch('bokeh.core.validation.check.log') as mock_logger:
issues = check_integrity([r])
process_validation_issues(issues)
assert mock_logger.error.call_count == 1
r = FactorRange(factors=[("foo", "a", "1"), ("foo", "a", "2"), ("foo", "a", "1")])
with mock.patch('bokeh.core.validation.check.log') as mock_logger:
issues = check_integrity([r])
process_validation_issues(issues)
assert mock_logger.error.call_count == 1
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
| bsd-3-clause | -2,753,696,687,269,874,700 | 36.069231 | 117 | 0.513903 | false |
BL-Labs/jokedbapp | jokedbapp/tests/test_load_omeka.py | 1 | 1534 | import unittest
from models import *
from utils.handle_transcript import TranscriptionParser, OmekaXML, om
from test_data import TRANSCRIPTIONS
OMEKA_COLLECTION = "test_omeka_collection.xml"
# User, Transcription, Joke, Picture
class TestUserClass(unittest.TestCase):
def setUp(self):
self.TP = TranscriptionParser()
self.o = OmekaXML()
from database import init_test_db
self.db_session = init_test_db()
self.u = User('admin', 'admin@localhost', 'admin', 'saltypasswordhash')
self.db_session.add(self.u)
self.db_session.commit()
def test_u01_user_create_regular(self):
u = User('ben', 'regular@localhost', 'transcriber', 'saltypasswordhash')
self.db_session.add(u)
self.db_session.commit()
def test_u02_user_create_publisher(self):
u = User('bob', 'bob@localhost', 'publisher', 'saltypasswordhash')
self.db_session.add(u)
self.db_session.commit()
def test_u03_find_an_admin(self):
admin = User.query.filter(User.role == 'admin').first()
self.assertEquals(admin.name, 'admin')
def test_u04_test_is_admin(self):
admin = User.query.filter(User.role == 'admin').first()
self.assertEquals(admin.is_admin(), True)
def test_u05_user_alter_email(self):
self.u.email = 'newadminemail'
self.db_session.add(self.u)
self.db_session.commit()
# now query for it
email_match = User.query.filter(User.email == "newadminemail").first()
self.assertEquals(email_match.name, 'admin')
def tearDown(self):
self.db_session.remove()
| mit | 3,395,962,392,050,536,000 | 29.078431 | 77 | 0.690352 | false |
pobear/django-xadmin | xadmin/plugins/actions.py | 1 | 10988 | from django import forms
from django.core.exceptions import PermissionDenied
from django.db import router
from django.http import HttpResponse, HttpResponseRedirect
from django.template import loader
from django.template.response import TemplateResponse
# from django.utils.datastructures import SortedDict
from collections import OrderedDict as SortedDict
from django.utils.encoding import force_unicode
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _, ungettext
from django.utils.text import capfirst
from xadmin.sites import site
from xadmin.util import model_format_dict, get_deleted_objects, model_ngettext
from xadmin.views import BaseAdminPlugin, ListAdminView
from xadmin.views.base import filter_hook, ModelAdminView
ACTION_CHECKBOX_NAME = '_selected_action'
checkbox = forms.CheckboxInput({'class': 'action-select'}, lambda value: False)
def action_checkbox(obj):
return checkbox.render(ACTION_CHECKBOX_NAME, force_unicode(obj.pk))
action_checkbox.short_description = mark_safe(
'<input type="checkbox" id="action-toggle" />')
action_checkbox.allow_tags = True
action_checkbox.allow_export = False
action_checkbox.is_column = False
class BaseActionView(ModelAdminView):
action_name = None
description = None
icon = 'fa fa-tasks'
model_perm = 'change'
@classmethod
def has_perm(cls, list_view):
return list_view.get_model_perms()[cls.model_perm]
def init_action(self, list_view):
self.list_view = list_view
self.admin_site = list_view.admin_site
@filter_hook
def do_action(self, queryset):
pass
class DeleteSelectedAction(BaseActionView):
action_name = "delete_selected"
description = _(u'Delete selected %(verbose_name_plural)s')
delete_confirmation_template = None
delete_selected_confirmation_template = None
delete_models_batch = True
model_perm = 'delete'
icon = 'fa fa-times'
@filter_hook
def delete_models(self, queryset):
n = queryset.count()
if n:
if self.delete_models_batch:
queryset.delete()
else:
for obj in queryset:
obj.delete()
self.message_user(_("Successfully deleted %(count)d %(items)s.") % {
"count": n, "items": model_ngettext(self.opts, n)
}, 'success')
@filter_hook
def do_action(self, queryset):
# Check that the user has delete permission for the actual model
if not self.has_delete_permission():
raise PermissionDenied
using = router.db_for_write(self.model)
# Populate deletable_objects, a data structure of all related objects that
# will also be deleted.
deletable_objects, perms_needed, protected = get_deleted_objects(
queryset, self.opts, self.user, self.admin_site, using)
# The user has already confirmed the deletion.
# Do the deletion and return a None to display the change list view again.
if self.request.POST.get('post'):
if perms_needed:
raise PermissionDenied
self.delete_models(queryset)
# Return None to display the change list page again.
return None
if len(queryset) == 1:
objects_name = force_unicode(self.opts.verbose_name)
else:
objects_name = force_unicode(self.opts.verbose_name_plural)
if perms_needed or protected:
title = _("Cannot delete %(name)s") % {"name": objects_name}
else:
title = _("Are you sure?")
context = self.get_context()
context.update({
"title": title,
"objects_name": objects_name,
"deletable_objects": [deletable_objects],
'queryset': queryset,
"perms_lacking": perms_needed,
"protected": protected,
"opts": self.opts,
"app_label": self.app_label,
'action_checkbox_name': ACTION_CHECKBOX_NAME,
})
# Display the confirmation page
return TemplateResponse(self.request, self.delete_selected_confirmation_template or
self.get_template_list('views/model_delete_selected_confirm.html'), context, current_app=self.admin_site.name)
class ActionPlugin(BaseAdminPlugin):
# Actions
actions = []
actions_selection_counter = True
global_actions = [DeleteSelectedAction]
def init_request(self, *args, **kwargs):
self.actions = self.get_actions()
return bool(self.actions)
def get_list_display(self, list_display):
if self.actions:
list_display.insert(0, 'action_checkbox')
self.admin_view.action_checkbox = action_checkbox
return list_display
def get_list_display_links(self, list_display_links):
if self.actions:
if len(list_display_links) == 1 and list_display_links[0] == 'action_checkbox':
return list(self.admin_view.list_display[1:2])
return list_display_links
def get_context(self, context):
if self.actions and self.admin_view.result_count:
av = self.admin_view
selection_note_all = ungettext('%(total_count)s selected',
'All %(total_count)s selected', av.result_count)
new_context = {
'selection_note': _('0 of %(cnt)s selected') % {'cnt': len(av.result_list)},
'selection_note_all': selection_note_all % {'total_count': av.result_count},
'action_choices': self.get_action_choices(),
'actions_selection_counter': self.actions_selection_counter,
}
context.update(new_context)
return context
def post_response(self, response, *args, **kwargs):
request = self.admin_view.request
av = self.admin_view
# Actions with no confirmation
if self.actions and 'action' in request.POST:
action = request.POST['action']
if action not in self.actions:
msg = _("Items must be selected in order to perform "
"actions on them. No items have been changed.")
av.message_user(msg)
else:
ac, name, description, icon = self.actions[action]
select_across = request.POST.get('select_across', False) == '1'
selected = request.POST.getlist(ACTION_CHECKBOX_NAME)
if not selected and not select_across:
# Reminder that something needs to be selected or nothing will happen
msg = _("Items must be selected in order to perform "
"actions on them. No items have been changed.")
av.message_user(msg)
else:
queryset = av.list_queryset._clone()
if not select_across:
# Perform the action only on the selected objects
queryset = av.list_queryset.filter(pk__in=selected)
response = self.response_action(ac, queryset)
# Actions may return an HttpResponse, which will be used as the
# response from the POST. If not, we'll be a good little HTTP
# citizen and redirect back to the changelist page.
if isinstance(response, HttpResponse):
return response
else:
return HttpResponseRedirect(request.get_full_path())
return response
def response_action(self, ac, queryset):
if isinstance(ac, type) and issubclass(ac, BaseActionView):
action_view = self.get_model_view(ac, self.admin_view.model)
action_view.init_action(self.admin_view)
return action_view.do_action(queryset)
else:
return ac(self.admin_view, self.request, queryset)
def get_actions(self):
if self.actions is None:
return SortedDict()
actions = [self.get_action(action) for action in self.global_actions]
for klass in self.admin_view.__class__.mro()[::-1]:
class_actions = getattr(klass, 'actions', [])
if not class_actions:
continue
actions.extend(
[self.get_action(action) for action in class_actions])
# get_action might have returned None, so filter any of those out.
actions = filter(None, actions)
# Convert the actions into a SortedDict keyed by name.
actions = SortedDict([
(name, (ac, name, desc, icon))
for ac, name, desc, icon in actions
])
return actions
def get_action_choices(self):
"""
Return a list of choices for use in a form object. Each choice is a
tuple (name, description).
"""
choices = []
for ac, name, description, icon in self.actions.itervalues():
choice = (name, description % model_format_dict(self.opts), icon)
choices.append(choice)
return choices
def get_action(self, action):
if isinstance(action, type) and issubclass(action, BaseActionView):
if not action.has_perm(self.admin_view):
return None
return action, getattr(action, 'action_name'), getattr(action, 'description'), getattr(action, 'icon')
elif callable(action):
func = action
action = action.__name__
elif hasattr(self.admin_view.__class__, action):
func = getattr(self.admin_view.__class__, action)
else:
return None
if hasattr(func, 'short_description'):
description = func.short_description
else:
description = capfirst(action.replace('_', ' '))
return func, action, description, getattr(func, 'icon', 'tasks')
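    # Resolution sketch (added for illustration): an entry in an admin view's
    # ``actions`` list may be a BaseActionView subclass, a bare callable, or
    # the name of a method on the admin view, e.g.:
    #
    #     actions = [DeleteSelectedAction,    # class-based action
    #                make_published,          # plain function (hypothetical)
    #                'mark_archived']         # method name (hypothetical)
    #
    # get_action() normalizes each form to (func, name, description, icon).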
# View Methods
def result_header(self, item, field_name, row):
if item.attr and field_name == 'action_checkbox':
item.classes.append("action-checkbox-column")
return item
def result_item(self, item, obj, field_name, row):
if item.field is None and field_name == u'action_checkbox':
item.classes.append("action-checkbox")
return item
# Media
def get_media(self, media):
if self.actions and self.admin_view.result_count:
media = media + self.vendor('xadmin.plugin.actions.js', 'xadmin.plugins.css')
return media
# Block Views
def block_results_bottom(self, context, nodes):
if self.actions and self.admin_view.result_count:
nodes.append(loader.render_to_string('xadmin/blocks/model_list.results_bottom.actions.html', context_instance=context))
site.register_plugin(ActionPlugin, ListAdminView)
| bsd-3-clause | -8,988,290,766,967,416,000 | 36.630137 | 142 | 0.608664 | false |
kimus/django-blocks | blocks/migrations/0007_auto__chg_field_pagetranslation_title.py | 1 | 6688 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'PageTranslation.title'
db.alter_column(u'blocks_page_translation', 'title', self.gf('django.db.models.fields.CharField')(max_length=200))
def backwards(self, orm):
# Changing field 'PageTranslation.title'
db.alter_column(u'blocks_page_translation', 'title', self.gf('django.db.models.fields.CharField')(max_length=80))
models = {
u'blocks.menu': {
'Meta': {'object_name': 'Menu'},
'expiry_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'keyword': ('django.db.models.fields.CharField', [], {'max_length': '20', 'null': 'True', 'blank': 'True'}),
u'level': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
u'lft': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'order': ('blocks.fields.OrderField', [], {'default': '0', 'db_index': 'True', 'blank': 'True'}),
'parent': ('mptt.fields.TreeForeignKey', [], {'blank': 'True', 'related_name': "'children'", 'null': 'True', 'to': u"orm['blocks.Menu']"}),
'publish_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'rght': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['sites.Site']", 'db_index': 'True', 'symmetrical': 'False'}),
'slug': ('blocks.fields.SlugURLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '1', 'db_index': 'True'}),
u'tree_id': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True'}),
'type': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'url': ('django.db.models.fields.CharField', [], {'max_length': '200', 'db_index': 'True'})
},
u'blocks.menutranslation': {
'Meta': {'unique_together': "[('language_code', 'master')]", 'object_name': 'MenuTranslation', 'db_table': "u'blocks_menu_translation'"},
'description': ('django.db.models.fields.TextField', [], {'max_length': '200', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language_code': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'master': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'translations'", 'null': 'True', 'to': u"orm['blocks.Menu']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '80'})
},
u'blocks.page': {
'Meta': {'ordering': "['url', 'order']", 'object_name': 'Page'},
'expiry_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_relative': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'menu': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'order': ('blocks.fields.OrderField', [], {'default': '0', 'db_index': 'True', 'blank': 'True'}),
'publish_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['sites.Site']", 'db_index': 'True', 'symmetrical': 'False'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '1', 'db_index': 'True'}),
'template_name': ('django.db.models.fields.CharField', [], {'max_length': '70', 'blank': 'True'}),
'url': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200', 'db_index': 'True'})
},
u'blocks.pagetranslation': {
'Meta': {'unique_together': "[('language_code', 'master')]", 'object_name': 'PageTranslation', 'db_table': "u'blocks_page_translation'"},
'content': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language_code': ('django.db.models.fields.CharField', [], {'max_length': '15', 'db_index': 'True'}),
'master': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'translations'", 'null': 'True', 'to': u"orm['blocks.Page']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'blocks.promotable': {
'Meta': {'object_name': 'Promotable'},
'expiry_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'promoted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'publish_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'sites': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['sites.Site']", 'db_index': 'True', 'symmetrical': 'False'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '1', 'db_index': 'True'})
},
u'blocks.template': {
'Meta': {'object_name': 'Template'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '80'}),
'template': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
u'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
}
}
complete_apps = ['blocks'] | mit | 2,696,382,157,176,265,700 | 70.924731 | 154 | 0.54994 | false |
Ichimonji10/robottelo | tests/foreman/ui/test_discoveryrule.py | 1 | 22334 | # -*- encoding: utf-8 -*-
"""Test class for Foreman Discovery Rules
@Requirement: Discoveryrule
@CaseAutomation: Automated
@CaseLevel: Acceptance
@CaseComponent: UI
@TestType: Functional
@CaseImportance: High
@Upstream: No
"""
from fauxfactory import gen_integer, gen_ipaddr, gen_string
from nailgun import entities
from robottelo.datafactory import (
filtered_datapoint,
invalid_values_list,
valid_data_list,
)
from robottelo.decorators import run_only_on, skip_if_bug_open, tier1
from robottelo.test import UITestCase
from robottelo.ui.factory import make_discoveryrule
from robottelo.ui.locators import common_locators
from robottelo.ui.session import Session
@filtered_datapoint
def valid_search_queries():
"""Generates a list of all the input strings, (excluding html)"""
return [
'cpu_count ^ 10',
'disk_count > 5',
'disks_size <= {0}'.format(gen_string('numeric', 8)),
'ip = {0}'.format(gen_ipaddr()),
'model = KVM',
u'organization ~ {0}'.format(entities.Organization().create().name),
u'subnet = {0}'.format(entities.Subnet().create().name),
]
class DiscoveryRuleTestCase(UITestCase):
"""Implements Foreman discovery Rules in UI."""
@classmethod
def setUpClass(cls):
"""Display all the discovery rules on the same page"""
super(DiscoveryRuleTestCase, cls).setUpClass()
cls.per_page = entities.Setting().search(
query={'search': 'name="entries_per_page"'})[0]
cls.saved_per_page = str(cls.per_page.value)
cls.per_page.value = '100000'
cls.per_page.update({'value'})
cls.host_group = entities.HostGroup().create()
@classmethod
def tearDownClass(cls):
"""Restore previous 'entries_per_page' value"""
cls.per_page.value = cls.saved_per_page
cls.per_page.update({'value'})
super(DiscoveryRuleTestCase, cls).tearDownClass()
@run_only_on('sat')
@tier1
def test_positive_create_with_name(self):
"""Create Discovery Rule using different names
@id: afdf7000-4bd0-41ec-9773-96ff68e27b8d
@Assert: Rule should be successfully created
"""
with Session(self.browser) as session:
for name in valid_data_list():
with self.subTest(name):
make_discoveryrule(
session, name=name, hostgroup=self.host_group.name)
self.assertIsNotNone(self.discoveryrules.search(name))
@run_only_on('sat')
@tier1
def test_positive_create_with_search(self):
"""Create Discovery Rule using different search queries
@id: 973ff6e5-572e-401c-bc8c-d614a583e883
@Assert: Rule should be successfully created and has expected search
field value
"""
with Session(self.browser) as session:
for query in valid_search_queries():
with self.subTest(query):
name = gen_string('alpha')
make_discoveryrule(
session,
name=name,
hostgroup=self.host_group.name,
search_rule=query,
)
self.assertIsNotNone(self.discoveryrules.search(name))
self.assertEqual(
self.discoveryrules.get_attribute_value(
name, 'search'),
query
)
@run_only_on('sat')
@tier1
def test_positive_create_with_hostname(self):
"""Create Discovery Rule using valid hostname value
@id: e6742ca5-1d41-4ba3-8f2c-2169db92485b
@Assert: Rule should be successfully created and has expected hostname
field value
"""
name = gen_string('alpha')
hostname = gen_string('alpha')
with Session(self.browser) as session:
make_discoveryrule(
session,
name=name,
hostgroup=self.host_group.name,
hostname=hostname,
)
self.assertIsNotNone(self.discoveryrules.search(name))
self.assertEqual(
self.discoveryrules.get_attribute_value(name, 'hostname'),
hostname
)
@run_only_on('sat')
@tier1
def test_positive_create_with_hosts_limit(self):
"""Create Discovery Rule providing any number from range 1..100 for
hosts limit field
@id: 64b90586-c1a9-4be4-8c44-4fa19ca998f8
@Assert: Rule should be successfully created and has expected hosts
limit field value
"""
name = gen_string('alpha')
limit = str(gen_integer(1, 100))
with Session(self.browser) as session:
make_discoveryrule(
session,
name=name,
hostgroup=self.host_group.name,
host_limit=limit,
)
self.assertIsNotNone(self.discoveryrules.search(name))
self.assertEqual(
self.discoveryrules.get_attribute_value(name, 'host_limit'),
limit
)
@run_only_on('sat')
@tier1
def test_positive_create_with_priority(self):
"""Create Discovery Rule providing any number from range 1..100 for
priority field
@id: de847288-257a-4f0e-9cb6-9a0dd0877d23
@Assert: Rule should be successfully created and has expected priority
field value
"""
name = gen_string('alpha')
priority = str(gen_integer(1, 100))
with Session(self.browser) as session:
make_discoveryrule(
session,
name=name,
hostgroup=self.host_group.name,
priority=priority,
)
self.assertIsNotNone(self.discoveryrules.search(name))
self.assertEqual(
self.discoveryrules.get_attribute_value(name, 'priority'),
priority
)
@run_only_on('sat')
@tier1
def test_positive_create_disabled(self):
"""Create Discovery Rule in disabled state
@id: 0b98d467-aabf-4efe-890f-50d6edcd99ff
@Assert: Disabled rule should be successfully created
"""
name = gen_string('alpha')
with Session(self.browser) as session:
make_discoveryrule(
session,
name=name,
hostgroup=self.host_group.name,
enabled=False,
)
self.assertIsNotNone(self.discoveryrules.search(name))
self.assertEqual(
self.discoveryrules.get_attribute_value(
name, 'enabled', element_type='checkbox'),
False
)
@run_only_on('sat')
@tier1
def test_negative_create_with_invalid_name(self):
"""Create Discovery Rule with invalid names
@id: 79d950dc-4ca1-407e-84ca-9092d1cba978
@Assert: Error should be raised and rule should not be created
"""
with Session(self.browser) as session:
for name in invalid_values_list(interface='ui'):
with self.subTest(name):
make_discoveryrule(
session, name=name, hostgroup=self.host_group.name)
self.assertIsNotNone(
self.discoveryrules.wait_until_element(
common_locators['name_haserror'])
)
self.assertIsNone(self.discoveryrules.search(name))
@run_only_on('sat')
@tier1
def test_negative_create_with_invalid_hostname(self):
"""Create Discovery Rule with invalid hostname
@id: a322c8ce-4f05-401a-88cb-a3d30b4ac446
@Assert: Error should be raised and rule should not be created
"""
name = gen_string('alpha')
with Session(self.browser) as session:
make_discoveryrule(
session,
name=name,
hostgroup=self.host_group.name,
hostname=gen_string('numeric'),
)
self.assertIsNotNone(self.discoveryrules.wait_until_element(
common_locators['haserror']
))
self.assertIsNone(self.discoveryrules.search(name))
@run_only_on('sat')
@tier1
def test_negative_create_with_limit(self):
"""Create Discovery Rule with invalid host limit
@id: 743d29f4-a901-400c-ad98-a3b8942f02b5
@Assert: Error should be raised and rule should not be created
"""
name = gen_string('alpha')
with Session(self.browser) as session:
for limit in '-1', gen_string('alpha'):
with self.subTest(limit):
make_discoveryrule(
session,
name=name,
host_limit=limit,
hostgroup=self.host_group.name,
)
self.assertIsNotNone(
self.discoveryrules.wait_until_element(
common_locators['haserror'])
)
self.assertIsNone(self.discoveryrules.search(name))
@run_only_on('sat')
@skip_if_bug_open('bugzilla', 1308831)
@tier1
def test_negative_create_with_too_long_limit(self):
"""Create Discovery Rule with too long host limit value
@id: 450b49d9-1058-4186-9b23-15cc615e5bd6
@Assert: Validation error should be raised and rule should not be
created
"""
name = gen_string('alpha')
with Session(self.browser) as session:
make_discoveryrule(
session,
name=name,
host_limit=gen_string('numeric', 50),
hostgroup=self.host_group.name,
)
self.assertIsNotNone(self.discoveryrules.wait_until_element(
common_locators['haserror']
))
self.assertIsNone(self.discoveryrules.search(name))
@run_only_on('sat')
@tier1
def test_negative_create_with_same_name(self):
"""Create Discovery Rule with name that already exists
@id: 5a914e76-de01-406d-9860-0e4e1521b074
@Assert: Error should be raised and rule should not be created
"""
name = gen_string('alpha')
with Session(self.browser) as session:
make_discoveryrule(
session, name=name, hostgroup=self.host_group.name)
self.assertIsNotNone(self.discoveryrules.search(name))
make_discoveryrule(
session, name=name, hostgroup=self.host_group.name)
self.assertIsNotNone(self.discoveryrules.wait_until_element(
common_locators['name_haserror']
))
@run_only_on('sat')
@tier1
def test_negative_create_with_invalid_priority(self):
"""Create Discovery Rule with invalid priority
@id: f8829cce-86c0-452c-b866-d5645174e9e1
@Assert: Error should be raised and rule should not be created
"""
name = gen_string('alpha')
with Session(self.browser) as session:
make_discoveryrule(
session,
name=name,
hostgroup=self.host_group.name,
priority=gen_string('alpha'),
)
self.assertIsNotNone(self.discoveryrules.wait_until_element(
common_locators['haserror']
))
self.assertIsNone(self.discoveryrules.search(name))
@run_only_on('sat')
@tier1
def test_positive_delete(self):
"""Delete existing Discovery Rule
@id: fc5b714c-e5bc-4b0f-bc94-88e080318704
@Assert: Rule should be successfully deleted
"""
with Session(self.browser) as session:
for name in valid_data_list():
with self.subTest(name):
make_discoveryrule(
session, name=name, hostgroup=self.host_group.name)
self.assertIsNotNone(self.discoveryrules.search(name))
self.discoveryrules.delete(name)
@run_only_on('sat')
@tier1
def test_positive_update_name(self):
"""Update discovery rule name
@id: 16a79449-7200-492e-9ddb-65fc034e510d
@Assert: Rule name is updated
"""
name = gen_string('alpha')
with Session(self.browser) as session:
make_discoveryrule(
session, name=name, hostgroup=self.host_group.name)
self.assertIsNotNone(self.discoveryrules.search(name))
for new_name in valid_data_list():
with self.subTest(new_name):
self.discoveryrules.update(name=name, new_name=new_name)
self.assertIsNotNone(self.discoveryrules.search(new_name))
name = new_name # for next iteration
@run_only_on('sat')
@tier1
def test_positive_update_query(self):
"""Update discovery rule search query
@id: bcf85a4c-0b27-47a5-8d5d-7ede0f6eea41
@Assert: Rule search field is updated
"""
name = gen_string('alpha')
with Session(self.browser) as session:
make_discoveryrule(
session, name=name, hostgroup=self.host_group.name)
self.assertIsNotNone(self.discoveryrules.search(name))
for new_query in valid_search_queries():
with self.subTest(new_query):
self.discoveryrules.update(
name=name, search_rule=new_query)
self.assertEqual(
self.discoveryrules.get_attribute_value(
name, 'search'),
new_query
)
@run_only_on('sat')
@tier1
def test_positive_update_hostgroup(self):
"""Update discovery rule host group
@id: e10274e9-bf1b-42cd-a809-f19e707e7f4c
@Assert: Rule host group is updated
"""
name = gen_string('alpha')
new_hostgroup_name = entities.HostGroup().create().name
with Session(self.browser) as session:
make_discoveryrule(
session, name=name, hostgroup=self.host_group.name)
self.assertIsNotNone(self.discoveryrules.search(name))
self.assertEqual(
self.discoveryrules.get_attribute_value(
name, 'hostgroup', element_type='select'),
self.host_group.name
)
self.discoveryrules.update(name=name, hostgroup=new_hostgroup_name)
self.assertEqual(
self.discoveryrules.get_attribute_value(
name, 'hostgroup', element_type='select'),
new_hostgroup_name
)
@run_only_on('sat')
@tier1
def test_positive_update_hostname(self):
"""Update discovery rule hostname value
@id: 753ff15b-da73-4fb3-87cd-14d504d8e882
@Assert: Rule host name is updated
"""
name = gen_string('alpha')
hostname = gen_string('alpha')
with Session(self.browser) as session:
make_discoveryrule(
session, name=name, hostgroup=self.host_group.name)
self.assertIsNotNone(self.discoveryrules.search(name))
self.discoveryrules.update(name=name, hostname=hostname)
self.assertEqual(
self.discoveryrules.get_attribute_value(name, 'hostname'),
hostname
)
@run_only_on('sat')
@tier1
def test_positive_update_limit(self):
"""Update discovery rule limit value
@id: 69d59c34-407b-47d0-a2b8-46decb95ef47
@Assert: Rule host limit field is updated
"""
name = gen_string('alpha')
limit = str(gen_integer(1, 100))
with Session(self.browser) as session:
make_discoveryrule(
session, name=name, hostgroup=self.host_group.name)
self.assertIsNotNone(self.discoveryrules.search(name))
self.discoveryrules.update(name=name, host_limit=limit)
self.assertEqual(
self.discoveryrules.get_attribute_value(name, 'host_limit'),
limit
)
@run_only_on('sat')
@tier1
def test_positive_update_priority(self):
"""Update discovery rule priority value
@id: be4de7a9-df8e-44ae-9910-7397341f6d07
@Assert: Rule priority is updated
"""
name = gen_string('alpha')
priority = str(gen_integer(1, 100))
with Session(self.browser) as session:
make_discoveryrule(
session, name=name, hostgroup=self.host_group.name)
self.assertIsNotNone(self.discoveryrules.search(name))
self.discoveryrules.update(name=name, priority=priority)
self.assertEqual(
self.discoveryrules.get_attribute_value(name, 'priority'),
priority
)
@run_only_on('sat')
@tier1
def test_positive_update_disable_enable(self):
"""Update discovery rule enabled state. (Disabled->Enabled)
@id: 60d619e4-a039-4f9e-a16c-b05f0598e8fa
@Assert: Rule enabled checkbox is updated
"""
name = gen_string('alpha')
with Session(self.browser) as session:
make_discoveryrule(
session,
name=name,
hostgroup=self.host_group.name,
enabled=False,
)
self.assertIsNotNone(self.discoveryrules.search(name))
self.discoveryrules.update(name=name, enabled=True)
self.assertEqual(
self.discoveryrules.get_attribute_value(
name, 'enabled', element_type='checkbox'),
True
)
@run_only_on('sat')
@tier1
def test_negative_update_name(self):
"""Update discovery rule name using invalid names only
@id: 65f32628-796a-4d7e-bf2c-c84c6b06f309
@Assert: Rule name is not updated
"""
name = gen_string('alpha')
with Session(self.browser) as session:
make_discoveryrule(
session, name=name, hostgroup=self.host_group.name)
self.assertIsNotNone(self.discoveryrules.search(name))
for new_name in invalid_values_list(interface='ui'):
with self.subTest(new_name):
self.discoveryrules.update(name=name, new_name=new_name)
self.assertIsNotNone(
self.discoveryrules.wait_until_element(
common_locators['name_haserror'])
)
self.assertIsNone(self.discoveryrules.search(new_name))
@run_only_on('sat')
@tier1
def test_negative_update_hostname(self):
"""Update discovery rule host name using number as a value
@id: 18713425-22fe-4eaa-a515-8e08aa07e116
@Assert: Rule host name is not updated
"""
name = gen_string('alpha')
hostname = gen_string('alpha')
with Session(self.browser) as session:
make_discoveryrule(
session,
name=name,
hostgroup=self.host_group.name,
hostname=hostname,
)
self.assertIsNotNone(self.discoveryrules.search(name))
self.discoveryrules.update(
name=name, hostname=gen_string('numeric'))
self.assertIsNotNone(self.discoveryrules.wait_until_element(
common_locators['haserror']
))
self.assertEqual(
self.discoveryrules.get_attribute_value(name, 'hostname'),
hostname
)
@run_only_on('sat')
@tier1
def test_negative_update_limit(self):
"""Update discovery rule host limit using invalid values
@id: 7e8b7218-3c8a-4b03-b0df-484e0d793ceb
@Assert: Rule host limit is not updated
"""
name = gen_string('alpha')
limit = str(gen_integer(1, 100))
with Session(self.browser) as session:
make_discoveryrule(
session,
name=name,
hostgroup=self.host_group.name,
host_limit=limit,
)
self.assertIsNotNone(self.discoveryrules.search(name))
for new_limit in '-1', gen_string('alpha'):
with self.subTest(new_limit):
self.discoveryrules.update(
name=name, host_limit=new_limit)
self.assertIsNotNone(
self.discoveryrules.wait_until_element(
common_locators['haserror'])
)
self.assertEqual(
self.discoveryrules.get_attribute_value(name, 'host_limit'),
limit
)
@run_only_on('sat')
@tier1
def test_negative_update_priority(self):
"""Update discovery rule priority using invalid values
@id: d44ad49c-5d95-442f-a1b3-cd82dd8ffabf
@Assert: Rule priority is not updated
"""
name = gen_string('alpha')
priority = str(gen_integer(1, 100))
with Session(self.browser) as session:
make_discoveryrule(
session,
name=name,
hostgroup=self.host_group.name,
priority=priority,
)
self.assertIsNotNone(self.discoveryrules.search(name))
for new_priority in '-1', gen_string('alpha'):
with self.subTest(new_priority):
self.discoveryrules.update(
name=name, priority=new_priority)
self.assertIsNotNone(
self.discoveryrules.wait_until_element(
common_locators['haserror'])
)
self.assertEqual(
self.discoveryrules.get_attribute_value(name, 'priority'),
priority
)
| gpl-3.0 | 5,553,202,389,800,551,000 | 34.116352 | 79 | 0.563849 | false |
phwallen/smrc | heSensor.py | 1 | 4357 | '''
Simple Model Railway Automation
Hall-effect Sensor Support Module
Author : Peter Wallen
Created : 21/1/13
Version 1.0
This code encapsulates hardware associated with sensors used to detect the location of trains.
The hardware supported comprises:
One or more Microchip MCP23017 16-Bit I/O Expanders acting as sensor controllers.
Each sensor controller can be connected to a maximum of 16 hall-effect sensors.
This module requires python-smbus
'''
import smbus
import time
bus = 0
def i2Cbus_open():
'''
This function must be called once by the automation script to open the I2C bus between
the Rpi and the sensor controller(s).
'''
global bus
try:
bus = smbus.SMBus(0)
except EnvironmentError as e:
print e
raise RuntimeError("Unable to open I2C bus")
def config(address):
'''
This function must be called once by the automation script for each sensor controller.
The address of the controller is determined by the A0,A1,A2 pins on the MCP23017 chip.
eg. If A0,A1 and A2 are LOW then the address should be 0x20.
For information about configuring the sensor controller see the Microchip MCP23017 datasheet.
For example to connect sensors to GPA0 - GPA7, use GPB0 - GPB7 to drive LED indicators and
enable interrupts to allow the last sensor triggered to be stored in the interrupt capture register,
configure as follows:
bus.write_byte_data(address,IODIRA,0xff) # set all ports in bank A to input
bus.write_byte_data(address,IODIRB,0x00) # set all ports in bank B to output
bus.write_byte_data(address,GPPUA,0xff) # enable pullup resistors for bank A
bus.write_byte_data(address,GPINTENA,0xff) # enable interrupts on port A
'''
global bus
# MCP23017 register constants
IODIRA = 0x00
IODIRB = 0x01
GPINTENA = 0X04
GPINTENB = 0x05
GPPUA = 0x0c
GPPUB = 0x0d
INTCAPA= 0x10
INTCAPB= 0x11
GPIOA = 0x12
GPIOB = 0x13
bus.write_byte_data(address,IODIRA,0xff) # set all ports in bank A to input
bus.write_byte_data(address,IODIRB,0x00) # set all ports in bank B to output
bus.write_byte_data(address,GPPUA,0xff) # enable pullup resistors for bank A
bus.write_byte_data(address,GPINTENA,0xff) # enable interrupts on port A
class Sensor(object):
'''
The class describing a sensor object.
A sensor object is associated with each train detection sensor.
'''
def __init__(self,address,bank,port):
'''
The class constructor is called with the following parameters:
address : the address of the sensor controller on the I2C bus eg. 0X20
bank : the register group the sensor is connected to: 'A' for GPA0 - GPA7 and 'B' for GPB0 - GPB7
port : the port on the sensor controller the sensor is connected to (1 - 8).
NB. port 1 corresponds to pin GPx0 and port 8 corresponds to pin GPx7
where x = A or B
'''
global bus
mask_table = [0x00,0x01,0x02,0x04,0x08,0x10,0x20,0x40,0x80]
if bus == 0 :
raise RuntimeError("I2C bus has not been opened")
self.address = address
self.port = 0
if bank == "A" :
self.iodir = 0x00
self.gpinten = 0x04
self.gppu = 0x0c
self.intcap = 0x10
self.gpio = 0x12
elif bank == "B" :
self.iodir = 0x01
self.gpinten = 0x05
self.gppu = 0x0d
self.intcap = 0x11
self.gpio = 0x13
else :
raise RuntimeError("Invalid bank must be A or B")
if port > 8 or port < 1 :
raise RuntimeError("Invalid port must be between 1 and 8")
else :
self.port |= mask_table[port]
def wait(self) :
'''
This method will poll the interrupt capture register for the sensor until it is triggered.
In addition, it will control a status LED connected to the corresponding port on bank B.
'''
x = bus.read_byte_data(self.address,self.intcap)
# switch off indicator for appropriate port
status = bus.read_byte_data(self.address,0x13)
status &= self.port
bus.write_byte_data(self.address,0x13,status)
while (x & self.port) :
x = bus.read_byte_data(self.address,self.intcap)
time.sleep(1)
# switch on indicator for appropriate port
status = bus.read_byte_data(self.address,0x13)
status |= self.port
bus.write_byte_data(self.address,0x13,status)
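# Illustrative usage sketch (not part of the original module): how an
# automation script might wire these pieces together. The controller address
# 0x20 and the bank/port values are assumptions for the example.
#
# import heSensor
# heSensor.i2Cbus_open()
# heSensor.config(0x20)
# sensor = heSensor.Sensor(0x20, 'A', 1)
# sensor.wait()   # blocks until a train trips this hall-effect sensor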
| mit | 5,683,908,779,810,081,000 | 32.775194 | 103 | 0.684416 | false |
babelsberg/babelsberg-r | tests/modules/ffi/test_function.py | 1 | 15258 | from tests.modules.ffi.base import BaseFFITest
from topaz.modules.ffi.type import type_names, aliases
from rpython.rlib import clibffi
import sys
if sys.platform == 'darwin':
ext = 'dylib'
libm = 'libm.' + ext
libc = 'libc.' + ext
else:
libm = 'libm.so'
libc = 'libc.so.6'
substitutions = {}
code_ffi_type = "FFI::Type::"
flat_aliases = reduce(lambda x, y: x + y, aliases)
for tn in type_names + flat_aliases:
substitutions[tn.lower()] = code_ffi_type + tn
def typeformat(rubycode):
return (rubycode.replace('\{', 'BRACE_OPEN').
replace('\}', 'BRACE_CLOSE').
format(**substitutions).
replace('BRACE_OPEN', '{').
replace('BRACE_CLOSE', '}'))
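# For example (names taken from the substitutions table built above):
# typeformat("FFI::Function.new({void}, [{int8}])") returns
# "FFI::Function.new(FFI::Type::VOID, [FFI::Type::INT8])", while escaped
# braces \{ \} survive as literal { } for Ruby hash syntax.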
class TestFunction(BaseFFITest):
def test_it_has_FFI_Pointer_as_ancestor(self, space):
assert self.ask(space, "FFI::Function.ancestors.include? FFI::Pointer")
class TestFunction__new(BaseFFITest):
def test_it_needs_at_least_a_type_signature(self, space):
space.execute(typeformat("FFI::Function.new({void}, [{int8}, {int16}])"))
def test_it_takes_a_DynamicLibrary__Symbol_as_3rd_argument(self, space):
space.execute(typeformat("""
dlsym = FFI::DynamicLibrary.open('%s').find_function(:sin)
FFI::Function.new({void}, [{int8}, {int16}], dlsym)
""" % libm))
with self.raises(space, "TypeError",
"can't convert Fixnum into FFI::DynamicLibrary::Symbol"):
space.execute(typeformat("""
FFI::Function.new({void}, [{uint8}], 500)"""))
def test_it_takes_a_hash_as_4_argument(self, space):
space.execute(typeformat("""
FFI::Function.new({void}, [{int8}, {int16}],
FFI::DynamicLibrary.open('%s').find_function(:cos),
\{\})
""" % libm))
#def test_it_reacts_to_messy_signature_with_TypeError(self, space):
# with self.raises(space, "TypeError", "unable to resolve type '1'"):
# space.execute("FFI::Function.new(1, [])")
# with self.raises(space, "TypeError", "unable to resolve type '2'"):
# space.execute("FFI::Function.new({void}, [2])")
# with self.raises(space, "TypeError",
# "unable to resolve type 'null'"):
# space.execute("FFI::Function.new(:null, [])")
# with self.raises(space, "TypeError",
# "unable to resolve type 'array'"):
# space.execute("FFI::Function.new({int32}, [:array])")
def test_it_creates_the_following_low_level_data(self, space):
w_function = space.execute(typeformat("""
tan = FFI::DynamicLibrary.open('%s').find_function(:tan)
FFI::Function.new({float64}, [{float64}], tan, \{\})
""" % libm))
w_float64 = space.execute("FFI::Type::FLOAT64")
assert w_function.w_info.arg_types_w == [w_float64]
assert w_function.w_info.w_ret_type == w_float64
tan = clibffi.CDLL(libm).getpointer('tan',
[clibffi.ffi_type_double],
clibffi.ffi_type_double)
assert w_function.ptr == tan.funcsym
class TestFunction_attach(BaseFFITest):
def make_mock_library_code(self, libname):
return """
module LibraryMock
local = FFI::DynamicLibrary::RTLD_LOCAL
@ffi_libs = [FFI::DynamicLibrary.open('%s', local)]
@attachments = \{\}
self.singleton_class.attr_reader :attachments
def self.find_function(name)
@ffi_libs[0].find_function(name)
end
end
""" % libname
def test_it_works_with_pow_from_libm(self, space):
w_res = space.execute(typeformat("""
%s
sym_pow = LibraryMock.find_function(:pow)
func = FFI::Function.new({float64}, [{float64}, {float64}], sym_pow)
func.attach(LibraryMock, 'power')
LibraryMock.attachments.include? :power
(0..5).each.map \{ |x| LibraryMock.power(x, 2) \}
""" % self.make_mock_library_code(libm)))
assert self.unwrap(space, w_res) == [0.0, 1.0, 4.0, 9.0, 16.0, 25.0]
def test_it_works_with_abs_from_libc(self, space):
w_res = space.execute(typeformat("""
%s
sym_abs = LibraryMock.find_function(:abs)
func = FFI::Function.new({int32}, [{int32}], sym_abs)
func.attach(LibraryMock, 'abs')
LibraryMock.attachments.include? :abs
(-3..+3).each.map \{ |x| LibraryMock.abs(x) \}
""" % self.make_mock_library_code(libc)))
assert self.unwrap(space, w_res) == [3, 2, 1, 0, 1, 2, 3]
def test_it_works_with_strings(self, space):
w_res = space.execute(typeformat("""
%s
sym_strcat = LibraryMock.find_function(:strcat)
func = FFI::Function.new({string}, [{string}, {string}], sym_strcat)
func.attach(LibraryMock, 'strcat')
LibraryMock.strcat("Well ", "done!")
""" % self.make_mock_library_code(libc)))
assert self.unwrap(space, w_res) == "Well done!"
def test_it_works_with_float(self, space, libtest_so):
w_res = space.execute(typeformat("""
%s
sym_add_float = LibraryMock.find_function(:add_float)
func = FFI::Function.new({float32}, [{float32}, {float32}],
sym_add_float)
func.attach(LibraryMock, 'add_float')
LibraryMock.add_float(1.5, 2.25)
""" % self.make_mock_library_code(libtest_so)))
assert self.unwrap(space, w_res) == 3.75
def make_question_code(self, signchar, size, left=1, right=2,
with_name=None):
default_T = '%sint%s' %('' if signchar == 's' else 'u', size)
T = default_T if with_name is None else with_name
fn = 'add_%s%s' %(signchar, size)
plus_or_minus = '-' if signchar == 's' else '+'
return ("""
FFI::Function.new({T}, [{T}, {T}],
LibraryMock.find_function(:fn)).
attach(LibraryMock, 'fn')
LibraryMock.fn(+|-%s, +|-%s) == +|-%s
""".replace('T', T).replace('fn', fn).replace('+|-', plus_or_minus) %
(left, right, left+right))
def type_works(self, space, libtest_so, typechar, size, left=1, right=2,
with_name=None):
return self.ask(space,
typeformat(self.make_mock_library_code(libtest_so) +
self.make_question_code(typechar, size,
left, right,
with_name)))
def test_it_works_with_unsigned_int8(self, space, libtest_so):
assert self.type_works(space, libtest_so, 'u', '8')
def test_it_works_with_signed_int8(self, space, libtest_so):
assert self.type_works(space, libtest_so, 's', '8')
def test_it_works_with_unsigned_int16(self, space, libtest_so):
assert self.type_works(space, libtest_so, 'u', '16')
def test_it_works_with_signed_int16(self, space, libtest_so):
assert self.type_works(space, libtest_so, 's', '16')
def test_it_works_with_unsigned_shorts(self, space, libtest_so):
assert self.type_works(space, libtest_so, 'u', '16', with_name='short')
def test_it_works_with_signed_shorts(self, space, libtest_so):
assert self.type_works(space, libtest_so, 's', '16', with_name='short')
def test_it_works_with_unsigned_int32(self, space, libtest_so):
assert self.type_works(space, libtest_so, 'u', '32')
def test_it_works_with_signed_int32(self, space, libtest_so):
assert self.type_works(space, libtest_so, 's', '32')
def test_it_works_with_unsigned_ints(self, space, libtest_so):
assert self.type_works(space, libtest_so, 'u', '32', with_name='int')
def test_it_works_with_signed_ints(self, space, libtest_so):
assert self.type_works(space, libtest_so, 's', '32', with_name='int')
def test_it_works_with_unsigned_int64(self, space, libtest_so):
assert self.type_works(space, libtest_so, 'u', '64', 2**61, 2**61)
def test_it_works_with_signed_int64(self, space, libtest_so):
assert self.type_works(space, libtest_so, 's', '64', 2**61, 2**61)
def test_it_works_with_unsigned_long_longs(self, space, libtest_so):
assert self.type_works(space, libtest_so, 'u', '64', 2**61, 2**61,
with_name='long_long')
def test_it_works_with_signed_long_longs(self, space, libtest_so):
assert self.type_works(space, libtest_so, 's', '64', 2**61, 2**61,
with_name='long_long')
def test_it_works_with_long(self, space, libtest_so):
w_res = space.execute(typeformat("""
%s
sym_add_long = LibraryMock.find_function(:add_long)
func = FFI::Function.new({long}, [{long}, {long}], sym_add_long)
func.attach(LibraryMock, 'add_long')
LibraryMock.add_long(-2, -10)
""" % self.make_mock_library_code(libtest_so)))
res = self.unwrap(space, w_res)
assert (res == -12 if isinstance(res, int) else res.toint() == -12)
def test_it_works_with_ulong(self, space, libtest_so):
w_res = space.execute(typeformat("""
%s
sym_add_ulong = LibraryMock.find_function(:add_ulong)
func = FFI::Function.new({ulong}, [{ulong}, {ulong}], sym_add_ulong)
func.attach(LibraryMock, 'add_ulong')
LibraryMock.add_ulong(2, 10)
""" % self.make_mock_library_code(libtest_so)))
res = self.unwrap(space, w_res)
assert (res == 12 if isinstance(res, int) else res.toint() == 12)
def test_it_returns_nil_for_void(self, space, libtest_so):
w_res = space.execute(typeformat("""
%s
FFI::Function.new({void}, [{uint8}],
LibraryMock.find_function(:set_u8)).
attach(LibraryMock, 'do_nothing')
LibraryMock.do_nothing(0)
""" % self.make_mock_library_code(libtest_so)))
assert w_res is space.w_nil
def test_it_works_with_bools(self, space, libtest_so):
space.execute(typeformat("""
%s
FFI::Function.new({bool}, [{bool}],
LibraryMock.find_function(:bool_reverse_val)).
attach(LibraryMock, 'not')
""" % self.make_mock_library_code(libtest_so)))
w_res = space.execute("LibraryMock.not(true)")
assert w_res is space.w_false
w_res = space.execute("LibraryMock.not(false)")
assert w_res is space.w_true
def test_it_can_convert_nil_to_NULL(self, space, libtest_so):
self.ask(space, typeformat("""
%s
FFI::Function.new({bool}, [{pointer}],
LibraryMock.find_function(:testIsNULL)).
attach(LibraryMock, 'test_is_NULL')
LibraryMock.test_is_NULL(nil)
""" % self.make_mock_library_code(libtest_so)))
def test_it_works_with_pointer_argument(self, ffis, libtest_so):
w_res = ffis.execute(typeformat("""
%s
FFI::Function.new({void}, [{int}, {int}, {pointer}],
LibraryMock.find_function(:ref_add_int32_t)).
attach(LibraryMock, 'add')
res = FFI::MemoryPointer.new(:int, 1)
LibraryMock.add(4, 6, res)
res.read_int32
""" % self.make_mock_library_code(libtest_so)))
assert self.unwrap(ffis, w_res) == 10
def test_it_returns_pointer_object(self, space, libtest_so):
space.execute(typeformat("""
%s
FFI::Function.new({pointer}, [{int}],
LibraryMock.find_function(:ptr_malloc)).
attach(LibraryMock, 'malloc')
""" % self.make_mock_library_code(libtest_so)))
assert self.ask(space, """
LibraryMock.malloc(8).kind_of?(FFI::Pointer)
""")
def test_it_can_use_one_proc_as_callback(self, ffis):
w_res = ffis.execute(typeformat("""
%s
comparator = FFI::CallbackInfo.new({int},
[{pointer},
{pointer}])
FFI::Function.new({int},
[{pointer},
{ulong},
{ulong},
comparator],
LibraryMock.find_function(:qsort)).
attach(LibraryMock, 'qsort')
p = FFI::MemoryPointer.new(:int32, 2)
p.put_int32(0, 5)
p.put_int32(4, 3)
LibraryMock.qsort(p, 2, 4) do |p1, p2|
i1 = p1.get_int32(0)
i2 = p2.get_int32(0)
i1 < i2 ? -1 : (i1 > i2 ? 1 : 0)
end
[p.get_int32(0), p.get_int32(4)]
""" % self.make_mock_library_code(libc)))
assert self.unwrap(ffis, w_res) == [3, 5]
def test_it_can_take_enum_arguments(self, ffis, libtest_so):
w_res = ffis.execute(typeformat("""
%s
color_enum = FFI::Enum.new([:black, 0,
:white, 255,
:gray, 128])
Color = FFI::Type::Mapped.new(color_enum)
options = \{:type_map => \{color_enum => Color\}\}
FFI::Function.new({uint8}, [Color, Color],
LibraryMock.find_function(:add_u8), options).
attach(LibraryMock, 'add_color')
col1 = LibraryMock.add_color(:black, :white)
""" % self.make_mock_library_code(libtest_so)))
assert self.unwrap(ffis, w_res) == 255
with self.raises(ffis, "ArgumentError",
"invalid enum value, :red"):
ffis.execute("""
LibraryMock.add_color(:gray, :red)
""")
def test_it_can_return_enums(self, ffis, libtest_so):
w_res = ffis.execute(typeformat("""
%s
color_enum = FFI::Enum.new([:black, 0,
:white, 255,
:gray, 128])
Color = FFI::Type::Mapped.new(color_enum)
options = \{:type_map => \{color_enum => Color\}\}
FFI::Function.new(Color, [{uint8}, {uint8}],
LibraryMock.find_function(:add_u8), options).
attach(LibraryMock, 'add_color')
col1 = LibraryMock.add_color(120, 8)
""" % self.make_mock_library_code(libtest_so)))
assert self.unwrap(ffis, w_res) == 'gray'
w_res = ffis.execute("LibraryMock.add_color(1, 2)")
assert self.unwrap(ffis, w_res) == 3
def test_it_raises_ArgumentError_calling_func_with_void_arg(self, space):
with self.raises(space, 'ArgumentError',
"arguments cannot be of type void"):
w_res = space.execute(typeformat("""
%s
FFI::Function.new({uint32}, [{void}],
LibraryMock.find_function(:abs)).
attach(LibraryMock, 'abs')
LibraryMock.abs(-7)
""" % self.make_mock_library_code(libc)))
| bsd-3-clause | -1,216,122,773,162,513,700 | 42.223796 | 81 | 0.54247 | false |
jcmgray/quijy | quimb/tensor/__init__.py | 1 | 2984 | from .tensor_core import (
get_contract_strategy,
set_contract_strategy,
contract_strategy,
get_contract_backend,
set_contract_backend,
contract_backend,
get_tensor_linop_backend,
set_tensor_linop_backend,
tensor_linop_backend,
tensor_contract,
tensor_split,
tensor_direct_product,
rand_uuid,
bonds,
bonds_size,
connect,
new_bond,
Tensor,
TensorNetwork,
TNLinearOperator1D,
PTensor,
)
from .tensor_gen import (
rand_tensor,
rand_phased,
MPS_rand_state,
MPS_product_state,
MPS_computational_state,
MPS_rand_computational_state,
MPS_neel_state,
MPS_ghz_state,
MPS_w_state,
MPS_zero_state,
MPS_sampler,
MPO_identity,
MPO_identity_like,
MPO_zeros,
MPO_zeros_like,
MPO_rand,
MPO_rand_herm,
SpinHam,
MPO_ham_ising,
MPO_ham_XY,
MPO_ham_heis,
MPO_ham_mbl,
NNI_ham_ising,
NNI_ham_XY,
NNI_ham_heis,
NNI_ham_mbl,
)
from .tensor_1d import (
MatrixProductState,
MatrixProductOperator,
Dense1D,
SuperOperator1D,
align_TN_1D,
expec_TN_1D,
gate_TN_1D,
superop_TN_1D,
)
from .tensor_dmrg import (
MovingEnvironment,
DMRG,
DMRG1,
DMRG2,
DMRGX,
)
from .tensor_mera import (
MERA,
)
from .tensor_tebd import (
TEBD,
)
from .circuit import (
Circuit,
CircuitMPS,
CircuitDense,
)
from .circuit_gen import (
circ_ansatz_1D_zigzag,
circ_ansatz_1D_brickwork,
circ_ansatz_1D_rand,
)
__all__ = (
"contract_strategy",
"get_contract_strategy",
"set_contract_strategy",
"contract_backend",
"get_contract_backend",
"set_contract_backend",
"tensor_linop_backend",
"get_tensor_linop_backend",
"set_tensor_linop_backend",
"tensor_contract",
"tensor_split",
"tensor_direct_product",
"rand_uuid",
"bonds",
"bonds_size",
"connect",
"new_bond",
"Tensor",
"TensorNetwork",
"TNLinearOperator1D",
"PTensor",
"rand_tensor",
"rand_phased",
"MPS_rand_state",
"MPS_product_state",
"MPS_computational_state",
"MPS_rand_computational_state",
"MPS_neel_state",
"MPS_ghz_state",
"MPS_w_state",
"MPS_zero_state",
"MPS_sampler",
"MPO_identity",
"MPO_identity_like",
"MPO_zeros",
"MPO_zeros_like",
"MPO_rand",
"MPO_rand_herm",
"SpinHam",
"MPO_ham_ising",
"MPO_ham_XY",
"MPO_ham_heis",
"MPO_ham_mbl",
"NNI_ham_ising",
"NNI_ham_XY",
"NNI_ham_heis",
"NNI_ham_mbl",
"MatrixProductState",
"MatrixProductOperator",
"Dense1D",
"SuperOperator1D",
"align_TN_1D",
"expec_TN_1D",
"gate_TN_1D",
"superop_TN_1D",
"MovingEnvironment",
"DMRG",
"DMRG1",
"DMRG2",
"DMRGX",
"MERA",
"TEBD",
"Circuit",
"CircuitMPS",
"CircuitDense",
"circ_ansatz_1D_zigzag",
"circ_ansatz_1D_brickwork",
"circ_ansatz_1D_rand",
)
| mit | 4,681,047,906,577,972,000 | 18.251613 | 35 | 0.594839 | false |
hazelnusse/pydy | examples/pointfootwalker/GarciasPFW_eoms.py | 1 | 2122 | # Sun Aug 23 13:12:56 2009
from numpy import sin, cos, tan, vectorize
def f(x, t, parameter_list):
# Unpacking the parameters
Mh, mf, g, L, q3 = parameter_list
# Unpacking the states (q's and u's)
q1, q2, u1, u2 = x
s1 = sin(q1)
c2 = cos(q2)
s2 = sin(q2)
c1 = cos(q1)
# Kinematic differential equations
q1p = u1
q2p = -u2 + u1
# Dynamic differential equations
u1p = -L**4*mf**2*u2**2*s2/(-L**4*mf**2 + L**4*mf**2*c2**2 - Mh*mf*L**4) + g*L**3*mf**2*cos(q3)*c1/(-L**4*mf**2 + L**4*mf**2*c2**2 - Mh*mf*L**4) + L**4*mf**2*u1**2*c2*s2/(-L**4*mf**2 + L**4*mf**2*c2**2 - Mh*mf*L**4) - g*L**3*mf**2*sin(q3)*s1/(-L**4*mf**2 + L**4*mf**2*c2**2 - Mh*mf*L**4) + Mh*g*mf*L**3*cos(q3)*c1/(-L**4*mf**2 + L**4*mf**2*c2**2 - Mh*mf*L**4) + g*L**3*mf**2*c2**2*sin(q3)*s1/(-L**4*mf**2 + L**4*mf**2*c2**2 - Mh*mf*L**4) - Mh*g*mf*L**3*sin(q3)*s1/(-L**4*mf**2 + L**4*mf**2*c2**2 - Mh*mf*L**4) - g*L**3*mf**2*c2**2*cos(q3)*c1/(-L**4*mf**2 + L**4*mf**2*c2**2 - Mh*mf*L**4) - g*L**3*mf**2*cos(q3)*c2*s1*s2/(-L**4*mf**2 + L**4*mf**2*c2**2 - Mh*mf*L**4) - g*L**3*mf**2*c1*c2*sin(q3)*s2/(-L**4*mf**2 + L**4*mf**2*c2**2 - Mh*mf*L**4)
u2p = L**4*mf**2*u1**2*s2/(-L**4*mf**2 + L**4*mf**2*c2**2 - Mh*mf*L**4) + Mh*mf*L**4*u1**2*s2/(-L**4*mf**2 + L**4*mf**2*c2**2 - Mh*mf*L**4) - L**4*mf**2*u2**2*c2*s2/(-L**4*mf**2 + L**4*mf**2*c2**2 - Mh*mf*L**4) - g*L**3*mf**2*cos(q3)*s1*s2/(-L**4*mf**2 + L**4*mf**2*c2**2 - Mh*mf*L**4) - g*L**3*mf**2*c1*sin(q3)*s2/(-L**4*mf**2 + L**4*mf**2*c2**2 - Mh*mf*L**4) - Mh*g*mf*L**3*cos(q3)*s1*s2/(-L**4*mf**2 + L**4*mf**2*c2**2 - Mh*mf*L**4) - Mh*g*mf*L**3*c1*sin(q3)*s2/(-L**4*mf**2 + L**4*mf**2*c2**2 - Mh*mf*L**4)
return [q1p, q2p, u1p, u2p]
def qdot2u(q, qd, parameter_list):
# Unpacking the parameters
Mh, mf, g, L, q3 = parameter_list
# Unpacking the q's and qdots
q1, q2 = q
q1p, q2p = qd
s1 = sin(q1)
c2 = cos(q2)
s2 = sin(q2)
c1 = cos(q1)
# Kinematic differential equations
u1 = q1p
u2 = q1p - q2p
return [u1, u2] | bsd-3-clause | -8,282,384,684,407,211,000 | 54.868421 | 747 | 0.49623 | false |
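# Integration sketch (illustrative; the parameter and initial values below are
# assumptions): f(x, t, parameter_list) already has the call signature that
# scipy.integrate.odeint expects once the parameter list is passed via args.
#
# from numpy import linspace
# from scipy.integrate import odeint
# params = [10.0, 1.0, 9.81, 1.0, 0.05]   # Mh, mf, g, L, q3 (slope angle)
# x0 = [0.2, 0.4, -0.5, -0.5]             # q1, q2, u1, u2
# t = linspace(0.0, 5.0, 500)
# trajectory = odeint(f, x0, t, args=(params,))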
pikamar/scoop | config/settings/production.py | 1 | 7423 | # -*- coding: utf-8 -*-
'''
Production Configurations
- Use djangosecure
- Use Amazon's S3 for storing static files and uploaded media
- Use mailgun to send emails
- Use Redis on Heroku
- Use sentry for error logging
'''
from __future__ import absolute_import, unicode_literals
from boto.s3.connection import OrdinaryCallingFormat
from django.utils import six
import logging
from .common import * # noqa
# SECRET CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Raises ImproperlyConfigured exception if DJANGO_SECRET_KEY not in os.environ
SECRET_KEY = env("DJANGO_SECRET_KEY")
# This ensures that Django will be able to detect a secure connection
# properly on Heroku.
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# django-secure
# ------------------------------------------------------------------------------
INSTALLED_APPS += ("djangosecure", )
# raven sentry client
# See https://docs.getsentry.com/hosted/clients/python/integrations/django/
INSTALLED_APPS += ('raven.contrib.django.raven_compat', )
SECURITY_MIDDLEWARE = (
'djangosecure.middleware.SecurityMiddleware',
)
RAVEN_MIDDLEWARE = ('raven.contrib.django.raven_compat.middleware.Sentry404CatchMiddleware',
'raven.contrib.django.raven_compat.middleware.SentryResponseErrorIdMiddleware',)
MIDDLEWARE_CLASSES = SECURITY_MIDDLEWARE + \
RAVEN_MIDDLEWARE + MIDDLEWARE_CLASSES
# set this to 60 seconds and then to 518400 when you can prove it works
SECURE_HSTS_SECONDS = 60
SECURE_HSTS_INCLUDE_SUBDOMAINS = env.bool(
"DJANGO_SECURE_HSTS_INCLUDE_SUBDOMAINS", default=True)
SECURE_FRAME_DENY = env.bool("DJANGO_SECURE_FRAME_DENY", default=True)
SECURE_CONTENT_TYPE_NOSNIFF = env.bool(
"DJANGO_SECURE_CONTENT_TYPE_NOSNIFF", default=True)
SECURE_BROWSER_XSS_FILTER = True
SESSION_COOKIE_SECURE = False
SESSION_COOKIE_HTTPONLY = True
SECURE_SSL_REDIRECT = env.bool("DJANGO_SECURE_SSL_REDIRECT", default=True)
# SITE CONFIGURATION
# ------------------------------------------------------------------------------
# Hosts/domain names that are valid for this site
# See https://docs.djangoproject.com/en/1.6/ref/settings/#allowed-hosts
ALLOWED_HOSTS = env.list('DJANGO_ALLOWED_HOSTS', default=['example.com'])
# END SITE CONFIGURATION
INSTALLED_APPS += ("gunicorn", )
# STORAGE CONFIGURATION
# ------------------------------------------------------------------------------
# Uploaded Media Files
# ------------------------
# See: http://django-storages.readthedocs.org/en/latest/index.html
INSTALLED_APPS += (
'storages',
)
DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
AWS_ACCESS_KEY_ID = env('DJANGO_AWS_ACCESS_KEY_ID')
AWS_SECRET_ACCESS_KEY = env('DJANGO_AWS_SECRET_ACCESS_KEY')
AWS_STORAGE_BUCKET_NAME = env('DJANGO_AWS_STORAGE_BUCKET_NAME')
AWS_AUTO_CREATE_BUCKET = True
AWS_QUERYSTRING_AUTH = False
AWS_S3_CALLING_FORMAT = OrdinaryCallingFormat()
# AWS cache settings, don't change unless you know what you're doing:
AWS_EXPIRY = 60 * 60 * 24 * 7
# TODO See: https://github.com/jschneier/django-storages/issues/47
# Revert the following and use str after the above-mentioned bug is fixed in
# either django-storage-redux or boto
AWS_HEADERS = {
'Cache-Control': six.b('max-age=%d, s-maxage=%d, must-revalidate' % (
AWS_EXPIRY, AWS_EXPIRY))
}
# URL that handles the media served from MEDIA_ROOT, used for managing
# stored files.
MEDIA_URL = 'https://s3.amazonaws.com/%s/' % AWS_STORAGE_BUCKET_NAME
# Static Assets
# ------------------------
STATICFILES_STORAGE = DEFAULT_FILE_STORAGE
STATIC_URL = MEDIA_URL
# See: https://github.com/antonagestam/collectfast
# For Django 1.7+, 'collectfast' should come before
# 'django.contrib.staticfiles'
AWS_PRELOAD_METADATA = True
INSTALLED_APPS = ('collectfast', ) + INSTALLED_APPS
# EMAIL
# ------------------------------------------------------------------------------
DEFAULT_FROM_EMAIL = env('DJANGO_DEFAULT_FROM_EMAIL',
default='Scoop <[email protected]>')
EMAIL_BACKEND = 'django_mailgun.MailgunBackend'
MAILGUN_ACCESS_KEY = env('DJANGO_MAILGUN_API_KEY')
MAILGUN_SERVER_NAME = env('DJANGO_MAILGUN_SERVER_NAME')
EMAIL_SUBJECT_PREFIX = env("DJANGO_EMAIL_SUBJECT_PREFIX", default='[Scoop] ')
SERVER_EMAIL = env('DJANGO_SERVER_EMAIL', default=DEFAULT_FROM_EMAIL)
# TEMPLATE CONFIGURATION
# ------------------------------------------------------------------------------
# See:
# https://docs.djangoproject.com/en/dev/ref/templates/api/#django.template.loaders.cached.Loader
TEMPLATES[0]['OPTIONS']['loaders'] = [
('django.template.loaders.cached.Loader', [
'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', ]),
]
# DATABASE CONFIGURATION
# ------------------------------------------------------------------------------
# Raises ImproperlyConfigured exception if DATABASE_URL not in os.environ
DATABASES['default'] = env.db("DATABASE_URL")
# CACHING
# ------------------------------------------------------------------------------
# Heroku URL does not pass the DB number, so we parse it in
CACHES = {
"default": {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": "{0}/{1}".format(env.cache_url('REDIS_URL', default="redis://127.0.0.1:6379"), 0),
"OPTIONS": {
"CLIENT_CLASS": "django_redis.client.DefaultClient",
"IGNORE_EXCEPTIONS": True, # mimics memcache behavior.
# http://niwinz.github.io/django-redis/latest/#_memcached_exceptions_behavior
}
}
}
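# e.g. with REDIS_URL=redis://127.0.0.1:6379 (the default above), LOCATION
# resolves to "redis://127.0.0.1:6379/0" -- the Heroku-style URL with the DB
# number appended by the format string.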
# Sentry Configuration
SENTRY_DSN = env('DJANGO_SENTRY_DSN')
SENTRY_CLIENT = env('DJANGO_SENTRY_CLIENT', default='raven.contrib.django.raven_compat.DjangoClient')
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'root': {
'level': 'WARNING',
'handlers': ['sentry'],
},
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s '
'%(process)d %(thread)d %(message)s'
},
},
'handlers': {
'sentry': {
'level': 'ERROR',
'class': 'raven.contrib.django.raven_compat.handlers.SentryHandler',
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'verbose'
}
},
'loggers': {
'django.db.backends': {
'level': 'ERROR',
'handlers': ['console'],
'propagate': False,
},
'raven': {
'level': 'DEBUG',
'handlers': ['console'],
'propagate': False,
},
'sentry.errors': {
'level': 'DEBUG',
'handlers': ['console'],
'propagate': False,
},
'django.security.DisallowedHost': {
'level': 'ERROR',
'handlers': ['console', 'sentry'],
'propagate': False,
},
},
}
SENTRY_CELERY_LOGLEVEL = env.int('DJANGO_SENTRY_LOG_LEVEL', logging.INFO)
RAVEN_CONFIG = {
'CELERY_LOGLEVEL': env.int('DJANGO_SENTRY_LOG_LEVEL', logging.INFO),
'DSN': SENTRY_DSN
}
# Custom Admin URL, use {% url 'admin:index' %}
ADMIN_URL = env('DJANGO_ADMIN_URL')
# Your production stuff: Below this line define 3rd party library settings
| bsd-3-clause | -8,161,419,392,009,591,000 | 34.014151 | 117 | 0.605011 | false |
jtpereyda/boofuzz | boofuzz/primitives/byte.py | 1 | 1725 | import struct
import six
from .bit_field import BitField
class Byte(BitField):
"""The byte sized bit field primitive.
:type name: str, optional
:param name: Name, for referencing later. Names should always be provided, but if not, a default name will be given,
defaults to None
:type default_value: int, optional
:param default_value: Default integer value, defaults to 0
:type max_num: int, optional
:param max_num: Maximum number to iterate up to, defaults to None
:type endian: char, optional
:param endian: Endianness of the bit field (LITTLE_ENDIAN: <, BIG_ENDIAN: >), defaults to LITTLE_ENDIAN
:type output_format: str, optional
:param output_format: Output format, "binary" or "ascii", defaults to binary
:type signed: bool, optional
:param signed: Make size signed vs. unsigned (applicable only with format="ascii"), defaults to False
:type full_range: bool, optional
:param full_range: If enabled the field mutates through *all* possible values, defaults to False
:type fuzz_values: list, optional
:param fuzz_values: List of custom fuzz values to add to the normal mutations, defaults to None
:type fuzzable: bool, optional
:param fuzzable: Enable/disable fuzzing of this primitive, defaults to true
"""
def __init__(self, *args, **kwargs):
# Inject the one parameter we care to pass in (width)
super(Byte, self).__init__(width=8, *args, **kwargs)
def encode(self, value, mutation_context):
if not isinstance(value, (six.integer_types, list, tuple)):
value = struct.unpack(self.endian + "B", value)[0]
return super(Byte, self).encode(value, mutation_context)
| gpl-2.0 | -9,200,485,191,700,654,000 | 43.230769 | 120 | 0.688116 | false |
Smart-Torvy/torvy-home-assistant | tests/components/test_http.py | 1 | 7912 | """The tests for the Home Assistant HTTP component."""
# pylint: disable=protected-access,too-many-public-methods
import logging
import time
from ipaddress import ip_network
from unittest.mock import patch
import requests
from homeassistant import bootstrap, const
import homeassistant.components.http as http
from tests.common import get_test_instance_port, get_test_home_assistant
API_PASSWORD = "test1234"
SERVER_PORT = get_test_instance_port()
HTTP_BASE = "127.0.0.1:{}".format(SERVER_PORT)
HTTP_BASE_URL = "http://{}".format(HTTP_BASE)
HA_HEADERS = {
const.HTTP_HEADER_HA_AUTH: API_PASSWORD,
const.HTTP_HEADER_CONTENT_TYPE: const.CONTENT_TYPE_JSON,
}
# don't add 127.0.0.1/::1 as trusted, as it may interfere with other test cases
TRUSTED_NETWORKS = ["192.0.2.0/24",
"2001:DB8:ABCD::/48",
'100.64.0.1',
'FD01:DB8::1']
CORS_ORIGINS = [HTTP_BASE_URL, HTTP_BASE]
hass = None
def _url(path=""):
"""Helper method to generate URLs."""
return HTTP_BASE_URL + path
def setUpModule(): # pylint: disable=invalid-name
"""Initialize a Home Assistant server."""
global hass
hass = get_test_home_assistant()
hass.bus.listen('test_event', lambda _: _)
hass.states.set('test.test', 'a_state')
bootstrap.setup_component(
hass, http.DOMAIN,
{http.DOMAIN: {http.CONF_API_PASSWORD: API_PASSWORD,
http.CONF_SERVER_PORT: SERVER_PORT,
http.CONF_CORS_ORIGINS: CORS_ORIGINS}})
bootstrap.setup_component(hass, 'api')
hass.wsgi.trusted_networks = [
ip_network(trusted_network)
for trusted_network in TRUSTED_NETWORKS]
hass.start()
time.sleep(0.05)
def tearDownModule(): # pylint: disable=invalid-name
"""Stop the Home Assistant server."""
hass.stop()
class TestHttp:
"""Test HTTP component."""
def test_access_denied_without_password(self):
"""Test access without password."""
req = requests.get(_url(const.URL_API))
assert req.status_code == 401
def test_access_denied_with_wrong_password_in_header(self):
"""Test access with wrong password."""
req = requests.get(
_url(const.URL_API),
headers={const.HTTP_HEADER_HA_AUTH: 'wrongpassword'})
assert req.status_code == 401
def test_access_denied_with_untrusted_ip(self, caplog):
"""Test access with an untrusted ip address."""
for remote_addr in ['198.51.100.1',
'2001:DB8:FA1::1',
'127.0.0.1',
'::1']:
with patch('homeassistant.components.http.'
'HomeAssistantWSGI.get_real_ip',
return_value=remote_addr):
req = requests.get(_url(const.URL_API),
params={'api_password': ''})
assert req.status_code == 401, \
"{} shouldn't be trusted".format(remote_addr)
def test_access_with_password_in_header(self, caplog):
"""Test access with password in URL."""
# Hide logging from requests package that we use to test logging
caplog.set_level(logging.WARNING,
logger='requests.packages.urllib3.connectionpool')
req = requests.get(
_url(const.URL_API),
headers={const.HTTP_HEADER_HA_AUTH: API_PASSWORD})
assert req.status_code == 200
logs = caplog.text
# assert const.URL_API in logs
assert API_PASSWORD not in logs
def test_access_denied_with_wrong_password_in_url(self):
"""Test access with wrong password."""
req = requests.get(_url(const.URL_API),
params={'api_password': 'wrongpassword'})
assert req.status_code == 401
def test_access_with_password_in_url(self, caplog):
"""Test access with password in URL."""
# Hide logging from requests package that we use to test logging
caplog.set_level(logging.WARNING,
logger='requests.packages.urllib3.connectionpool')
req = requests.get(_url(const.URL_API),
params={'api_password': API_PASSWORD})
assert req.status_code == 200
logs = caplog.text
# assert const.URL_API in logs
assert API_PASSWORD not in logs
def test_access_with_trusted_ip(self, caplog):
"""Test access with trusted addresses."""
for remote_addr in ['100.64.0.1',
'192.0.2.100',
'FD01:DB8::1',
'2001:DB8:ABCD::1']:
with patch('homeassistant.components.http.'
'HomeAssistantWSGI.get_real_ip',
return_value=remote_addr):
req = requests.get(_url(const.URL_API),
params={'api_password': ''})
assert req.status_code == 200, \
"{} should be trusted".format(remote_addr)
def test_cors_allowed_with_password_in_url(self):
"""Test cross origin resource sharing with password in url."""
req = requests.get(_url(const.URL_API),
params={'api_password': API_PASSWORD},
headers={const.HTTP_HEADER_ORIGIN: HTTP_BASE_URL})
allow_origin = const.HTTP_HEADER_ACCESS_CONTROL_ALLOW_ORIGIN
allow_headers = const.HTTP_HEADER_ACCESS_CONTROL_ALLOW_HEADERS
all_allow_headers = ", ".join(const.ALLOWED_CORS_HEADERS)
assert req.status_code == 200
assert req.headers.get(allow_origin) == HTTP_BASE_URL
assert req.headers.get(allow_headers) == all_allow_headers
def test_cors_allowed_with_password_in_header(self):
"""Test cross origin resource sharing with password in header."""
headers = {
const.HTTP_HEADER_HA_AUTH: API_PASSWORD,
const.HTTP_HEADER_ORIGIN: HTTP_BASE_URL
}
req = requests.get(_url(const.URL_API),
headers=headers)
allow_origin = const.HTTP_HEADER_ACCESS_CONTROL_ALLOW_ORIGIN
allow_headers = const.HTTP_HEADER_ACCESS_CONTROL_ALLOW_HEADERS
all_allow_headers = ", ".join(const.ALLOWED_CORS_HEADERS)
assert req.status_code == 200
assert req.headers.get(allow_origin) == HTTP_BASE_URL
assert req.headers.get(allow_headers) == all_allow_headers
def test_cors_denied_without_origin_header(self):
"""Test cross origin resource sharing with password in header."""
headers = {
const.HTTP_HEADER_HA_AUTH: API_PASSWORD
}
req = requests.get(_url(const.URL_API),
headers=headers)
allow_origin = const.HTTP_HEADER_ACCESS_CONTROL_ALLOW_ORIGIN
allow_headers = const.HTTP_HEADER_ACCESS_CONTROL_ALLOW_HEADERS
assert req.status_code == 200
assert allow_origin not in req.headers
assert allow_headers not in req.headers
def test_cors_preflight_allowed(self):
"""Test cross origin resource sharing preflight (OPTIONS) request."""
headers = {
const.HTTP_HEADER_ORIGIN: HTTP_BASE_URL,
'Access-Control-Request-Method': 'GET',
'Access-Control-Request-Headers': 'x-ha-access'
}
req = requests.options(_url(const.URL_API),
headers=headers)
allow_origin = const.HTTP_HEADER_ACCESS_CONTROL_ALLOW_ORIGIN
allow_headers = const.HTTP_HEADER_ACCESS_CONTROL_ALLOW_HEADERS
all_allow_headers = ", ".join(const.ALLOWED_CORS_HEADERS)
assert req.status_code == 200
assert req.headers.get(allow_origin) == HTTP_BASE_URL
assert req.headers.get(allow_headers) == all_allow_headers
| mit | 472,902,248,127,394,500 | 35.127854 | 79 | 0.592391 | false |
pyxll/pyxll-examples | matplotlib/interactiveplot.py | 1 | 3441 | """
Example code showing how to draw an interactive matplotlib figure
in Excel.
While the figure is displayed Excel is still useable in the background
and the chart may be updated with new data by calling the same
function again.
"""
from pyxll import xl_func
# Note: pandas.stats.moments was removed in later pandas releases; on modern
# pandas the equivalent is pandas.Series(ys).ewm(span=span).mean().
from pandas.stats.moments import ewma
# matplotlib imports
from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.backends.backend_qt4agg import NavigationToolbar2QT as NavigationToolbar
from matplotlib.figure import Figure
# Qt imports
from PySide import QtCore, QtGui
import timer # for polling the Qt application
# dict to keep track of any chart windows
_plot_windows = {}
@xl_func("string figname, numpy_column<float> xs, numpy_column<float> ys, int span: string")
def mpl_plot_ewma(figname, xs, ys, span):
"""
Show a matplotlib line plot of xs vs ys and ewma(ys, span) in an interactive window.
:param figname: name to use for this plot's window
:param xs: list of x values as a column
:param ys: list of y values as a column
:param span: ewma span
"""
# Get the Qt app.
# Note: no need to 'exec' this as it will be polled in the main windows loop.
app = get_qt_app()
# create the figure and axes for the plot
fig = Figure(figsize=(600, 600), dpi=72, facecolor=(1, 1, 1), edgecolor=(0, 0, 0))
ax = fig.add_subplot(111)
# calculate the moving average
ewma_ys = ewma(ys, span=span)
# plot the data
ax.plot(xs, ys, alpha=0.4, label="Raw")
ax.plot(xs, ewma_ys, label="EWMA")
ax.legend()
# generate the canvas to display the plot
canvas = FigureCanvas(fig)
# Get or create the Qt windows to show the chart in.
if figname in _plot_windows:
# get from the global dict and clear any previous widgets
window = _plot_windows[figname]
layout = window.layout()
if layout:
for i in reversed(range(layout.count())):
layout.itemAt(i).widget().setParent(None)
else:
# create a new window for this plot and store it for next time
window = QtGui.QWidget()
window.resize(800, 600)
window.setWindowTitle(figname)
_plot_windows[figname] = window
# create the navigation toolbar
toolbar = NavigationToolbar(canvas, window)
# add the canvas and toolbar to the window
layout = window.layout() or QtGui.QVBoxLayout()
layout.addWidget(canvas)
layout.addWidget(toolbar)
window.setLayout(layout)
window.show()
return "[Plotted '%s']" % figname
#
# Taken from the ui/qt.py example
#
def get_qt_app():
"""
returns the global QtGui.QApplication instance and starts
the event loop if necessary.
"""
app = QtCore.QCoreApplication.instance()
if app is None:
# create a new application
app = QtGui.QApplication([])
# use timer to process events periodically
processing_events = {}
def qt_timer_callback(timer_id, time):
if timer_id in processing_events:
return
processing_events[timer_id] = True
try:
app = QtCore.QCoreApplication.instance()
if app is not None:
app.processEvents(QtCore.QEventLoop.AllEvents, 300)
finally:
del processing_events[timer_id]
timer.set_timer(100, qt_timer_callback)
return app
| unlicense | -8,776,649,208,221,896,000 | 30.281818 | 92 | 0.657076 | false |
ColumbiaCMB/kid_readout | apps/data_taking_scripts/2017-05-jpl-lf-n1-optical/single-horn/sweep_and_stream_bb_4.py | 1 | 3439 |
from kid_readout.interactive import *
from kid_readout.equipment import hardware
from kid_readout.measurement import acquire
from kid_readout.roach import analog
from kid_readout.equipment import agilent_33220
import time
fg = agilent_33220.FunctionGenerator(addr=('192.168.1.135', 5025))
fg.set_load_ohms(1000)
fg.set_dc_voltage(0)
fg.enable_output(False)
ri = Roach2Baseband()
ri.set_modulation_output('high')
initial_f0s = np.load('/data/readout/resonances/2017-06-JPL-8x8-LF-N1_single_horn_4.npy')/1e6
nf = len(initial_f0s)
atonce = 4
if nf % atonce > 0:
print "extending list of resonators to make a multiple of ", atonce
initial_f0s = np.concatenate((initial_f0s, np.arange(1, 1 + atonce - (nf % atonce)) + initial_f0s.max()))
print len(initial_f0s)
nsamp = 2**20
offsets = np.arange(-16,16)*512./nsamp
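# offsets span +/-16 steps of 512/nsamp MHz (~0.49 kHz) around each tone,
# giving a 32-point fine sweep in the same MHz units as the f0 lists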
last_f0s = initial_f0s
for heater_voltage in np.sqrt(np.linspace(0,4**2,16)):
fg.set_dc_voltage(heater_voltage)
if heater_voltage == 0:
print "heater voltage is 0 V, skipping wait"
else:
print "waiting 20 minutes", heater_voltage
time.sleep(1200)
fg.enable_output(True)
for dac_atten in [35]:
ri.set_dac_atten(dac_atten)
tic = time.time()
ncf = new_nc_file(suffix='%d_dB_load_heater_%.3f_V' % (dac_atten, heater_voltage))
swpa = acquire.run_sweep(ri, tone_banks=last_f0s[None,:] + offsets[:,None], num_tone_samples=nsamp,
length_seconds=0, verbose=True,
description='bb sweep')
print "resonance sweep done", (time.time()-tic)/60.
ncf.write(swpa)
current_f0s = []
for sidx in range(last_f0s.shape[0]):
swp = swpa.sweep(sidx)
res = swp.resonator
print res.f_0, res.Q, res.current_result.redchi, (last_f0s[sidx]*1e6-res.f_0)
if np.abs(res.f_0 - last_f0s[sidx]*1e6) > 200e3:
current_f0s.append(last_f0s[sidx]*1e6)
print "using original frequency for ",last_f0s[sidx]
else:
current_f0s.append(res.f_0)
print "fits complete", (time.time()-tic)/60.
current_f0s = np.array(current_f0s)/1e6
current_f0s.sort()
if np.any(np.diff(current_f0s)<0.031):
print "problematic resonator collision:",current_f0s
print "deltas:",np.diff(current_f0s)
problems = np.flatnonzero(np.diff(current_f0s)<0.031)+1
current_f0s[problems] = (current_f0s[problems-1] + current_f0s[problems+1])/2.0
if np.any(np.diff(current_f0s)<0.031):
print "repeated problematic resonator collision:",current_f0s
print "deltas:",np.diff(current_f0s)
problems = np.flatnonzero(np.diff(current_f0s)<0.031)+1
current_f0s[problems] = (current_f0s[problems-1] + current_f0s[problems+1])/2.0
ri.set_tone_freqs(current_f0s,nsamp)
ri.select_fft_bins(range(last_f0s.shape[0]))
last_f0s = current_f0s
raw_input("turn off compressor")
meas = ri.get_measurement(num_seconds=30.,description='stream with bb')
raw_input("turn on compressor")
ncf.write(meas)
print "dac_atten %f heater voltage %.3f V done in %.1f minutes" % (dac_atten, heater_voltage, (time.time()-tic)/60.)
ncf.close()
raw_input("check sweeps fit before going to next voltage step")
ri.set_dac_atten(20)
| bsd-2-clause | 2,862,236,436,444,174,300 | 39.458824 | 124 | 0.624891 | false |
RyanYoung25/tensorflow | tensorflow/python/kernel_tests/init_ops_test.py | 1 | 11495 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
from tensorflow.python.framework import random_seed
from tensorflow.python.ops import init_ops
# Returns true iff the two initializers produce the same tensor to
# within a tiny tolerance.
def identicaltest(tc, init1, init2, use_gpu):
"""Tests if two initializations are identical to within tiny tolerances.
Args:
tc: An instance of TensorFlowTestCase.
init1: An Initializer that generates a tensor of a given shape
init2: An Initializer that generates a tensor of a given shape
use_gpu: Use gpu if true.
Returns:
True or False as determined by test.
"""
num = 100
with tc.test_session(use_gpu=use_gpu, graph=tf.Graph()):
t1 = init1([num]).eval()
with tc.test_session(use_gpu=use_gpu, graph=tf.Graph()):
t2 = init2([num]).eval()
return np.allclose(t1, t2, rtol=1e-15, atol=1e-15)
def duplicated_initializer(tc, init, use_gpu, graph_seed):
"""Tests duplicated random initializer within the same graph.
This test generates two random kernels from the same initializer to the same
  graph, and checks if the results are close enough. Even given the same global
seed, two different instances of random kernels should generate different
results.
Args:
tc: An instance of TensorFlowTestCase.
init: An Initializer that generates a tensor of a given shape
use_gpu: Use gpu if true.
graph_seed: A graph-level seed to use.
Returns:
True or False as determined by test.
"""
num = 100
with tc.test_session(use_gpu=use_gpu, graph=tf.Graph()):
random_seed.set_random_seed(graph_seed)
t1 = init([num]).eval()
t2 = init([num]).eval()
return np.allclose(t1, t2, rtol=1e-15, atol=1e-15)
def _init_sampler(tc, init, num, use_gpu):
"""Returns a func to generate a random tensor of shape [num].
Args:
tc: An instance of TensorFlowTestCase.
init: An Initializer that generates a tensor of a given shape
num: Size of 1D tensor to create.
use_gpu: Use gpu if true.
Returns:
Function to generate a random tensor.
"""
def func():
with tc.test_session(use_gpu=use_gpu):
return init([num]).eval()
return func
class RandomNormalInitializationTest(tf.test.TestCase):
def testInitializerIdentical(self):
for use_gpu in [False, True]:
for dtype in [tf.float32, tf.float64]:
init1 = tf.random_normal_initializer(0.0, 1.0, seed=1, dtype=dtype)
init2 = tf.random_normal_initializer(0.0, 1.0, seed=1, dtype=dtype)
self.assertTrue(identicaltest(self, init1, init2, use_gpu))
def testInitializerDifferent(self):
for use_gpu in [False, True]:
for dtype in [tf.float32, tf.float64]:
init1 = tf.random_normal_initializer(0.0, 1.0, seed=1, dtype=dtype)
init2 = tf.random_normal_initializer(0.0, 1.0, seed=2, dtype=dtype)
self.assertFalse(identicaltest(self, init1, init2, use_gpu=use_gpu))
def testDuplicatedInitializer(self):
for use_gpu in [False, True]:
init = tf.random_normal_initializer(0.0, 1.0)
self.assertFalse(duplicated_initializer(self, init, use_gpu, 1))
def testInvalidDataType(self):
self.assertRaises(
ValueError,
tf.random_normal_initializer, 0.0, 1.0, dtype=tf.string)
class TruncatedNormalInitializationTest(tf.test.TestCase):
def testInitializerIdentical(self):
for use_gpu in [False, True]:
for dtype in [tf.float32, tf.float64]:
init1 = tf.truncated_normal_initializer(0.0, 1.0, seed=1, dtype=dtype)
init2 = tf.truncated_normal_initializer(0.0, 1.0, seed=1, dtype=dtype)
self.assertTrue(identicaltest(self, init1, init2, use_gpu))
def testInitializerDifferent(self):
for use_gpu in [False, True]:
for dtype in [tf.float32, tf.float64]:
init1 = tf.truncated_normal_initializer(0.0, 1.0, seed=1, dtype=dtype)
init2 = tf.truncated_normal_initializer(0.0, 1.0, seed=2, dtype=dtype)
self.assertFalse(identicaltest(self, init1, init2, use_gpu=use_gpu))
def testDuplicatedInitializer(self):
for use_gpu in [False, True]:
init = tf.truncated_normal_initializer(0.0, 1.0)
self.assertFalse(duplicated_initializer(self, init, use_gpu, 1))
def testInvalidDataType(self):
self.assertRaises(
ValueError,
tf.truncated_normal_initializer, 0.0, 1.0, dtype=tf.string)
class RandomUniformInitializationTest(tf.test.TestCase):
def testInitializerIdentical(self):
for use_gpu in [False, True]:
for dtype in [tf.float32, tf.float64]:
init1 = tf.random_uniform_initializer(0.0, 1.0, seed=1, dtype=dtype)
init2 = tf.random_uniform_initializer(0.0, 1.0, seed=1, dtype=dtype)
self.assertTrue(identicaltest(self, init1, init2, use_gpu))
def testInitializerDifferent(self):
for use_gpu in [False, True]:
for dtype in [tf.float32, tf.float64]:
init1 = tf.random_uniform_initializer(0.0, 1.0, seed=1, dtype=dtype)
init2 = tf.random_uniform_initializer(0.0, 1.0, seed=2, dtype=dtype)
self.assertFalse(identicaltest(self, init1, init2, use_gpu))
def testDuplicatedInitializer(self):
for use_gpu in [False, True]:
init = tf.random_uniform_initializer(0.0, 1.0)
self.assertFalse(duplicated_initializer(self, init, use_gpu, 1))
def testInvalidDataType(self):
self.assertRaises(
ValueError,
tf.random_uniform_initializer, 0.0, 1.0, dtype=tf.string)
class UniformUnitScalingInitializationTest(tf.test.TestCase):
def testInitializerIdentical(self):
for use_gpu in [False, True]:
for dtype in [tf.float32, tf.float64]:
init1 = tf.uniform_unit_scaling_initializer(seed=1, dtype=dtype)
init2 = tf.uniform_unit_scaling_initializer(seed=1, dtype=dtype)
self.assertTrue(identicaltest(self, init1, init2, use_gpu))
init3 = tf.uniform_unit_scaling_initializer(1.5, seed=1, dtype=dtype)
init4 = tf.uniform_unit_scaling_initializer(1.5, seed=1, dtype=dtype)
self.assertTrue(identicaltest(self, init3, init4, use_gpu))
def testInitializerDifferent(self):
for use_gpu in [False, True]:
for dtype in [tf.float32, tf.float64]:
init1 = tf.uniform_unit_scaling_initializer(seed=1, dtype=dtype)
init2 = tf.uniform_unit_scaling_initializer(seed=2, dtype=dtype)
init3 = tf.uniform_unit_scaling_initializer(1.5, seed=1, dtype=dtype)
self.assertFalse(identicaltest(self, init1, init2, use_gpu))
self.assertFalse(identicaltest(self, init1, init3, use_gpu))
self.assertFalse(identicaltest(self, init2, init3, use_gpu))
def testDuplicatedInitializer(self):
for use_gpu in [False, True]:
init = tf.uniform_unit_scaling_initializer()
self.assertFalse(duplicated_initializer(self, init, use_gpu, 1))
def testInvalidDataType(self):
self.assertRaises(
ValueError,
tf.uniform_unit_scaling_initializer, dtype=tf.string)
class RandomWalkShapeTest(tf.test.TestCase):
def testRandomWalk(self):
# Fully known shape.
rnd1 = init_ops._random_walk([1, 2], tf.nn.relu)
self.assertEqual([1, 2], rnd1.get_shape())
# TODO(vrv): move to sequence_ops_test?
class RangeTest(tf.test.TestCase):
def _Range(self, start, limit, delta):
with self.test_session():
tf_ans = tf.range(start, limit, delta, name="range")
self.assertEqual([len(range(start, limit, delta))], tf_ans.get_shape())
return tf_ans.eval()
def testBasic(self):
self.assertTrue(np.array_equal(
self._Range(0, 5, 1), np.array([0, 1, 2, 3, 4])))
self.assertTrue(np.array_equal(
self._Range(0, 5, 2), np.array([0, 2, 4])))
self.assertTrue(np.array_equal(
self._Range(0, 6, 2), np.array([0, 2, 4])))
self.assertTrue(np.array_equal(
self._Range(13, 32, 7), np.array([13, 20, 27])))
self.assertTrue(np.array_equal(
self._Range(100, 500, 100), np.array([100, 200, 300, 400])))
self.assertEqual(tf.range(0, 5, 1).dtype, tf.int32)
def testLimitOnly(self):
with self.test_session():
self.assertAllEqual(np.arange(5), tf.range(5).eval())
def testEmpty(self):
for start in 0, 5:
self.assertTrue(np.array_equal(self._Range(start, start, 1), []))
# TODO(vrv): move to sequence_ops_test?
class LinSpaceTest(tf.test.TestCase):
def _LinSpace(self, start, stop, num):
with self.test_session():
tf_ans = tf.linspace(start, stop, num, name="linspace")
self.assertEqual([num], tf_ans.get_shape())
return tf_ans.eval()
def testPositive(self):
self.assertArrayNear(self._LinSpace(1., 5., 1), np.array([1.]), 1e-5)
self.assertArrayNear(self._LinSpace(1., 5., 2), np.array([1., 5.]), 1e-5)
self.assertArrayNear(self._LinSpace(1., 5., 3),
np.array([1., 3., 5.]), 1e-5)
self.assertArrayNear(self._LinSpace(1., 5., 4),
np.array([1., 7. / 3., 11. / 3., 5.]), 1e-5)
def testNegative(self):
self.assertArrayNear(self._LinSpace(-1., -5., 1), np.array([-1.]), 1e-5)
self.assertArrayNear(self._LinSpace(-1., -5., 2),
np.array([-1., -5.]), 1e-5)
self.assertArrayNear(self._LinSpace(-1., -5., 3),
np.array([-1., -3., -5.]), 1e-5)
self.assertArrayNear(self._LinSpace(-1., -5., 4),
np.array([-1., -7. / 3., -11. / 3., -5.]), 1e-5)
def testNegativeToPositive(self):
self.assertArrayNear(self._LinSpace(-1., 5., 1), np.array([-1.]), 1e-5)
self.assertArrayNear(self._LinSpace(-1., 5., 2), np.array([-1., 5.]), 1e-5)
self.assertArrayNear(self._LinSpace(-1., 5., 3),
np.array([-1., 2., 5.]), 1e-5)
self.assertArrayNear(self._LinSpace(-1., 5., 4),
np.array([-1., 1., 3., 5.]), 1e-5)
def testPoint(self):
self.assertArrayNear(self._LinSpace(5., 5., 1), np.array([5.]), 1e-5)
self.assertArrayNear(self._LinSpace(5., 5., 2), np.array([5.] * 2), 1e-5)
self.assertArrayNear(self._LinSpace(5., 5., 3), np.array([5.] * 3), 1e-5)
self.assertArrayNear(self._LinSpace(5., 5., 4), np.array([5.] * 4), 1e-5)
class DeviceTest(tf.test.TestCase):
def testNoDevice(self):
with tf.Graph().as_default():
var = tf.Variable([[1.0, 1.0]])
self.assertDeviceEqual(None, var.device)
self.assertDeviceEqual(None, var.initializer.device)
def testDevice(self):
with tf.Graph().as_default():
with tf.device("/job:ps"):
var = tf.Variable([[1.0, 1.0]])
self.assertDeviceEqual("/job:ps", var.device)
self.assertDeviceEqual("/job:ps", var.initializer.device)
if __name__ == "__main__":
tf.test.main()
| apache-2.0 | -5,162,727,929,526,822,000 | 37.189369 | 80 | 0.654806 | false |
jerpat/csmake | csmake-providers/CsmakeModules/HttpStopService.py | 1 | 2642 | # <copyright>
# (c) Copyright 2017 Hewlett Packard Enterprise Development LP
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# </copyright>
from CsmakeProviders.HttpServiceProvider import HttpServiceProvider
from Csmake.CsmakeAspect import CsmakeAspect
class HttpStopService(CsmakeAspect):
"""Purpose: End execution of a HttpService section
May be used as an aspect on a section that would
need the service ended
Options:
tag - (OPTIONAL) Must match the tag given to HttpService that
this section is ending
Phases/JoinPoints:
build - end execution of HttpService
end__build - end execution of HttpService at the conclusion of the
decorated regular section"""
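    # A minimal csmakefile sketch (section labels and tag value are
    # assumptions, not taken from this repository):
    #
    #   [HttpService@serve]
    #   tag = my-http
    #
    #   [HttpStopService@stop-serve]
    #   tag = my-http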
def _stopService(self, tag):
try:
self._unregisterOtherClassOnExitCallback(
"HttpService",
"_stopService" )
except:
pass
HttpServiceProvider.disposeServiceProvider(tag)
def build(self, options):
tag = ''
if 'tag' in options:
tag = options['tag']
        self._stopService(tag)
self.log.passed()
return None
def end__build(self, phase, options, step, stepoptions):
return self.build(options)
| gpl-3.0 | 5,305,169,420,175,332,000 | 38.432836 | 77 | 0.691522 | false |
el33th4x0r/crosstex | crosstex/cmd.py | 1 | 12121 | import argparse
import importlib
import logging
import os
import os.path
import sys
import crosstex
import crosstex.style
logger = logging.getLogger('crosstex')
parser = argparse.ArgumentParser(prog='crosstex',
description='A modern, object-oriented bibliographic tool.')
#parser.add_argument('--quiet',
# help='Do not sanity check the input (XXX: ignored.')
#parser.add_argument('--strict',
# help='Apply stricter checks and check all entries (XXX:ignored.')
#parser.add_argument('--dump', metavar='TYPE',
# help='After parsing the bibliography, dump a list of all '
# 'objects of the type specified, or, with "file", print '
# 'a list of files processed. XXX: ignored')
#parser.add_argument('--no-sort', help='XXX: ignored')
#parser.add_argument('--capitalize', metavar='TYPE', action='append',
# help='Specify any string-like object, i.e. one with name and '
# 'shortname fields. Strings of the specified types will '
# 'appear in ALL CAPS. XXX: ignored')
#parser.add_argument('--no-field', metavar='TYPE', action='append',
# help='Specify a field name, and in any objects where that '
# 'field is optional it will be unassigned no matter what '
# 'appears in the database. For example, to turn off '
# 'page numbers, use "--no-field pages". XXX: ignored')
#parser.add_argument('-l', '--link', metavar='FIELD', action='append',
# help='Add to the list of fields used to generate links. '
# 'LaTeX documents should make use of links by including '
# 'the hyperref package. When converting to HTML, this '
# 'defaults to [Abstract, URL, PS, PDF, HTML, DVI, TEX, '
# 'BIB, FTP, HTTP, and RTF]. XXX: ignored')
#parser.add_argument('--no-link', help='XXX: ignored')
#parser.add_argument('--abstract',
# help='In the bibliography, include paper abstracts if available. XXX: ignored')
#parser.add_argument('--no-abstract')
#parser.add_argument('--keywords',
# help='In the bibliography, include paper keywords if available. XXX: ignored')
#parser.add_argument('--no-keywords')
#parser.add_argument('--popups',
# help='If abstracts or keywords are to appear for an entry'
# 'when generating HTML, instead hide these extra blocks'
# 'and reveal them as popups when the mouse hovers over'
# 'the entry. XXX: ignored')
#parser.add_argument('--no-popups')
#parser.add_argument('--title-head',
# help='In the bibliography, put the title bold and first. XXX:ignored')
#parser.add_argument('--no-title-head')
#parser.add_argument('--blank-labels',
# help='In the bibliography, leave out item labels. XXX:ignored')
#parser.add_argument('--no-blank-labels')
#parser.add_argument('--break-lines',
# help='In the bibliography, put author, title, and '
# 'publication information on separate lines. XXX:ignored')
#parser.add_argument('--no-break-lines')
#parser.add_argument('--last-first',
# help='The first name in each author list will appear "Last, '
# 'First" instead of "First Last" (the latter is the '
# 'default). XXX:ignored')
#parser.add_argument('--no-last-first')
parser.add_argument('--version', version='CrossTeX 0.9.0', action='version')
parser.add_argument('-v', '--verbose', action='store_true')
parser.add_argument('-d', '--dir', metavar='DIR', action='append', dest='dirs',
help='Add a directory in which to find data files, searched '
'from last specified to first.')
parser.add_argument('--cite', metavar='CITE', action='append',
help='Cite a key exactly as with the \cite LaTeX command.')
parser.add_argument('--cite-by', metavar='CITE_BY', default='style',
help='With "number", use numeric labels such as [1]. With '
'"initials", use labels based on author last-names such '
'as [SBRD04b]. With "fullname", use labels based on '
'author names such as [Sirer et al. 2004]. With '
'"style", use the style default.')
parser.add_argument('--style', metavar='STYLE', default='plain',
help='Use a standard style such as plain, unsrt, abbrv, '
'full, or alpha. Options set by the style may be '
'overidden by further command-line options.')
parser.add_argument('--short', metavar='TYPE', action='append',
help='Specify any string-like object, i.e. one with name and '
'shortname fields. Whenever possible,the short name '
'will be used, e.g. two-letter state codes for '
'"state", conference acronyms such as NSDI for '
'"conference", or initials such as E. G. Sirer for '
'"author".')
parser.add_argument('--titlecase', metavar='TITLECASE', default='default',
choices=('default', 'lower', 'upper', 'title'),
help='In the bibliography, force titles into lower-, upper-, '
'or title-case. Default: leave the titles unchanged.')
parser.add_argument('-f', '--format', metavar='FORMAT', dest='fmt', default='bbl',
help='Select a format for the output. Examples include '
'"bbl", "html", "bib", or "xtx". "bib" and "xtx" are '
'always available and not affected by "--style". '
'Other formats are dependent upon the choice of style.')
class SortAction(argparse.Action):
def __call__(self, parser, args, values, option_string=None):
s = getattr(args, self.dest, []) or []
reverse = option_string in ('-S', '--reverse-sort')
s.append((values, reverse))
setattr(args, self.dest, s)
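# e.g. "-s year -S author" accumulates sort=[('year', False), ('author', True)]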
parser.add_argument('-s', '--sort', metavar='FIELD', dest='sort', action=SortAction,
help='Sort by specified field. Multiple sort orders are '
'applied in the order specified, e.g. "-s year -s '
'author" will cause elements to be grouped primarily by '
'author and sub-grouped by year.'
' XXX: this is currently ignored')
parser.add_argument('-S', '--reverse-sort', metavar='FIELD', dest='sort', action=SortAction,
help='Exactly as --sort, but sort by descending field values '
'rather than ascending.'
' XXX: this is currently ignored')
class HeadingAction(argparse.Action):
def __call__(self, parser, args, values, option_string=None):
s = getattr(args, self.dest, None) or None
reverse = option_string in ('--reverse-heading',)
setattr(args, self.dest, (values, reverse))
parser.add_argument('--heading', metavar='FIELD', dest='heading', action=HeadingAction,
help='Divide entries and create headings in bibliography by '
'the value of the given field. XXX: ignored')
parser.add_argument('--reverse-heading', metavar='FIELD', dest='heading', action=HeadingAction,
help='Exactly as --heading, but sort by descending field '
'values rather than ascending. XXX: ignored')
parser.add_argument('-o', '--output', metavar='FILE',
help='Write the bibliography to the specified output file.')
parser.add_argument('--no-pages', action='store_const', const=True, default=False,
help='Skip pages.')
parser.add_argument('--no-address', action='store_const', const=True, default=False,
help='Skip address.')
parser.add_argument('--add-in', action='store_const', const=True, default=False,
help='Add "In" for articles.')
parser.add_argument('--add-proc', dest='add_proc',
action='store_const', const='proc', default=None,
help='Add "Proc. of" to conference and workshop publications.')
parser.add_argument('--add-proceedings', dest='add_proc',
action='store_const', const='proceedings',
help='Add "Proceedings of the" to conference and workshop publications.')
parser.add_argument('files', metavar='FILES', nargs='+',
help='A list of xtx, aux, or bib files to process.')
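# Example invocations (file names are hypothetical):
#   crosstex paper.aux                        # writes paper.bbl next to the .aux
#   crosstex --style alpha -f html refs.xtx -o refs.html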
def main(argv):
try:
args = parser.parse_args()
path = list(args.dirs or []) + \
[os.path.join(os.path.join(os.path.expanduser('~'), '.crosstex'))] + \
['/usr/local/share/crosstex'] + \
['/XXX']
if args.verbose:
logger.setLevel(logging.DEBUG)
logging.getLogger('crosstex.parse').setLevel(logging.DEBUG)
xtx = crosstex.CrossTeX(xtx_path=path)
xtx.set_titlecase(args.titlecase)
if args.no_pages:
xtx.no_pages()
if args.no_address:
xtx.no_address()
if args.add_in:
xtx.add_in()
if args.add_proc == 'proc':
xtx.add_proc()
if args.add_proc == 'proceedings':
xtx.add_proceedings()
for s in args.short or []:
xtx.add_short(s)
xtx.set_style(args.fmt, args.style, args.cite_by)
for f in reversed(args.files):
xtx.parse(f)
# We'll use this check later
is_aux = os.path.splitext(args.files[-1])[1] == '.aux' or \
xtx.aux_citations() and os.path.splitext(args.files[-1])[1] == ''
# Get a list of things to cite
cite = []
warn_uncitable = True
if args.cite:
cite = args.cite
elif is_aux:
cite = xtx.aux_citations()
elif xtx.has_inline_citations():
cite = xtx.inline_citations()
else:
warn_uncitable = False
cite = xtx.all_citations()
objects = [(c, xtx.lookup(c)) for c in cite]
if warn_uncitable:
for c in [c for c, o in objects if not o or not o.citeable]:
logger.warning('Cannot find object for citation %r' % c)
citeable = [(c, o) for c, o in objects if o and o.citeable]
unique = {}
for c, o in citeable:
if o in unique:
unique[o].append(c)
else:
unique[o] = [c]
for o, cs in unique.items():
if len(cs) > 1:
cites = ', '.join(['%r' % c for c in cs])
logger.warning("Citations %s match to the same object; you'll see duplicates" % cites)
citeable = xtx.sort(citeable, args.sort)
if args.heading:
citeable = xtx.heading(citeable, args.heading[0], args.heading[1])
try:
rendered = xtx.render(citeable)
rendered = rendered.encode('utf8')
except crosstex.style.UnsupportedCitation as e:
logger.error('Style does not support citations for %s' % e.citetype)
return 1
if args.output:
with open(args.output, 'w') as fout:
fout.write(rendered.decode('utf-8'))
fout.flush()
elif is_aux and args.fmt == 'bbl':
with open(os.path.splitext(args.files[-1])[0] + '.bbl', 'w') as fout:
fout.write(rendered.decode('utf-8'))
fout.flush()
else:
sys.stdout.write(rendered.decode('utf-8'))
sys.stdout.flush()
return 0
except crosstex.CrossTeXError as e:
logger.error(str(e))
return 1
| gpl-2.0 | -3,937,128,750,447,653,000 | 51.021459 | 102 | 0.557875 | false |
cschenck/blender_sim | fluid_sim_deps/blender-2.69/2.69/scripts/modules/rna_info.py | 1 | 22978 | # ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
# classes for extracting info from blenders internal classes
import bpy
# use to strip python paths
script_paths = bpy.utils.script_paths()
_FAKE_STRUCT_SUBCLASS = True
def _get_direct_attr(rna_type, attr):
props = getattr(rna_type, attr)
base = rna_type.base
if not base:
return [prop for prop in props]
else:
props_base = getattr(base, attr).values()
return [prop for prop in props if prop not in props_base]
def get_direct_properties(rna_type):
return _get_direct_attr(rna_type, "properties")
def get_direct_functions(rna_type):
return _get_direct_attr(rna_type, "functions")
def rna_id_ignore(rna_id):
if rna_id == "rna_type":
return True
if "_OT_" in rna_id:
return True
if "_MT_" in rna_id:
return True
if "_PT_" in rna_id:
return True
if "_HT_" in rna_id:
return True
if "_KSI_" in rna_id:
return True
return False
def range_str(val):
if val < -10000000:
return "-inf"
elif val > 10000000:
return "inf"
elif type(val) == float:
return '%g' % val
else:
return str(val)
def float_as_string(f):
val_str = "%g" % f
if '.' not in val_str and '-' not in val_str: # value could be 1e-05
val_str += '.0'
return val_str
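# e.g. float_as_string(2.0) -> '2.0', float_as_string(1e-05) -> '1e-05'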
class InfoStructRNA:
__slots__ = (
"bl_rna",
"identifier",
"name",
"description",
"base",
"nested",
"full_path",
"functions",
"children",
"references",
"properties",
)
global_lookup = {}
def __init__(self, rna_type):
self.bl_rna = rna_type
self.identifier = rna_type.identifier
self.name = rna_type.name
self.description = rna_type.description.strip()
# set later
self.base = None
self.nested = None
self.full_path = ""
self.functions = []
self.children = []
self.references = []
self.properties = []
def build(self):
rna_type = self.bl_rna
parent_id = self.identifier
self.properties[:] = [GetInfoPropertyRNA(rna_prop, parent_id) for rna_prop in get_direct_properties(rna_type) if rna_prop.identifier != "rna_type"]
self.functions[:] = [GetInfoFunctionRNA(rna_prop, parent_id) for rna_prop in get_direct_functions(rna_type)]
def get_bases(self):
bases = []
item = self
while item:
item = item.base
if item:
bases.append(item)
return bases
def get_nested_properties(self, ls=None):
if not ls:
ls = self.properties[:]
if self.nested:
self.nested.get_nested_properties(ls)
return ls
def _get_py_visible_attrs(self):
attrs = []
py_class = getattr(bpy.types, self.identifier)
for attr_str in dir(py_class):
if attr_str.startswith("_"):
continue
attrs.append((attr_str, getattr(py_class, attr_str)))
return attrs
def get_py_properties(self):
properties = []
for identifier, attr in self._get_py_visible_attrs():
if type(attr) is property:
properties.append((identifier, attr))
return properties
def get_py_functions(self):
import types
functions = []
for identifier, attr in self._get_py_visible_attrs():
# methods may be python wrappers to C functions
attr_func = getattr(attr, "__func__", attr)
if type(attr_func) in {types.FunctionType, types.MethodType}:
functions.append((identifier, attr))
return functions
def get_py_c_functions(self):
import types
functions = []
for identifier, attr in self._get_py_visible_attrs():
# methods may be python wrappers to C functions
attr_func = getattr(attr, "__func__", attr)
if type(attr_func) in {types.BuiltinMethodType, types.BuiltinFunctionType}:
functions.append((identifier, attr))
return functions
def __str__(self):
txt = ""
txt += self.identifier
if self.base:
txt += "(%s)" % self.base.identifier
txt += ": " + self.description + "\n"
for prop in self.properties:
txt += prop.__repr__() + "\n"
for func in self.functions:
txt += func.__repr__() + "\n"
return txt
class InfoPropertyRNA:
__slots__ = (
"bl_prop",
"srna",
"identifier",
"name",
"description",
"default_str",
"default",
"enum_items",
"min",
"max",
"array_length",
"collection_type",
"type",
"fixed_type",
"is_argument_optional",
"is_enum_flag",
"is_required",
"is_readonly",
"is_never_none",
)
global_lookup = {}
def __init__(self, rna_prop):
self.bl_prop = rna_prop
self.identifier = rna_prop.identifier
self.name = rna_prop.name
self.description = rna_prop.description.strip()
self.default_str = "<UNKNOWN>"
def build(self):
rna_prop = self.bl_prop
self.enum_items = []
self.min = getattr(rna_prop, "hard_min", -1)
self.max = getattr(rna_prop, "hard_max", -1)
self.array_length = getattr(rna_prop, "array_length", 0)
self.collection_type = GetInfoStructRNA(rna_prop.srna)
self.is_required = rna_prop.is_required
self.is_readonly = rna_prop.is_readonly
self.is_never_none = rna_prop.is_never_none
self.is_argument_optional = rna_prop.is_argument_optional
self.type = rna_prop.type.lower()
fixed_type = getattr(rna_prop, "fixed_type", "")
if fixed_type:
self.fixed_type = GetInfoStructRNA(fixed_type) # valid for pointer/collections
else:
self.fixed_type = None
if self.type == "enum":
self.enum_items[:] = [(item.identifier, item.name, item.description) for item in rna_prop.enum_items]
self.is_enum_flag = rna_prop.is_enum_flag
else:
self.is_enum_flag = False
if self.array_length:
self.default = tuple(getattr(rna_prop, "default_array", ()))
elif self.type == "enum" and self.is_enum_flag:
self.default = getattr(rna_prop, "default_flag", set())
else:
self.default = getattr(rna_prop, "default", None)
self.default_str = "" # fallback
if self.type == "pointer":
# pointer has no default, just set as None
self.default = None
self.default_str = "None"
elif self.type == "string":
self.default_str = "\"%s\"" % self.default
elif self.type == "enum":
if self.is_enum_flag:
self.default_str = "%r" % self.default # repr or set()
else:
self.default_str = "'%s'" % self.default
elif self.array_length:
self.default_str = ''
# special case for floats
if len(self.default) > 0:
if self.type == "float":
self.default_str = "(%s)" % ", ".join(float_as_string(f) for f in self.default)
if not self.default_str:
self.default_str = str(self.default)
else:
if self.type == "float":
self.default_str = float_as_string(self.default)
else:
self.default_str = str(self.default)
self.srna = GetInfoStructRNA(rna_prop.srna) # valid for pointer/collections
def get_arg_default(self, force=True):
default = self.default_str
if default and (force or self.is_required is False):
return "%s=%s" % (self.identifier, default)
return self.identifier
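    # Illustrative (values made up): for an optional string property "name"
    # defaulting to "Cube", get_arg_default(force=False) returns 'name="Cube"';
    # for a required property it returns just 'name'.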
def get_type_description(self, as_ret=False, as_arg=False, class_fmt="%s", collection_id="Collection"):
type_str = ""
if self.fixed_type is None:
type_str += self.type
if self.array_length:
type_str += " array of %d items" % (self.array_length)
if self.type in {"float", "int"}:
type_str += " in [%s, %s]" % (range_str(self.min), range_str(self.max))
elif self.type == "enum":
if self.is_enum_flag:
type_str += " set in {%s}" % ", ".join(("'%s'" % s[0]) for s in self.enum_items)
else:
type_str += " in [%s]" % ", ".join(("'%s'" % s[0]) for s in self.enum_items)
if not (as_arg or as_ret):
# write default property, ignore function args for this
if self.type != "pointer":
if self.default_str:
type_str += ", default %s" % self.default_str
else:
if self.type == "collection":
if self.collection_type:
collection_str = (class_fmt % self.collection_type.identifier) + (" %s of " % collection_id)
else:
collection_str = "%s of " % collection_id
else:
collection_str = ""
type_str += collection_str + (class_fmt % self.fixed_type.identifier)
# setup qualifiers for this value.
type_info = []
if as_ret:
pass
elif as_arg:
if not self.is_required:
type_info.append("optional")
if self.is_argument_optional:
type_info.append("optional argument")
else: # readonly is only useful for self's, not args
if self.is_readonly:
type_info.append("readonly")
if self.is_never_none:
type_info.append("never None")
if type_info:
type_str += (", (%s)" % ", ".join(type_info))
return type_str
def __str__(self):
txt = ""
txt += " * " + self.identifier + ": " + self.description
return txt
class InfoFunctionRNA:
__slots__ = (
"bl_func",
"identifier",
"description",
"args",
"return_values",
"is_classmethod",
)
global_lookup = {}
def __init__(self, rna_func):
self.bl_func = rna_func
self.identifier = rna_func.identifier
# self.name = rna_func.name # functions have no name!
self.description = rna_func.description.strip()
self.is_classmethod = not rna_func.use_self
self.args = []
self.return_values = ()
def build(self):
rna_func = self.bl_func
parent_id = rna_func
self.return_values = []
for rna_prop in rna_func.parameters.values():
prop = GetInfoPropertyRNA(rna_prop, parent_id)
if rna_prop.is_output:
self.return_values.append(prop)
else:
self.args.append(prop)
self.return_values = tuple(self.return_values)
def __str__(self):
txt = ''
txt += ' * ' + self.identifier + '('
for arg in self.args:
txt += arg.identifier + ', '
txt += '): ' + self.description
return txt
class InfoOperatorRNA:
__slots__ = (
"bl_op",
"identifier",
"name",
"module_name",
"func_name",
"description",
"args",
)
global_lookup = {}
def __init__(self, rna_op):
self.bl_op = rna_op
self.identifier = rna_op.identifier
mod, name = self.identifier.split("_OT_", 1)
self.module_name = mod.lower()
self.func_name = name
# self.name = rna_func.name # functions have no name!
self.description = rna_op.description.strip()
self.args = []
def build(self):
rna_op = self.bl_op
parent_id = self.identifier
for rna_id, rna_prop in rna_op.properties.items():
if rna_id == "rna_type":
continue
prop = GetInfoPropertyRNA(rna_prop, parent_id)
self.args.append(prop)
def get_location(self):
op_class = getattr(bpy.types, self.identifier)
op_func = getattr(op_class, "execute", None)
if op_func is None:
op_func = getattr(op_class, "invoke", None)
if op_func is None:
op_func = getattr(op_class, "poll", None)
if op_func:
op_code = op_func.__code__
source_path = op_code.co_filename
# clear the prefix
for p in script_paths:
source_path = source_path.split(p)[-1]
if source_path[0] in "/\\":
source_path = source_path[1:]
return source_path, op_code.co_firstlineno
else:
return None, None
def _GetInfoRNA(bl_rna, cls, parent_id=""):
if bl_rna is None:
return None
key = parent_id, bl_rna.identifier
try:
return cls.global_lookup[key]
except KeyError:
instance = cls.global_lookup[key] = cls(bl_rna)
return instance
def GetInfoStructRNA(bl_rna):
return _GetInfoRNA(bl_rna, InfoStructRNA)
def GetInfoPropertyRNA(bl_rna, parent_id):
return _GetInfoRNA(bl_rna, InfoPropertyRNA, parent_id)
def GetInfoFunctionRNA(bl_rna, parent_id):
return _GetInfoRNA(bl_rna, InfoFunctionRNA, parent_id)
def GetInfoOperatorRNA(bl_rna):
return _GetInfoRNA(bl_rna, InfoOperatorRNA)
def BuildRNAInfo():
# Use for faster lookups
# use rna_struct.identifier as the key for each dict
rna_struct_dict = {} # store identifier:rna lookups
rna_full_path_dict = {} # store the result of full_rna_struct_path(rna_struct)
rna_children_dict = {} # store all rna_structs nested from here
rna_references_dict = {} # store a list of rna path strings that reference this type
# rna_functions_dict = {} # store all functions directly in this type (not inherited)
def full_rna_struct_path(rna_struct):
"""
Needed when referencing one struct from another
"""
nested = rna_struct.nested
if nested:
return "%s.%s" % (full_rna_struct_path(nested), rna_struct.identifier)
else:
return rna_struct.identifier
# def write_func(rna_func, ident):
def base_id(rna_struct):
try:
return rna_struct.base.identifier
except:
return "" # invalid id
#structs = [(base_id(rna_struct), rna_struct.identifier, rna_struct) for rna_struct in bpy.doc.structs.values()]
'''
structs = []
for rna_struct in bpy.doc.structs.values():
structs.append( (base_id(rna_struct), rna_struct.identifier, rna_struct) )
'''
structs = []
for rna_type_name in dir(bpy.types):
rna_type = getattr(bpy.types, rna_type_name)
rna_struct = getattr(rna_type, "bl_rna", None)
if rna_struct:
#if not rna_type_name.startswith('__'):
identifier = rna_struct.identifier
if not rna_id_ignore(identifier):
structs.append((base_id(rna_struct), identifier, rna_struct))
# Simple lookup
rna_struct_dict[identifier] = rna_struct
# Store full rna path 'GameObjectSettings' -> 'Object.GameObjectSettings'
rna_full_path_dict[identifier] = full_rna_struct_path(rna_struct)
# Store a list of functions, remove inherited later
# NOT USED YET
## rna_functions_dict[identifier] = get_direct_functions(rna_struct)
# fill in these later
rna_children_dict[identifier] = []
rna_references_dict[identifier] = []
else:
print("Ignoring", rna_type_name)
    structs.sort()  # not needed, but speeds up the sort below by placing items without inheritance first
# Arrange so classes are always defined in the correct order
deps_ok = False
while deps_ok is False:
deps_ok = True
rna_done = set()
for i, (rna_base, identifier, rna_struct) in enumerate(structs):
rna_done.add(identifier)
if rna_base and rna_base not in rna_done:
deps_ok = False
data = structs.pop(i)
ok = False
while i < len(structs):
if structs[i][1] == rna_base:
structs.insert(i + 1, data) # insert after the item we depend on.
ok = True
break
i += 1
if not ok:
                    print('Dependency "%s" could not be found for "%s"' % (rna_base, identifier))
break
# Done ordering structs
# precalculate vars to avoid a lot of looping
for (rna_base, identifier, rna_struct) in structs:
# rna_struct_path = full_rna_struct_path(rna_struct)
rna_struct_path = rna_full_path_dict[identifier]
for rna_prop in get_direct_properties(rna_struct):
rna_prop_identifier = rna_prop.identifier
if rna_prop_identifier == 'RNA' or rna_id_ignore(rna_prop_identifier):
continue
for rna_prop_ptr in (getattr(rna_prop, "fixed_type", None), getattr(rna_prop, "srna", None)):
# Does this property point to me?
if rna_prop_ptr:
rna_references_dict[rna_prop_ptr.identifier].append("%s.%s" % (rna_struct_path, rna_prop_identifier))
for rna_func in get_direct_functions(rna_struct):
for rna_prop_identifier, rna_prop in rna_func.parameters.items():
if rna_prop_identifier == 'RNA' or rna_id_ignore(rna_prop_identifier):
continue
rna_prop_ptr = getattr(rna_prop, "fixed_type", None)
# Does this property point to me?
if rna_prop_ptr:
rna_references_dict[rna_prop_ptr.identifier].append("%s.%s" % (rna_struct_path, rna_func.identifier))
# Store nested children
nested = rna_struct.nested
if nested:
rna_children_dict[nested.identifier].append(rna_struct)
# Sort the refs, just reads nicer
for rna_refs in rna_references_dict.values():
rna_refs.sort()
info_structs = []
for (rna_base, identifier, rna_struct) in structs:
#if rna_struct.nested:
# continue
#write_struct(rna_struct, '')
info_struct = GetInfoStructRNA(rna_struct)
if rna_base:
info_struct.base = GetInfoStructRNA(rna_struct_dict[rna_base])
info_struct.nested = GetInfoStructRNA(rna_struct.nested)
info_struct.children[:] = rna_children_dict[identifier]
info_struct.references[:] = rna_references_dict[identifier]
info_struct.full_path = rna_full_path_dict[identifier]
info_structs.append(info_struct)
for rna_info_prop in InfoPropertyRNA.global_lookup.values():
rna_info_prop.build()
for rna_info_prop in InfoFunctionRNA.global_lookup.values():
rna_info_prop.build()
for rna_info in InfoStructRNA.global_lookup.values():
rna_info.build()
for prop in rna_info.properties:
prop.build()
for func in rna_info.functions:
func.build()
for prop in func.args:
prop.build()
for prop in func.return_values:
prop.build()
# there are too many invalid defaults, unless we intend to fix, leave this off
if 0:
for rna_info in InfoStructRNA.global_lookup.values():
for prop in rna_info.properties:
# ERROR CHECK
default = prop.default
if type(default) in {float, int}:
if default < prop.min or default > prop.max:
print("\t %s.%s, %s not in [%s - %s]" % (rna_info.identifier, prop.identifier, default, prop.min, prop.max))
# now for operators
op_mods = dir(bpy.ops)
for op_mod_name in sorted(op_mods):
if op_mod_name.startswith('__'):
continue
op_mod = getattr(bpy.ops, op_mod_name)
operators = dir(op_mod)
for op in sorted(operators):
try:
rna_prop = getattr(op_mod, op).get_rna()
except AttributeError:
rna_prop = None
except TypeError:
rna_prop = None
if rna_prop:
GetInfoOperatorRNA(rna_prop.bl_rna)
for rna_info in InfoOperatorRNA.global_lookup.values():
rna_info.build()
for rna_prop in rna_info.args:
rna_prop.build()
#for rna_info in InfoStructRNA.global_lookup.values():
# print(rna_info)
return InfoStructRNA.global_lookup, InfoFunctionRNA.global_lookup, InfoOperatorRNA.global_lookup, InfoPropertyRNA.global_lookup
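# Typical use (sketch; requires a running Blender Python session):
#   structs, funcs, ops, props = BuildRNAInfo()
#   obj_info = structs[("", "Object")]  # lookup keys are (parent_id, identifier)
#   print(obj_info.full_path, len(obj_info.properties))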
if __name__ == "__main__":
import rna_info
struct = rna_info.BuildRNAInfo()[0]
data = []
for struct_id, v in sorted(struct.items()):
struct_id_str = v.identifier #~ "".join(sid for sid in struct_id if struct_id)
for base in v.get_bases():
struct_id_str = base.identifier + "|" + struct_id_str
props = [(prop.identifier, prop) for prop in v.properties]
for prop_id, prop in sorted(props):
# if prop.type == "boolean":
# continue
prop_type = prop.type
if prop.array_length > 0:
prop_type += "[%d]" % prop.array_length
data.append("%s.%s -> %s: %s%s %s" % (struct_id_str, prop.identifier, prop.identifier, prop_type, ", (read-only)" if prop.is_readonly else "", prop.description))
data.sort()
if bpy.app.background:
import sys
sys.stderr.write("\n".join(data))
sys.stderr.write("\n\nEOF\n")
else:
text = bpy.data.texts.new(name="api.py")
        text.from_string("\n".join(data))
| gpl-3.0 | -7,275,365,268,557,199,000 | 31.002786 | 179 | 0.556402 | false |