from project_manager_web.models import Project
class SearchHelperResult:
def __init__(self, project):
self.project = project
class SearchHelper:
SEARCH_IN_CHOICES = (
('name', 'Name'),
('description', 'Description'),
('progresses', 'Project Progresses'),
('name+description', 'Name and descriptions'),
('name+description+progresses', 'All')
)
def __init__(self, search_for, search_in):
        # Populated from the request's GET parameters
self.search_for = search_for
self.search_in = search_in.split('+')
@property
def find_results(self):
results = None
# Name search
if 'name' in self.search_in:
if results is None:
results = Project.objects.filter(name__icontains=self.search_for)
else:
results |= Project.objects.filter(name__icontains=self.search_for)
# Description search
if 'description' in self.search_in:
if results is None:
results = Project.objects.filter(description__icontains=self.search_for)
else:
results |= Project.objects.filter(description__icontains=self.search_for)
# Progresses search
if 'progresses' in self.search_in:
if results is None:
results = Project.objects.filter(projectprogress__notification_text__icontains=self.search_for)
else:
results |= Project.objects.filter(projectprogress__notification_text__icontains=self.search_for)
        if results is None:
            return Project.objects.none().values('name', 'description', 'id')
        return results.values('name', 'description', 'id').distinct()
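
# A more compact, behavior-equivalent sketch using Django Q objects (shown as
# comments; it assumes the same Project/ProjectProgress schema as above):
#
#     from django.db.models import Q
#
#     FIELD_LOOKUPS = {
#         'name': 'name__icontains',
#         'description': 'description__icontains',
#         'progresses': 'projectprogress__notification_text__icontains',
#     }
#     query = Q()
#     for key in self.search_in:
#         if key in FIELD_LOOKUPS:
#             query |= Q(**{FIELD_LOOKUPS[key]: self.search_for})
#     results = Project.objects.filter(query).distinct()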
|
# Note: We don't need to call run() since our application is embedded within
# the App Engine WSGI application server.
import sys
sys.path.insert(0, 'lib')
import json
import urllib2
import httplib
import time
from bs4 import BeautifulSoup
import requests
from datetime import datetime
from email.mime.text import MIMEText as MIME
import traceback
import smtplib
# Credentials (if needed)
username = 'colinmcd94'
password = 'mensetmanus'
"""
def setup_server():
# The actual mail send
server = smtplib.SMTP('smtp.gmail.com:587')
server.starttls()
server.login(username,password)
return server
def send(server,msg):
m = MIME(msg)
m['Subject'] = "Error in kickdata script"
m['From'] = 'Colin McDonnell <[email protected]>'
m['To'] = "[email protected]"
server.sendmail(m["From"], m["To"].split(","), m.as_string())
"""
def minutes_left(proj):
deadline = proj["deadline"]
current = time.time()
minutes_left = (deadline-current)/60
return minutes_left
def soupify(url):
print "SOUPIFYING"
r = requests.get(url)
print "URL is "+url
data = r.text
print "data: "+data[:100]
    soup = BeautifulSoup(data, "html.parser")
return soup
def pretty_print(project):
    print json.dumps(project, sort_keys=True, indent=4, separators=(',', ': '))
def epoch_to_iso8601(timestamp):
date = {"__type": "Date","iso": datetime.fromtimestamp(timestamp).isoformat()+".000Z"}
print date
return date
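
# save() below expects a module-level HTTP connection to the Parse REST API.
# A plausible setup is sketched here; the host is an assumption, not part of
# the original script:
connection = httplib.HTTPSConnection('api.parse.com', 443)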
def save(project):
connection.request('POST', '/1/classes/Project', json.dumps(project), {
"X-Parse-Application-Id": "QlnlX84K0A6TNyjX14aY56EFSMV00eCzdY8SWcuM",
"X-Parse-REST-API-Key": "VxEy0sA4fkJhBanZXoBkKcoYtZ57AiBvY6gkKfeh",
"Content-Type": "application/json"
})
result = json.loads(connection.getresponse().read())
return result
def create(project):
try:
#dictionary comprehension
good_keys = ["backers_count","slug","blurb","country","currency","goal","name","pledged"]
good = { key: project[key] for key in good_keys }
#flattening out nested dictionaries
good["category"] = project["category"]["name"]
good["deadline"] = epoch_to_iso8601(project["deadline"])
good["creation_date"] = epoch_to_iso8601(project["created_at"])
good["launch_date"] = epoch_to_iso8601(project["launched_at"])
good["project_url"] = project["urls"]["web"]["project"]
good["rewards_url"] = project["urls"]["web"]["rewards"]
good["proj_id"] = project["id"]
good["image"] = project["photo"]["1024x768"]
good["user_id"] = project["creator"]["id"]
#initialize scraper
url = good['project_url']
soup = soupify(url)
#scrape campaign data
description = soup.findAll("div", {"class": "full-description"})[0]
good["campaign_text"] = description.text
video_player = soup.findAll("div", {"class": "video-player"})
if video_player:
video = video_player[0]
good["campaign_video"] = video["data-video-url"]
desc_imgs = description.findAll("img")
if desc_imgs:
good["campaign_images"] = [div["src"] for div in desc_imgs]
desc_iframes = description.findAll("iframe")
if desc_iframes:
good["campaign_secondary_videos"] = [div["src"] for div in desc_iframes]
return good
    except Exception:
tb = traceback.format_exc()
print tb
#server = setup_server()
#send(server,tb)
#server.close()
return None
page = 1
url = "https://www.kickstarter.com/projects/1567780277/lusids-spaceship-fund?ref=ending_soon"
soup = soupify(url)
a = soup.findAll(attrs={"class":"full-description js-full-description responsive-media formatted-lists"})
|
# -*- coding: latin-1 -*-
import unicodeparser
import array
errorReport = False
depends = False
def allowErrors():
    global errorReport
    errorReport = True
    if not depends:
        import romerror
def ToCode(text, sErrors=False, compressed=False):
    global errorReport
    if sErrors:
        errorReport = True
    if errorReport and not depends:
        import romerror
data = []
while len(text)!=0:
#
i=0
if text[0:1]==u"\\":
#print "is escape %c" %text[1:2]
if text[1:2]=='x':
#print "read as raw hex"
data.append(int(text[2:6], 16))
text = text[6:len(text)]
elif text[1:2] == 'v':
#print "VarPrint"
data.append(0xfffe)
data.append(int(text[2:6], 16))
text = text[6:len(text)]
elif text[1:2] == 'z':
var = []
w = 0
while len(text)!=0:
if text[1:2] == 'z':
w += 1
var.append(int(text[2:6], 16))
text = text[6:len(text)]#\z0000
else:
break
data.append(w)
data.extend(var)
elif text[1:2]=='n':
data.append(0xe000)
text = text[2:len(text)]
elif text[1:2]=='r':
data.append(0x25BC)
text = text[2:len(text)]
elif text[1:2]=='f':
data.append(0x25BD)
text = text[2:len(text)]
else:
if errorReport:
romerror.unknownEscape(text[1:2])
print "unknown escape: %s" % text[1:2]
text = text[2:len(text)]
else:
            while not (text[0:6 - i] in unicodeparser.d or i == 6):
i=i+1
if i==6:
if errorReport:
romerror.charNotFound(text[0:1])
print "Char not found %s(%i)" % (text[0:1],ord(text[0:1]))
text = text[6-i+1:len(text)]
else:
data.append(unicodeparser.d[text[0:6-i]])
text = text[6-i:len(text)]
if compressed:
data.append(0x1FF)
bits=[]
for i in range(0,len(data)):
for j in range(0,9):
bits.append((data[i]>>j)&1)
tmp_uint16=0
data=[]
data.append(0xF100)
for i in range(0,len(bits)):
if i%15==0 and i!=0:
data.append(tmp_uint16)
tmp_uint16=0
tmp_uint16|=(bits[i]<<(i%15))
data.append(tmp_uint16)
data.append(0xffff)
    # Makefile() converts the list to an array; return the raw code list.
    return data
def Makefile(textarr,sError=False,compressed=False):
base = len(textarr)*8 + 4
ptrtable = []
rawdata = []
for i in range(len(textarr)):
data = ToCode(textarr[i],sError,compressed)
l=len(data)
ptrtable.extend([base, l])
rawdata.extend(data)
base += l*2
hdr = [len(textarr), 0]
#ptrtable.append(array.array('H', rawdata))
return array.array('H',hdr).tostring() + array.array('I', ptrtable).tostring() +array.array('H', rawdata).tostring()
#xml = pokexml.XMLReader("MyXML.xml")
#xml.ParseptextXML()
#p=Makefile(xml.strings[0][1])
#f = open("out.bin", "wb")
#f.write(p)
#f.close()
#a = ToCode(u"\\v0101\\x0001\\x0000 used\\xE000Pound!")
#f.write(a.tostring())
#f.close()
|
from greenws import __version__
project = "greenws"
copyright = "2021, Auri"
author = "Auri"
release = __version__
html_theme = "alabaster"
extensions = [
"sphinx.ext.autodoc",
"sphinx.ext.intersphinx",
]
autodoc_member_order = "bysource"
intersphinx_mapping = {
"python": ("https://docs.python.org/3/", None),
"gevent": ("http://www.gevent.org/", None),
}
exclude_patterns = ["_build"]
|
from sklearn.model_selection import train_test_split
from sklearn.metrics import classification_report
from sklearn.pipeline import make_pipeline
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.svm import SVC
from tqdm import tqdm
from nlu_flow.utils import meta_db_client
from nlu_flow.preprocessor.text_preprocessor import normalize
import dill
def train_domain_classifier():
# load dataset
scenario_table_list = ["intent-rules", "nlu-intent-entity-utterances"]
faq_table_list = ["nlu-faq-questions"]
out_of_domain_table_list = ["nlu-chitchat-utterances", "nlu-slang-utterances"]
utterances = []
labels = []
total_scenario_utter_num = 0
total_faq_utter_num = 0
total_OOD_utter_num = 0
## scenario domain
for scenario_table in scenario_table_list:
scenario_data = meta_db_client.get(scenario_table)
for data in tqdm(
scenario_data, desc=f"collecting table data : {scenario_table}"
):
if type(data) != dict:
print(f"check data type : {data}")
continue
total_scenario_utter_num += 1
if "data_type" in data.keys():
if data["data_type"] == "training":
if "faq" in data["intent_id"]["Intent_ID"].lower():
utterances.append(normalize(data["utterance"]))
labels.append("faq")
elif data["intent_id"]["Intent_ID"] == "intent_OOD":
utterances.append(normalize(data["utterance"]))
labels.append("out_of_domain")
elif data["intent_id"]["Intent_ID"] not in ["intent_미지원"]:
utterances.append(normalize(data["utterance"]))
labels.append("scenario")
else:
utterances.append(normalize(data["utterance"]))
labels.append("scenario")
## get synonym data for data augmentation(for FAQ domain data augmentation)
synonyms = []
synonym_data = meta_db_client.get("meta-entities")
for data in tqdm(
synonym_data, desc=f"collecting synonym data for data augmentation ..."
):
if type(data) != dict:
print(f"check data type : {data}")
continue
synonyms.append([
normalize(each_synonym.get("synonym"))
for each_synonym in data.get("meta_synonyms")
] + [normalize(data.get("Entity_Value"))])
## FAQ domain
for faq_table in faq_table_list:
faq_data = meta_db_client.get(faq_table)
for data in tqdm(faq_data, desc=f"collecting table data : {faq_table}"):
target_utterance = normalize(data["question"])
if total_faq_utter_num > max(total_scenario_utter_num, total_OOD_utter_num):
break
# check synonym is included
for synonym_list in synonyms:
for i, prev_value in enumerate(synonym_list):
if prev_value in target_utterance:
for j, post_value in enumerate(synonym_list):
if i == j:
continue
utterances.append(
target_utterance.replace(prev_value, post_value)
)
labels.append("faq")
total_faq_utter_num += 1
break
utterances.append(target_utterance)
labels.append("faq")
total_faq_utter_num += 1
## out of domain
for ood_table in out_of_domain_table_list:
ood_data = meta_db_client.get(ood_table)
for data in tqdm(ood_data, desc=f"collecting table data : {ood_table}"):
utterances.append(normalize(data["utterance"]))
labels.append("out_of_domain")
total_OOD_utter_num += 1
    ### add some additional out-of-domain data to avoid class imbalance
slang_training_data = meta_db_client.get("nlu-slang-trainings", max_num=total_scenario_utter_num)
for i, data in tqdm(
enumerate(slang_training_data),
desc=f"collecting table data : nlu-slang-trainings ...",
):
if i > total_scenario_utter_num:
break
if type(data) != dict:
print (f'check data type: {data}')
continue
utterances.append(normalize(data["utterance"]))
labels.append("out_of_domain")
total_OOD_utter_num += 1
X_train, X_test, y_train, y_test = train_test_split(
utterances, labels, random_state=88
)
print ('utterance data distribution')
print (f'scenario : {total_scenario_utter_num}')
print (f'FAQ : {total_faq_utter_num}')
print (f'out of domain : {total_OOD_utter_num}')
svc = make_pipeline(CountVectorizer(analyzer="char_wb"), SVC(probability=True))
print("domain classifier training(with SVC)")
svc.fit(X_train, y_train)
print("model training done, validation reporting")
y_pred = svc.predict(X_test)
print(classification_report(y_test, y_pred))
reportDict = {}
for k, v in classification_report(y_test, y_pred, output_dict=True).items():
if 'avg' in k:
reportDict[k] = v
with open("report.md", "w") as reportFile:
print("domain classification result\n", file=reportFile)
print(reportDict, file=reportFile)
# save domain classifier model
with open("domain_classifier_model.svc", "wb") as f:
dill.dump(svc, f)
print("domain_classifier model saved : domain_classifier_model.svc")
train_domain_classifier()
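
# Hedged usage sketch: loading the saved classifier elsewhere and predicting
# the domain of a raw utterance (normalize() is the same preprocessor used
# during training):
#
#     import dill
#     from nlu_flow.preprocessor.text_preprocessor import normalize
#
#     with open("domain_classifier_model.svc", "rb") as f:
#         domain_classifier = dill.load(f)
#     domain = domain_classifier.predict([normalize("some user utterance")])[0]
#     # -> one of "scenario", "faq", "out_of_domain"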
|
"""
@author: Gabriele Girelli
@contact: [email protected]
"""
from fastx_barber import qual
from fastx_barber.const import DEFAULT_PHRED_OFFSET
DEFAULT_FILTER_QUAL_FLAGS = ["flag,30,.2", "test,15,.1"]
def test_QualityIO():
qio = qual.QualityIO()
assert 32 == qio.phred_to_qscore("A")[0]
assert 14 == qio.phred_to_qscore("/")[0]
assert 60 == qio.phred_to_qscore("]")[0]
assert 36 == qio.phred_to_qscore("E")[0]
assert [32, 14, 60, 36] == qio.phred_to_qscore("A/]E")
qio = qual.QualityIO(64)
assert 32 - 31 == qio.phred_to_qscore("A")[0]
assert 60 - 31 == qio.phred_to_qscore("]")[0]
assert 36 - 31 == qio.phred_to_qscore("E")[0]
assert [1, 29, 5] == qio.phred_to_qscore("A]E")
def test_QualityFilter():
qf = qual.QualityFilter(30, 0.2)
assert qf.qual_pass_filter("AAAA")
assert not qf.qual_pass_filter("/AAA")
assert qf.qual_pass_filter("AE]]]/")
assert 3 == qf.parsed
assert 2 == qf.passed
def validate_filters(filters):
assert "qflag" in filters
assert isinstance(filters["qflag"], qual.QualityFilter)
assert 30 == filters["qflag"].min_qscore
assert 0.2 == filters["qflag"].max_perc
assert "qtest" in filters
assert isinstance(filters["qtest"], qual.QualityFilter)
assert 15 == filters["qtest"].min_qscore
assert 0.1 == filters["qtest"].max_perc
def test_QualityFilter_init_flag_filters():
filters = qual.QualityFilter.init_flag_filters(
DEFAULT_FILTER_QUAL_FLAGS, DEFAULT_PHRED_OFFSET
)
validate_filters(filters)
def test_dummy_apply_filter_flag():
assert qual.dummy_apply_filter_flag({}, {})
def test_apply_filter_flag():
filters = qual.QualityFilter.init_flag_filters(
DEFAULT_FILTER_QUAL_FLAGS, DEFAULT_PHRED_OFFSET
)
assert qual.apply_filter_flag(
{"qflag": ("AAAA", -1, -1), "qtest": ("AAAAAAAAA", -1, -1)}, filters
)
assert not qual.apply_filter_flag(
{"qflag": ("/AAA", -1, -1), "qtest": ("AAAAAAAAA", -1, -1)}, filters
)
assert not qual.apply_filter_flag(
{"qflag": ("AAAA", -1, -1), "qtest": ("/AAAAAAAA", -1, -1)}, filters
)
assert not qual.apply_filter_flag(
{"qflag": ("/AAA", -1, -1), "qtest": ("/AAAAAAAA", -1, -1)}, filters
)
assert qual.apply_filter_flag(
{"qflag": ("/EEAAAA", -1, -1), "qtest": ("/EEAAAAAAAA", -1, -1)}, filters
)
def test_setup_qual_filters():
qff, ff = qual.setup_qual_filters(None, DEFAULT_PHRED_OFFSET)
assert 0 == len(qff)
assert qual.dummy_apply_filter_flag == ff
qff, ff = qual.setup_qual_filters(DEFAULT_FILTER_QUAL_FLAGS, DEFAULT_PHRED_OFFSET)
assert 2 == len(qff)
assert qual.apply_filter_flag == ff
validate_filters(qff)
|
import message
print(message.health_room_msg)
print(message.developer_question_msg)
print(message.bus_to_sin_error_msg)
print(message.first_room_msg)
print(message.wifi_msg)
print(message.student_food_info_msg)
# The Korean literal below translates to:
# "■ Sinchang Station subway departure times ■ / • The next train is "
print('■ 신창역 지하철 출발 시간 ■\n\n• 이번 지하철은 ')
|
from flask import Blueprint, jsonify
from app.models import Bar, Review, Image, favorites, User, Reservation, db
from flask import request
import math
import json
from datetime import datetime
from datetime import timedelta
search_routes = Blueprint('search', __name__)
# body: JSON.stringify({
# barId: idComp,
# date,
# time,
# userId,
# partySize: 2,
# }
@search_routes.route('/reservation', methods=['POST'])
def reservation():
data = request.get_json()
if data is None:
data = json.loads(request.data.decode('utf-8'))
if (data["userId"]):
date_str = data['date'] + ' ' + data['time']
format_str = '%m/%d/%Y %I:%M %p' # The format
print(date_str)
datetime_obj = datetime.strptime(date_str, format_str)
reservation = Reservation(partySize=int(data["partySize"]), userId=int(
data["userId"]), barId=int(data["barId"]),
time=datetime_obj, date=datetime_obj)
db.session.add(reservation)
db.session.commit()
return {"message": 'received'}
return {'message': 'bad data'}
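
# Hedged example request for the endpoint above (host, port, and any URL
# prefix depend on how the blueprint is registered; values are placeholders):
#
#   curl -X POST http://localhost:5000/reservation \
#     -H 'Content-Type: application/json' \
#     -d '{"barId": 1, "date": "12/29/2017", "time": "7:30 PM",
#          "userId": 2, "partySize": 2}'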
@search_routes.route('/')
def search():
coord = request.args.get('coord')
business = request.args.get('business')
searchId = request.args.get('id')
day = request.args.get('day')
time = request.args.get('time')
date = request.args.get('date')
def parse_results(res):
d = res.to_dict()
# d['images'] = [img.to_dict() for img in res.images]
time_range = d['dayAndTime'][day]
time_range = time_range.split('-')
open_time = time_range[0]
close_time = time_range[1]
reservation_strings = []
final_time = time
final_day = day
        date_str = date  # e.g. '12/29/2017'
        format_str = '%m/%d/%Y'  # the expected date format
datetime_obj = datetime.strptime(date_str, format_str)
        # Roll over to the next day's opening time when the requested time
        # falls outside today's hours.
        if not (time >= open_time or time <= close_time):
days = ['monday', 'tuesday', 'wednesday',
'thursday', 'friday', 'saturday', 'sunday']
day_index = days.index(day)
day_index = day_index + 1
datetime_obj = datetime_obj + timedelta(days=1)
if day_index > 6:
day_index = 0
final_day = days[day_index]
final_time = d['dayAndTime'][final_day]
final_time = final_time.split('-')
final_time = final_time[0]
time_range = d['dayAndTime'][final_day]
time_range = time_range.split('-')
open_time = time_range[0]
close_time = float(time_range[1])
for i in range(0, 5):
base_time = float(final_time) + .5*i
datetime_copy = datetime_obj
if ((close_time > 6 and base_time <= close_time) or
(close_time < 6 and base_time <= close_time+24)):
suffix = ' PM'
if base_time >= 12:
base_time = base_time - 12
if base_time >= 12:
datetime_copy = datetime_copy + timedelta(days=1)
base_time = base_time - 12
suffix = ' AM'
else:
suffix = ' AM'
str_time = ''
if base_time == 0 or base_time == .5:
base_time = base_time + 12
if base_time != math.floor(base_time):
str_time = str(math.floor(base_time))+':30'
else:
str_time = str(math.floor(base_time))+':00'
reservation_strings.append(
[f'{datetime_copy.month}/{datetime_copy.day}/{datetime_copy.year}', str_time + suffix])
# print('\n', reservation_strings,'\n')
d['time_slots'] = reservation_strings
d['reviews'] = [rev.to_dict() for rev in res.reviews]
d['review_total'] = len(d['reviews'])
        d['ratings'] = {
            "overall": sum(rev['overall'] for rev in d['reviews']) / d['review_total'],
            "food": sum(rev['food'] for rev in d['reviews']) / d['review_total'],
            "service": sum(rev['service'] for rev in d['reviews']) / d['review_total'],
            "ambience": sum(rev['ambience'] for rev in d['reviews']) / d['review_total'],
            "value": sum(rev['value'] for rev in d['reviews']) / d['review_total'],
        }
del d['reviews']
return d
if business == '' and coord == 'NoLocation':
return {"searchResults": []}
if searchId and searchId != 'undefined' and searchId != 'null':
searchId = int(searchId)
lng = 0
lat = 0
lngRange = 10000
latRange = 10000
if(coord != 'NoLocation'):
coord = coord.split(',')
lng = float(coord[0])
lat = float(coord[1])
latRange = .33
lngRange = .33
search_str = f'%{business}%'
search_results = Bar.query.join(Review).join(Image).filter(
Bar.name.ilike(search_str),
Bar.longitude.between(lng-lngRange, lng+lngRange),
Bar.latitude.between(lat-latRange, lat+latRange),
).all()
search_results = list(map(parse_results, search_results))
if(coord == 'NoLocation'):
if(len(search_results) > 0):
lng = search_results[0]['longitude']
lat = search_results[0]['latitude']
# print('\n', 'Search Results \n', search_results,
# '\n length \n', len(search_results), '\n')
# return {'test': 1.264}
return jsonify({"searchResults": search_results, "searchCenter": {"longitude": lng, "latitude": lat}})
@search_routes.route('/popular')
def popular():
def parse_results(res):
d = res.to_dict()
d['reviews'] = [rev.to_dict() for rev in res.reviews]
d['review_total'] = len(d['reviews'])
        d['ratings'] = {
            "overall": sum(rev['overall'] for rev in d['reviews']) / d['review_total'],
            "food": sum(rev['food'] for rev in d['reviews']) / d['review_total'],
            "service": sum(rev['service'] for rev in d['reviews']) / d['review_total'],
            "ambience": sum(rev['ambience'] for rev in d['reviews']) / d['review_total'],
            "value": sum(rev['value'] for rev in d['reviews']) / d['review_total'],
        }
del d['reviews']
return d
search_results = Bar.query.join(Review).join(Image).group_by(
Bar.id).order_by(db.func.count(Review.id).desc()).limit(4).all()
winery = Bar.query.join(Review).join(
Image).filter(Bar.name.ilike("%wine%")).all()
winery = winery[0:4]
brewery = Bar.query.join(Review).join(
Image).filter(Bar.name.ilike("%brew%")).all()
brewery = brewery[0:4]
search_results = list(map(parse_results, search_results))
winery_results = list(map(parse_results, winery))
brewery_results = list(map(parse_results, brewery))
return jsonify({"mostPopular": search_results, "winery": winery_results, "brewery": brewery_results})
|
import autoarray.plot as aplt
import pytest
from os import path
directory = path.dirname(path.realpath(__file__))
@pytest.fixture(name="plot_path")
def make_plot_path_setup():
return path.join(
"{}".format(path.dirname(path.realpath(__file__))),
"files",
"plots",
"fit_interferometer",
)
def test__fit_quantities_are_output(fit_interferometer_7, plot_path, plot_patch):
fit_interferometer_plotter = aplt.FitInterferometerPlotter(
fit=fit_interferometer_7,
mat_plot_1d=aplt.MatPlot1D(output=aplt.Output(path=plot_path, format="png")),
mat_plot_2d=aplt.MatPlot2D(output=aplt.Output(path=plot_path, format="png")),
)
fit_interferometer_plotter.figures_2d(
visibilities=True,
noise_map=True,
signal_to_noise_map=True,
model_visibilities=True,
residual_map_real=True,
residual_map_imag=True,
normalized_residual_map_real=True,
normalized_residual_map_imag=True,
chi_squared_map_real=True,
chi_squared_map_imag=True,
dirty_image=True,
dirty_noise_map=True,
dirty_signal_to_noise_map=True,
dirty_model_image=True,
dirty_residual_map=True,
dirty_normalized_residual_map=True,
dirty_chi_squared_map=True,
)
assert path.join(plot_path, "visibilities.png") in plot_patch.paths
assert path.join(plot_path, "noise_map.png") in plot_patch.paths
assert path.join(plot_path, "signal_to_noise_map.png") in plot_patch.paths
assert path.join(plot_path, "model_visibilities.png") in plot_patch.paths
    assert (
        path.join(plot_path, "real_residual_map_vs_uv_distances.png")
        in plot_patch.paths
    )
    assert (
        path.join(plot_path, "imag_residual_map_vs_uv_distances.png")
        in plot_patch.paths
    )
assert (
path.join(plot_path, "real_normalized_residual_map_vs_uv_distances.png")
in plot_patch.paths
)
assert (
path.join(plot_path, "imag_normalized_residual_map_vs_uv_distances.png")
in plot_patch.paths
)
    assert (
        path.join(plot_path, "real_chi_squared_map_vs_uv_distances.png")
        in plot_patch.paths
    )
    assert (
        path.join(plot_path, "imag_chi_squared_map_vs_uv_distances.png")
        in plot_patch.paths
    )
assert path.join(plot_path, "dirty_image_2d.png") in plot_patch.paths
assert path.join(plot_path, "dirty_noise_map_2d.png") in plot_patch.paths
assert path.join(plot_path, "dirty_signal_to_noise_map_2d.png") in plot_patch.paths
assert path.join(plot_path, "dirty_model_image_2d.png") in plot_patch.paths
assert path.join(plot_path, "dirty_residual_map_2d.png") in plot_patch.paths
assert (
path.join(plot_path, "dirty_normalized_residual_map_2d.png") in plot_patch.paths
)
assert path.join(plot_path, "dirty_chi_squared_map_2d.png") in plot_patch.paths
plot_patch.paths = []
fit_interferometer_plotter.figures_2d(
visibilities=True,
noise_map=False,
signal_to_noise_map=False,
model_visibilities=True,
chi_squared_map_real=True,
chi_squared_map_imag=True,
)
assert path.join(plot_path, "visibilities.png") in plot_patch.paths
assert path.join(plot_path, "noise_map.png") not in plot_patch.paths
assert path.join(plot_path, "signal_to_noise_map.png") not in plot_patch.paths
assert path.join(plot_path, "model_visibilities.png") in plot_patch.paths
assert (
path.join(plot_path, "real_residual_map_vs_uv_distances.png")
not in plot_patch.paths
)
assert (
path.join(plot_path, "imag_residual_map_vs_uv_distances.png")
not in plot_patch.paths
)
assert (
path.join(plot_path, "real_normalized_residual_map_vs_uv_distances.png")
not in plot_patch.paths
)
assert (
path.join(plot_path, "imag_normalized_residual_map_vs_uv_distances.png")
not in plot_patch.paths
)
assert (
path.join(plot_path, "real_chi_squared_map_vs_uv_distances.png")
in plot_patch.paths
)
assert (
path.join(plot_path, "imag_chi_squared_map_vs_uv_distances.png")
in plot_patch.paths
)
def test__fit_sub_plots(fit_interferometer_7, plot_path, plot_patch):
fit_interferometer_plotter = aplt.FitInterferometerPlotter(
fit=fit_interferometer_7,
mat_plot_1d=aplt.MatPlot1D(output=aplt.Output(path=plot_path, format="png")),
mat_plot_2d=aplt.MatPlot2D(output=aplt.Output(path=plot_path, format="png")),
)
fit_interferometer_plotter.subplot_fit_interferometer()
assert path.join(plot_path, "subplot_fit_interferometer.png") in plot_patch.paths
fit_interferometer_plotter.subplot_fit_dirty_images()
assert path.join(plot_path, "subplot_fit_dirty_images.png") in plot_patch.paths
|
"""
This class is the model for the Minesweeper program.
"""
import os
import random
import math
from controller import Controller
from utility import Option
class Model:
"""
Stores data and logic for Minesweeper.
"""
# Game save information.
SAVE_PATH = "./saves/"
SAVE_FILE = SAVE_PATH + "minefield.save"
def __init__(self, screen):
"""
Constructs an instance of Model and returns it.
Args:
screen (_CursesWindow): A standard screen object from the
curses library.
"""
# The controller class for user interaction.
self.controller = Controller(self, screen)
# Controls the game loop.
self.running = True
# The current view.
self.view = None
# Minefield Options.
self.options = {
"easy": Option(10, 10, 10),
"medium": Option(30, 20, 15),
"hard": Option(60, 30, 20),
"custom": Option(10, 10, 10),
"custom_minimums": Option(2, 2, 1),
"custom_maximums": Option(1024, 1024, 99)
}
# An Option to generate a minefield with.
self.difficulty = None
# A user feedback value for minefield generation (percentage.)
self.gen_progress = 0
# The current state of the minefield.
self.minefield = None
# The currently hovered cell's x-position.
self.hover_x = 0
# The currently hovered cell's y-position.
self.hover_y = 0
# The total number of mines in the minefield.
self.num_mines = 0
# The number of flagged cells in the minefield.
self.num_flagged = 0
class Cell:
"""
Stores information about an individual cell in the minefield.
"""
def __init__(self, opened, mine, flagged, number):
"""
Constructs an instance of Cell and returns it.
Args:
opened (bool): Whether or not this Cell has been opened.
mine (bool): Whether or not this Cell is a mine.
flagged (bool): Whether or not this Cell has been
flagged.
number (int): The supposed number of mines around this
Cell.
"""
self.opened = opened
self.mine = mine
self.flagged = flagged
self.number = number
def is_opened(self):
"""
Checks if this cell has been opened.
Returns:
bool: True if opened, False otherwise.
"""
return self.opened
def open(self):
"""
Opens this cell.
"""
self.opened = True
def is_mine(self):
"""
Checks if this cell is a mine.
Returns:
bool: True if mine, False otherwise.
"""
return self.mine
def set_mine(self, mine):
"""
Sets whether or not this cell is a mine.
Args:
mine (bool): True to make this cell a mine, False
otherwise.
"""
self.mine = mine
def is_flagged(self):
"""
Checks if this cell is flagged.
Returns:
bool: True if flagged, False otherwise.
"""
return self.flagged
def set_flagged(self, flagged):
"""
Sets whether or not this cell is flagged.
Args:
flagged (bool): True to flag this cell, False otherwise.
"""
self.flagged = flagged
def get_number(self):
"""
Gets the number of surrounding mines.
Returns:
int: The number of surrounding mines.
"""
return self.number
def set_number(self, number):
"""
Sets the supposed number of mines around this cell.
Args:
number (int): The supposed number of mines around this
cell.
"""
self.number = number
def save_minefield(self):
"""
Saves the minefield.
Minefield save format:
HEADER:
10 bits: LENGTH
10 bits: HEIGHT
10 bits: HOVERX
10 bits: HOVERY
DATA:
3n bits: CELL
LENGTH: One less than the length of the minefield.
HEIGHT: One less than the height of the minefield.
HOVERX: One less than the x-position of the hovered cell.
HOVERY: One less than the y-position of the hovered cell.
DATA: Sets of flags representing a cell. Each cell,
C_n(O, M, F), where C_n is the nth cell (starting at n=0),
is reconstructed into minefield position x=i%(LENGTH+1),
y=i//(LENGTH+1). O, the "opened" flag, is only True if the
cell has been opened. M, the "mine" flag, is only True if
the cell is a mine. F, the "flagged" flag, is only True if
the cell has been flagged. Any extra cells C_n where
n>=#cells should be ignored.
"""
# Parameters
length = self.difficulty.l
height = self.difficulty.h
hover_x = self.hover_x
hover_y = self.hover_y
        # "wb" creates the save file if it is absent and truncates an
        # existing one.
        open_mode = "wb"
# Make header (length and height)
piece_a = ((length-1)&0x3FF)<<30
piece_b = ((height-1)&0x3FF)<<20
piece_c = ((hover_x-1)&0x3FF)<<10
piece_d = (hover_y-1)&0x3FF
combined = piece_a|piece_b|piece_c|piece_d
bin_header = combined.to_bytes(5, "big")
with open(self.SAVE_FILE, open_mode) as save:
# Write 5 byte header.
save.write(bin_header)
# Write each minefield cell.
# Use a 3-byte buffer to save 8 cells at a time.
mt_buffer = 0xFFFFFF
buffer = mt_buffer
current = 0
max_index = length*height
while current < max_index:
for buffer_index in range(8):
if not current < max_index:
break
# Organize cell information.
cell = self.minefield[current//length][current%length]
opened = cell.is_opened()
mine = cell.is_mine()
flagged = cell.is_flagged()
cell_flags = opened<<2|mine<<1|flagged
                    # Put the cell into the buffer: clear its 3-bit slot,
                    # then OR the flags in.
                    shift = 3 * (7 - buffer_index)
                    buffer &= ~(0x7 << shift) & 0xFFFFFF
                    buffer |= cell_flags << shift
current += 1
# Write the buffer to file and reset.
save.write(buffer.to_bytes(3, "big"))
buffer = mt_buffer
def load_minefield(self):
"""
Loads the minefield.
"""
if not self.has_saved_game():
return
with open(self.SAVE_FILE, "rb") as save:
# Extract the header.
header = int.from_bytes(save.read(5), "big")
hover_y = (header&0x3FF)+1
hover_x = ((header>>10)&0x3FF)+1
height = ((header>>20)&0x3FF)+1
length = ((header>>30)&0x3FF)+1
self.hover_x = hover_x
self.hover_y = hover_y
# Create an empty minefield.
self.mk_mt_minefield(length, height)
# Extract cells.
current = 0
max_index = length*height
while current < max_index:
# Read the first 3 bytes into buffer.
buffer = int.from_bytes(save.read(3), "big")
for buffer_index in range(8):
if not current < max_index:
break
# Extract cell.
cell_flags = (buffer>>(3*(7-buffer_index)))&0x7
flagged = cell_flags&0x1
mine = (cell_flags>>1)&0x1
opened = (cell_flags>>2)&0x1
cell = self.Cell(opened, mine, flagged, 0)
# Write cell into minefield.
self.minefield[current//length][current%length] = cell
current += 1
# Re-count numbers.
self.num_mines = 0
self.num_flagged = 0
            for y_pos in range(height):
                for x_pos in range(length):
                    cell = self.minefield[y_pos][x_pos]
if cell.is_opened():
continue
if cell.is_flagged():
self.num_flagged += 1
if cell.is_mine():
# Increment mines.
self.num_mines += 1
# Increment the numbers around the mine.
self.increment_numbers(x_pos, y_pos, 0, 0, length,
height)
# xbound = (max(0, x_pos - 1), min(length, x_pos + 1))
# ybound = (max(0, y_pos - 1), min(height, y_pos + 1))
# for x_near in range(*xbound):
# for y_near in range(*ybound):
# near_cell = self.minefield[y_near][x_near]
# near_cell.set_number(near_cell.get_number() + 1)
# Induce the difficulty.
            density = (self.num_mines * 100) // (length * height)
self.difficulty = Option(length, height, density)
def set_hover_x(self, pos):
"""
Sets the hover_x value of the camera.
Args:
pos (int): The hover_x position.
"""
self.hover_x = pos
def set_hover_y(self, pos):
"""
Sets the hover_y value of the camera.
Args:
pos (int): The hover_y position.
"""
self.hover_y = pos
def mk_mt_minefield(self, length, height):
"""
Overrides the minefield in the model with an empty minefield of
size length * height.
Args:
length (int): the length of the minefield.
height (int): the height of the minefield.
"""
mk_mt_cell = lambda: self.Cell(False, False, False, 0)
self.minefield = [
[mk_mt_cell() for _ in range(length)] for _ in range(height)
]
def generate_minefield(self):
"""
Generates a minefield based on the options in self.difficulty.
"""
# Parameters.
length = self.difficulty.l
height = self.difficulty.h
mines = self.calculate_mines(self.difficulty)
# Set up an empty minefield.
self.mk_mt_minefield(length, height)
self.num_mines = mines
self.num_flagged = 0
# Done when mines_left == 0.
mines_left = mines
while mines_left > 0:
# Pick a random cell.
x_pos = random.randint(0, length - 1)
y_pos = random.randint(0, height - 1)
# Try to make it a mine.
selected_cell = self.minefield[y_pos][x_pos]
if not selected_cell.is_mine():
selected_cell.set_mine(True)
# Increment the numbers around the mine.
self.increment_numbers(x_pos, y_pos, 0, 0, length,
height)
# xbound = (max(0, xselected - 1), min(length, xselected + 1))
# ybound = (max(0, yselected - 1), min(height, yselected + 1))
# for xpos in range(*xbound):
# for ypos in range(*ybound):
# cell = self.minefield[ypos][xpos]
# cell.set_number(cell.get_number() + 1)
# Update gen_progress.
mines_left -= 1
self.gen_progress = round((1 - (mines_left / mines)) * 100)
def increment_numbers(self, x_pos, y_pos, x_min, y_min, x_max, y_max):
"""
Increments the numbers around a Cell (including the center
Cell.)
Args:
x_pos (int): The x-position of the center Cell.
y_pos (int): The y-position of the center Cell.
x_min (int): The minimum x-position a Cell could have.
y_min (int): The minimum y-position a Cell could have.
x_max (int): One more than the maximum x-position a Cell
could have.
            y_max (int): One more than the maximum y-position a Cell
could have.
"""
        # range() excludes its upper bound, so pos + 2 keeps the neighbour
        # at pos + 1 inside the 3x3 window.
        xbound = (max(x_min, x_pos - 1), min(x_max, x_pos + 2))
        ybound = (max(y_min, y_pos - 1), min(y_max, y_pos + 2))
for x_near in range(*xbound):
for y_near in range(*ybound):
cell = self.minefield[y_near][x_near]
cell.set_number(cell.get_number() + 1)
def reset_gen_progress(self):
"""
Resets the gen_progress.
"""
self.gen_progress = 0
def get_gen_progress(self):
"""
Gets a user feedback value for minefield generation.
Returns:
int: A percentage.
"""
return self.gen_progress
@staticmethod
def calculate_mines(option):
"""
Calculates the number of mines in a minefield.
Args:
option (Option): An Option containing length, height, and
mine density.
Returns:
int: The number of mines generated.
"""
min_mines = 1
max_mines = option.l * option.h - 1
raw_mines = math.floor(option.l * option.h * option.d / 100)
return min(max_mines, max(min_mines, raw_mines))
def set_difficulty(self, option):
"""
Sets the difficulty of the minefield to generate.
Args:
option (Option): The Option containing length, height, and
density information.
"""
self.difficulty = option
def set_custom_field_options(self, option):
"""
Sets the length, height, and density values for generating a
custom minefield.
Args:
option (Option): An Option containing length, height, and
mine density.
"""
self.options["custom"] = option
def has_saved_game(self):
"""
Checks if there's a save file.
Returns:
bool: True if a save file exists, False otherwise.
"""
return os.path.exists(self.SAVE_FILE)
def delete_saved_game(self):
"""
Deletes the save file.
"""
if self.has_saved_game():
os.remove(self.SAVE_FILE)
def change_view(self, view):
"""
Sets the next view to be served to the user.
Args:
view (View): The next view.
"""
self.view = view
def start(self):
"""
Starts the game loop at the main menu view.
"""
self.controller.start()
self.loop()
def stop_game_loop(self):
"""
Stops the game loop.
"""
self.running = False
def loop(self):
"""
The main game loop. The view may change at any time.
"""
while self.running:
self.view.loop()
|
#!coding:utf-8
import os
path_can_have = ['/app/', '/fixtures/', '/src/', '/web/']
path_cant_have = ['/vendor/', '/JMS/', '/cache/']
extension_message_php = """
// This file is part of the ProEthos Software.
//
// Copyright 2013, PAHO. All rights reserved. You can redistribute it and/or modify
// ProEthos under the terms of the ProEthos License as published by PAHO, which
// restricts commercial use of the Software.
//
// ProEthos is distributed in the hope that it will be useful, but WITHOUT ANY
// WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
// PARTICULAR PURPOSE. See the ProEthos License for more details.
//
// You should have received a copy of the ProEthos License along with the ProEthos
// Software. If not, see
// https://github.com/bireme/proethos2/blob/master/doc/license.md
"""
extension_message_css = """
/*
* This file is part of the ProEthos Software.
*
* Copyright 2013, PAHO. All rights reserved. You can redistribute it and/or modify
* ProEthos under the terms of the ProEthos License as published by PAHO, which
* restricts commercial use of the Software.
*
* ProEthos is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
* PARTICULAR PURPOSE. See the ProEthos License for more details.
*
* You should have received a copy of the ProEthos License along with the ProEthos
* Software. If not, see
* https://github.com/bireme/proethos2/blob/master/doc/license.md
*/
"""
for root, dirs, files in os.walk("."):
for file in files:
file = os.path.join(root, file)
continue_with_file = False
for item in path_can_have:
if item in file:
continue_with_file = True
for item in path_cant_have:
if item in file:
continue_with_file = False
if continue_with_file:
with open(file) as handle:
data = handle.read()
if not 'This file is part of the ProEthos Software' in data:
if file.endswith(".php"):
if data.count("<?php") == 1:
data = data.replace("<?php", "<?php\n\n%s\n" % extension_message_php.strip())
with open(file, 'w') as output:
output.write(data)
                    else:
                        print("ERROR: %s" % file)
if file.endswith(".css"):
data = "%s\n\n%s" % (extension_message_css, data)
with open(file, 'w') as output:
output.write(data)
if file.endswith(".js"):
data = "%s\n\n%s" % (extension_message_php, data)
with open(file, 'w') as output:
output.write(data)
|
from __future__ import division
from __future__ import print_function
from pathlib import Path
import sys
project_path = Path(__file__).resolve().parents[1]
sys.path.append(str(project_path))
import tensorflow as tf
import os
import numpy as np
import mygene
import csv
# Settings
flags = tf.compat.v1.flags
FLAGS = flags.FLAGS
flags.DEFINE_string('dataset', 'brc_microarray_usa', 'Dataset string.')
flags.DEFINE_string('clustering_method', 'geometric_ap', 'Name of output folder for the clustering method.')
flags.DEFINE_integer('distance_threshold', 3, 'Network neighborhood threshold.')
flags.DEFINE_string('clusters_output', 'clusters.txt', 'Name of desired output file of obtained clusters.')
flags.DEFINE_string('clusters_symbols', 'clusters.symbols.txt', 'Name of output file of ensembl names of cluster genes.')
flags.DEFINE_integer('hidden_layers', 1, 'Number of hidden layers.')
flags.DEFINE_integer('embedding_size', 8, 'Dimension of latent space.')
flags.DEFINE_float('p_val', 0.05, 'P-value for t-test.')
#Check data availability
if not os.path.isfile("{}/data/output/{}/clustering/{}/{}L_dim_hidden_{}_p_val_{}_radius_{}/{}".format(project_path, FLAGS.dataset, FLAGS.clustering_method, FLAGS.hidden_layers, FLAGS.embedding_size, FLAGS.p_val, FLAGS.distance_threshold, FLAGS.clusters_output)):
sys.exit("{} file is not available under /data/output/{}/clustering/{}/{}L_dim_hidden_{}_p_val_{}_radius_{}/".format(FLAGS.clusters_output, FLAGS.dataset, FLAGS.clustering_method, FLAGS.hidden_layers, FLAGS.embedding_size, FLAGS.p_val, FLAGS.distance_threshold))
print("----------------------------------------")
print("----------------------------------------")
print("Clustering configuration:")
print("Dataset: {}".format(FLAGS.dataset))
print("Clustering Method: {}".format(FLAGS.clustering_method))
print("Clustering radius: {}".format(FLAGS.distance_threshold))
print("GCN hidden layers: {}".format(FLAGS.hidden_layers))
print("GCN embedding size: {}".format(FLAGS.embedding_size))
print("GCN p_val: {}".format(FLAGS.p_val))
print("----------------------------------------")
print("----------------------------------------")
def prepare_clusters_ensembl_symbols(clusters_file="clusters.txt"):
Ids = np.genfromtxt("{}/data/output/network/ppi.ids.txt".format(project_path), dtype=np.dtype(str), delimiter="\t")
protein_ids = {}
ids_protein = {}
for i in range(Ids.shape[0]):
protein_ids[Ids[i,0]]=Ids[i,1]
ids_protein[Ids[i,1]]=Ids[i,0]
mg = mygene.MyGeneInfo()
map_ensp_symbol={}
print("Request gene symbols by gene query web service...")
annotations = mg.querymany(protein_ids.keys(), scopes='ensembl.protein', fields='symbol', species='human')
#For each query map ENSPs to the gene symbol
for response in annotations:
ensp = response['query']
if('symbol' in response):
map_ensp_symbol[ensp] = response['symbol']
else:
map_ensp_symbol[ensp] = ensp
Clusters = open("{}/data/output/{}/clustering/{}/{}L_dim_hidden_{}_p_val_{}_radius_{}/{}".format(project_path, FLAGS.dataset, FLAGS.clustering_method, FLAGS.hidden_layers, FLAGS.embedding_size, FLAGS.p_val, FLAGS.distance_threshold, clusters_file), encoding="utf-8")
with open("{}/data/output/{}/clustering/{}/{}L_dim_hidden_{}_p_val_{}_radius_{}/{}".format(project_path, FLAGS.dataset, FLAGS.clustering_method, FLAGS.hidden_layers, FLAGS.embedding_size, FLAGS.p_val, FLAGS.distance_threshold, FLAGS.clusters_symbols), "w", newline='', encoding="utf-8") as f:
w_cluster = csv.writer(f, delimiter ='\t')
for line in Clusters:
line = line.strip()
columns = line.split("\t")
cl = []
for i in range(1, len(columns)): #Skip first column that contains the exemplar
if(ids_protein[columns[i]] in map_ensp_symbol.keys()):
cl.append(map_ensp_symbol[ids_protein[columns[i]]])
w_cluster.writerow(cl)
Clusters.close()
print("Preparing gene symbols for saved clusters ...")
prepare_clusters_ensembl_symbols(clusters_file=FLAGS.clusters_output)
print("Clusters with gene symbols saved")
|
from django.db import models
from django.db.models.deletion import CASCADE
from accounts.models import User
class Leave(models.Model):
title = models.CharField(max_length=255, default='', blank=False)
user = models.ForeignKey(User, on_delete=CASCADE, blank=False, related_name='leaves')
creationDate = models.DateTimeField(auto_now_add=True)
approver = models.ForeignKey(User, on_delete=CASCADE, blank=False, related_name='leave_approvals')
approved = models.BooleanField(default=False, blank=True)
approveDate = models.DateTimeField(default=None, blank=True, null=True)
startDate = models.DateTimeField(default=None, blank=False)
endDate = models.DateTimeField(default=None, blank=False)
dayCount = models.PositiveIntegerField(default=0, blank=False)
comment = models.TextField(default='', blank=False)
def as_json(self):
return dict(
id=self.pk,
title=self.title,
user=self.user.pk,
user_first_name=self.user.first_name,
user_last_name=self.user.last_name,
creation_date=str(self.creationDate),
approver=self.approver.pk,
approved=self.approved,
approve_date=str(self.approveDate),
start_date=str(self.startDate),
end_date=str(self.endDate),
day_count=self.dayCount,
comment=self.comment,)
def __str__(self):
return self.title
|
# like greedy_algos/fractional_knapsack_problem.py, but no item breakage allowed
# naive brute-force recursive approach
# modified power set, again
class Item: # this seems unnecessary
def __init__(self, value, weight):
self.value = value
self.weight = weight
def zero_one_knapsack(items, capacity, current_index=0):
# if capacity <= 0 or current_index < 0 or current_index >= len(items): # why would current_index be < 0
if capacity <= 0 or current_index >= len(items):
return 0
elif items[current_index].weight <= capacity:
# pack the item
value_1 = items[current_index].value + zero_one_knapsack(
items, capacity - items[current_index].weight, current_index + 1)
# don't pack the item
value_2 = zero_one_knapsack(items, capacity, current_index + 1)
return max(value_1, value_2)
    else:
        # The item doesn't fit: skip it and consider the remaining items.
        return zero_one_knapsack(items, capacity, current_index + 1)
items = [Item(31, 3), Item(26, 1), Item(17, 2), Item(72, 5)]
capacity = 7
print(zero_one_knapsack(items, capacity))
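
# A memoized sketch of the same recursion (an addition for illustration, not
# part of the original exercise); caching on (capacity, index) avoids the
# exponential blow-up of the brute-force version:
#
#     from functools import lru_cache
#
#     def zero_one_knapsack_memo(items, capacity):
#         @lru_cache(maxsize=None)
#         def solve(cap, i):
#             if cap <= 0 or i >= len(items):
#                 return 0
#             best = solve(cap, i + 1)  # skip item i
#             if items[i].weight <= cap:  # or pack it, if it fits
#                 best = max(best, items[i].value + solve(cap - items[i].weight, i + 1))
#             return best
#         return solve(capacity, 0)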
|
import socket
s = socket.socket()
server_host = "127.0.0.1"
server_port = 50000
s.connect((server_host, server_port))
print("Connection has been established")
fp = open("receiveddata.txt", "wb")
file_data = s.recv(2048)
fp.write(file_data)
fp.close()
print("File successfully received")
s.close()
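
# Hedged sketch of a matching sender for this client (the filename is an
# assumption; the original server script is not shown):
#
#     import socket
#     srv = socket.socket()
#     srv.bind(("127.0.0.1", 50000))
#     srv.listen(1)
#     conn, addr = srv.accept()
#     with open("datatosend.txt", "rb") as f:
#         while True:
#             chunk = f.read(2048)
#             if not chunk:
#                 break
#             conn.sendall(chunk)
#     conn.close()
#     srv.close()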
|
from typing import Dict
from pytube import YouTube
from settings import Config
class YoutubeDownloader:
def __init__(self, url: str) -> None:
self.url = url
def download(self) -> Dict[str, str]:
try:
yt = YouTube(self.url)
yt_name = yt.title
output_path = Config.DOWNLOADS_PATH.value
print(output_path)
yt_stream = (
yt.streams.filter(progressive=True, file_extension="mp4")
.order_by("resolution")
.desc()
.first()
)
            if yt_stream is None:
                raise RuntimeError("no progressive mp4 stream available")
            yt_stream.download(output_path=output_path)
metadata = {
"name": yt_name,
"url": self.url,
"path": output_path,
}
        except Exception as e:
            # Store the message, not the exception object, to keep the
            # declared Dict[str, str] return type.
            metadata = {
                "error": str(e),
            }
return metadata
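
# Hedged usage sketch (the URL is a placeholder):
#
#     downloader = YoutubeDownloader("https://www.youtube.com/watch?v=...")
#     result = downloader.download()
#     if "error" in result:
#         print("download failed:", result["error"])
#     else:
#         print("saved", result["name"], "to", result["path"])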
|
"""The kafka plugin of the Minos Framework."""
__author__ = "Minos Framework Devs"
__email__ = "[email protected]"
__version__ = "0.7.0"
from .common import (
KafkaBrokerBuilderMixin,
KafkaCircuitBreakerMixin,
)
from .publisher import (
KafkaBrokerPublisher,
KafkaBrokerPublisherBuilder,
)
from .subscriber import (
KafkaBrokerSubscriber,
KafkaBrokerSubscriberBuilder,
)
|
# Author: Frank Cwitkowitz <[email protected]>
# My imports
from .lvqt_real import LVQT as _LVQT
from .utils import *
# Regular imports
import torch
class LVQT(_LVQT):
"""
Implements an extension of the real-only LVQT variant. In this version, the imaginary
weights of the transform are inferred from the real weights by using the Hilbert transform.
"""
def __init__(self, **kwargs):
"""
Initialize LVQT parameters and the PyTorch processing modules.
Parameters
----------
See _LVQT class...
"""
super(LVQT, self).__init__(**kwargs)
# Initialize l2 pooling to recombine real/imag filter channels
self.l2_pool = torch.nn.LPPool1d(norm_type=2, kernel_size=2, stride=2)
def forward(self, audio):
"""
Perform the main processing steps for the filterbank.
Parameters
----------
audio : Tensor (B x 1 x T)
Audio for a batch of tracks,
B - batch size
T - number of samples (a.k.a. sequence length)
Returns
----------
feats : Tensor (B x F x T)
Features calculated for a batch of tracks,
B - batch size
F - dimensionality of features (number of bins)
T - number of time steps (frames)
"""
# Run the standard convolution steps (call grandparent function)
real_feats = super(type(self).__bases__[0], self).forward(audio)
# We manually do the padding for the convolutional
# layer to allow for different front/back padding
padded_audio = torch.nn.functional.pad(audio, self.pd1)
# Obtain the imaginary weights
imag_weights = self.get_imag_weights().unsqueeze(1)
# Convolve the audio with the filterbank of imaginary weights
imag_feats = torch.nn.functional.conv1d(padded_audio,
weight=imag_weights,
stride=self.sd1,
padding=0)
# Add an extra dimension to both sets of features
real_feats = real_feats.unsqueeze(-1)
imag_feats = imag_feats.unsqueeze(-1)
# Concatenate the features along a new dimension
feats = torch.cat((real_feats, imag_feats), dim=-1)
# Switch filter and frame dimension
feats = feats.transpose(1, 2)
# Collapse the last dimension to zip the features
# and make the real/imag responses adjacent
feats = feats.reshape(tuple(list(feats.shape[:-2]) + [-1]))
# Perform l2 pooling across the filter dimension
feats = self.l2_pool(feats)
# Switch the frame and filter dimension back
feats = feats.transpose(1, 2)
# Perform post-processing steps
feats = self.post_proc(feats)
return feats
def get_imag_weights(self):
"""
Obtain the weights of the imaginary part of the transform using Hilbert transform.
Returns
----------
imag_weights : Tensor (F x T)
Weights of the imaginary part of the transform,
F - number of frequency bins
T - number of time steps (samples)
"""
real_weights = self.get_real_weights()
if self.update:
# Calculate HT as normal
imag_weights = torch_hilbert(real_weights)
else:
# Don't allow gradients to propagate through HT
with torch.no_grad():
imag_weights = torch_hilbert(real_weights)
return imag_weights
|
from random import shuffle

resposta = 'S'
while resposta.upper() == 'S':
    words = input("insira a frase: ").split(" ")
    shuffle(words)
    print(" ".join(words))
    resposta = input('Digite S para continuar: ')
import arcade
import random
ALL_QUADRANTS = [(-1, 1), (1, 1), (1, -1), (-1, -1)]
def count_pressed(tests):
    # Tally tests per (x, y) position.
    result = {}
    for test in tests:
        key = (test.x, test.y)
        result[key] = result.get(key, 0) + 1
    return result
class TestTemplate:
def __init__(self, x, y):
self.x = x
self.y = y
self.count_total = 0
self.count_hit = 0
self.count_missed = 0
def hit(self):
self.count_hit += 1
def missed(self):
self.count_missed += 1
def is_quadrant(self, quadrant):
return self.x * quadrant[0] > 0 and self.y * quadrant[1] > 0
class Test:
def __init__(self, template):
self.template = template
self.pressed = None
self.template.count_total += 1
def press(self):
self.pressed = True
def done(self):
if self.pressed is None:
self.pressed = False
if self.pressed:
self.template.hit()
else:
self.template.missed()
class Level:
def __init__(self, rows, cols, quadrant_frequency):
self.rows = rows
self.cols = cols
self.tests = []
self.test_templates = []
for quadrant, frequency in zip(ALL_QUADRANTS, quadrant_frequency):
for x in range(1, cols + 1):
for y in range(1, rows + 1):
template = TestTemplate(x * quadrant[0], y * quadrant[1])
self.test_templates.append(template)
self.tests += [Test(template) for x in range(frequency)]
random.shuffle(self.tests)
self.current_test_index = 0
def current_test(self):
return self.tests[self.current_test_index]
def cell_to_pixel(self, x, y, size):
i, j = x + self.cols, y + self.rows + 1
w, h = size
return i / (self.cols * 2 + 1) * w, j / (self.rows * 2 + 1) * h
def draw_test_template(self, test, size):
self.draw_square(test.x, test.y, arcade.color.WHITE, size)
def draw_current_test(self, size):
self.draw_test_template(self.current_test().template, size)
def draw_center(self, size):
self.draw_square(0, 0, arcade.color.WHITE, size)
def draw_square(self, x, y, color, size):
left, top = self.cell_to_pixel(x, y, size)
right, bottom = self.cell_to_pixel(x + 1, y - 1, size)
arcade.draw_lrtb_rectangle_filled(left, right, top, bottom, color)
def draw_score(self, size):
for t in self.test_templates:
r = 255 * t.count_missed / t.count_total
g = 255 * t.count_hit / t.count_total
self.draw_square(t.x, t.y, (r, g, 50), size)
for score, q in zip(self.sum_scores(), ALL_QUADRANTS):
x, y = self.cell_to_pixel(
(self.cols + 1)/ 2 * q[0] + 0.5, (self.rows + 1) / 2 * q[1] - 0.5, size)
played_count = score[1] + score[2]
if played_count == 0:
hit_ratio = 0
else:
hit_ratio = 100 * score[1] / played_count
arcade.draw_text("Hit {:.2f}%".format(hit_ratio), x, y, arcade.color.WHITE,
32, align="center", anchor_x="center", anchor_y="center")
played_ratio = 100 * played_count / score[0]
arcade.draw_text("Completed {:.2f}%".format(played_ratio), x, y - 50, arcade.color.WHITE,
32, align="center", anchor_x="center", anchor_y="center")
def start_next_test(self):
if not(self.is_done()):
self.current_test_index += 1
def on_press(self):
self.current_test().press()
def end_test(self):
self.current_test().done()
def is_done(self):
return self.current_test_index >= len(self.tests) - 1
def sum_scores(self):
return [self.sum_scores_per_quadrant(q) for q in ALL_QUADRANTS]
def get_templates_for_quadrant(self, quadrant):
return [t for t in self.test_templates if t.is_quadrant(quadrant)]
def sum_scores_per_quadrant(self, quadrant):
count_total = 0
count_hit = 0
count_missed = 0
for t in self.get_templates_for_quadrant(quadrant):
count_total += t.count_total
count_hit += t.count_hit
count_missed += t.count_missed
return (count_total, count_hit, count_missed)
|
##############################################################################
# Part of the book "Introdução à Programação com Python"
# Author: Nilo Ney Coutinho Menezes
# Publisher: Editora Novatec (c) 2010-2020
# First edition - November/2010 - ISBN 978-85-7522-250-8
# Second edition - June/2014 - ISBN 978-85-7522-408-3
# Third edition - January/2019 - ISBN 978-85-7522-718-3
#
# Site: https://python.nilo.pro.br/
#
# File: exercicios3\capitulo 05\exercicio-05-23.py
##############################################################################
n = int(input("Digite um número:"))
if n < 0:
    print("Número inválido. Digite apenas valores positivos")
elif n == 0 or n == 1:
print(f"{n} é um caso especial.")
else:
if n == 2:
print("2 é primo")
elif n % 2 == 0:
print(f"{n} não é primo, pois 2 é o único número par primo.")
else:
x = 3
while(x < n):
if n % x == 0:
break
x = x + 2
if x == n:
print(f"{n} é primo")
else:
print(f"{n} não é primo, pois é divisível por {x}")
|
#
# Root of the pints module.
# Provides access to all shared functionality (optimisation, mcmc, etc.).
#
# This file is part of PINTS.
# Copyright (c) 2017-2019, University of Oxford.
# For licensing information, see the LICENSE file distributed with the PINTS
# software package.
#
"""
Pints: Probabilistic Inference on Noisy Time Series.
This module provides several optimisation and sampling methods that can be
applied to find the parameters of a model (typically a time series model) that
are most likely, given an experimental data set.
"""
from __future__ import absolute_import, division
from __future__ import print_function, unicode_literals
import sys
#
# Version info: Remember to keep this in sync with setup.py!
#
__version_int__ = 0, 2, 1
__version__ = '.'.join([str(x) for x in __version_int__])
if sys.version_info[0] < 3:
del(x) # Before Python3, list comprehension iterators leaked
#
# Expose pints version
#
def version(formatted=False):
"""
Returns the version number, as a 3-part integer (major, minor, revision).
If ``formatted=True``, it returns a string formatted version (for example
"Pints 1.0.0").
"""
if formatted:
return 'Pints ' + __version__
else:
return __version_int__
#
# Constants
#
# Float format: a float can be converted to a 17 digit decimal and back without
# loss of information
FLOAT_FORMAT = '{: .17e}'
#
# Core classes
#
from ._core import ForwardModel, ForwardModelS1
from ._core import TunableMethod
from ._core import SingleOutputProblem, MultiOutputProblem
#
# Utility classes and methods
#
from ._util import strfloat, vector, matrix2d
from ._util import Timer
from ._logger import Logger, Loggable
#
# Logs of probability density functions (not necessarily normalised)
#
from ._log_pdfs import (
LogPDF,
LogPrior,
LogPosterior,
ProblemLogLikelihood,
SumOfIndependentLogPDFs,
)
#
# Log-priors
#
from ._log_priors import (
BetaLogPrior,
CauchyLogPrior,
ComposedLogPrior,
ExponentialLogPrior,
GammaLogPrior,
GaussianLogPrior,
HalfCauchyLogPrior,
InverseGammaLogPrior,
LogNormalLogPrior,
MultivariateGaussianLogPrior,
NormalLogPrior,
StudentTLogPrior,
UniformLogPrior,
)
#
# Log-likelihoods
#
from ._log_likelihoods import (
AR1LogLikelihood,
ARMA11LogLikelihood,
CauchyLogLikelihood,
GaussianIntegratedUniformLogLikelihood,
GaussianKnownSigmaLogLikelihood,
GaussianLogLikelihood,
KnownNoiseLogLikelihood,
ScaledLogLikelihood,
StudentTLogLikelihood,
UnknownNoiseLogLikelihood,
)
#
# Boundaries
#
from ._boundaries import (
Boundaries,
LogPDFBoundaries,
RectangularBoundaries,
)
#
# Error measures
#
from ._error_measures import (
ErrorMeasure,
ProblemErrorMeasure,
ProbabilityBasedError,
SumOfErrors,
MeanSquaredError,
RootMeanSquaredError,
SumOfSquaresError,
)
#
# Parallel function evaluation
#
from ._evaluation import (
evaluate,
Evaluator,
ParallelEvaluator,
SequentialEvaluator,
)
#
# Optimisation
#
from ._optimisers import (
curve_fit,
fmin,
Optimisation,
OptimisationController,
optimise,
Optimiser,
PopulationBasedOptimiser,
TriangleWaveTransform,
)
from ._optimisers._cmaes import CMAES
from ._optimisers._pso import PSO
from ._optimisers._snes import SNES
from ._optimisers._xnes import XNES
#
# Diagnostics
#
from ._diagnostics import (
effective_sample_size,
rhat,
rhat_all_params,
)
#
# MCMC
#
from ._mcmc import (
mcmc_sample,
MCMCController,
MCMCSampler,
MCMCSampling,
MultiChainMCMC,
SingleChainMCMC,
)
from ._mcmc._adaptive_covariance import AdaptiveCovarianceMCMC
from ._mcmc._differential_evolution import DifferentialEvolutionMCMC
from ._mcmc._dream import DreamMCMC
from ._mcmc._emcee_hammer import EmceeHammerMCMC
from ._mcmc._hamiltonian import HamiltonianMCMC
from ._mcmc._mala import MALAMCMC
from ._mcmc._population import PopulationMCMC
from ._mcmc._metropolis import MetropolisRandomWalkMCMC
#
# Nested samplers
#
from ._nested import NestedSampler
from ._nested._rejection import NestedRejectionSampler
from ._nested._ellipsoid import NestedEllipsoidSampler
#
# Noise generators (always import!)
#
from . import noise
#
# Remove any imported modules, so we don't expose them as part of pints
#
del(sys)
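#
# Illustrative usage sketch (commented out; not part of the package above): a
# minimal fit combining the classes this module exposes. ``LineModel`` and the
# toy data are hypothetical names, for illustration only.
#
#     import numpy as np
#     import pints
#
#     class LineModel(pints.ForwardModel):
#         def n_parameters(self):
#             return 1
#         def simulate(self, parameters, times):
#             # A straight line through the origin with one slope parameter
#             return parameters[0] * np.asarray(times)
#
#     times = np.linspace(0, 10, 100)
#     values = LineModel().simulate([2.0], times)
#     problem = pints.SingleOutputProblem(LineModel(), times, values)
#     error = pints.SumOfSquaresError(problem)
#     x_found, f_found = pints.optimise(error, x0=[1.0], method=pints.XNES)
#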
|
# %%
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
import numpy as np
# %%
train = pd.read_csv('../data/input/train.csv')
test = pd.read_csv('../data/input/test.csv')
# %%
print(train.isnull().sum())
print(train.shape)
print(train.info())
# %%
print(test.isnull().sum())
print(test.shape)
print(test.info())
# %%
columns = test.columns
print(columns)
for col in columns:
print(col)
print(len(train[col].unique()))
print('_'*30)
# %%
train['title'].apply(lambda x: 'official' in x.lower()).sum()
# %%
train['title'].apply(lambda x: '公式' in x.lower()).sum()  # '公式' means "official" in Japanese
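# %%
# Sketch: fold the two title checks above into a single boolean feature.
# Assumes 'title' is also present in test; the column name 'is_official' is
# an illustrative choice, not from the source.
for df in (train, test):
    df['is_official'] = df['title'].apply(
        lambda x: 'official' in x.lower() or '公式' in x
    )
print(train['is_official'].sum())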
|
from tools.database.connection import Connection, Cursor
import pymapd
class MapdConnection(Connection):
def __init__(self, host, name, user, password):
self.connection = pymapd.connect(user=user, password=password, host=host, dbname=name)
def _make_cursor(self):
return MapdCursor(self.connection.cursor())
class MapdCursor(Cursor):
def __init__(self, cursor: pymapd.Cursor):
self.c = cursor
    def fetch(self, count: int = 1):
        """Fetch one row (count=1), ``count`` rows (count>1), or all rows (count=-1)."""
        if count == 1:
            return self.c.fetchone()
        if count == -1:
            return self.c.fetchall()
        if count > 1:
            return self.c.fetchmany(count)
        raise ValueError('count must be a positive integer, or -1 to fetch all rows')
def execute(self, sql: str) -> None:
self.c.execute(sql)
def close(self):
self.c.close()
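# Illustrative usage sketch (host and credentials are placeholders; assumes
# callers obtain cursors through the Connection base class, shown here via
# _make_cursor directly):
#
#     conn = MapdConnection(host='localhost', name='mapd', user='admin', password='***')
#     cur = conn._make_cursor()
#     cur.execute('SELECT 1')
#     print(cur.fetch())  # first row of the result set
#     cur.close()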
|
#!/usr/bin/python3
# Copyright 2021. FastyBird s.r.o.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=too-many-lines
"""
Devices module repositories
"""
# Library dependencies
import json
import uuid
from abc import abstractmethod, ABC
from typing import List, Dict
from kink import inject
import modules_metadata.exceptions as metadata_exceptions
from exchange_plugin.dispatcher import EventDispatcher
from exchange_plugin.events.event import IEvent
from modules_metadata.loader import load_schema
from modules_metadata.routing import RoutingKey
from modules_metadata.validator import validate
from modules_metadata.types import ModuleOrigin, DataType
from pony.orm import core as orm
# Library libs
from devices_module.events import ModelEntityCreatedEvent, ModelEntityUpdatedEvent, ModelEntityDeletedEvent
from devices_module.exceptions import HandleExchangeDataException
from devices_module.items import (
ConnectorItem,
FbBusConnectorItem,
FbMqttV1ConnectorItem,
DeviceItem,
ChannelItem,
PropertyItem,
DevicePropertyItem,
ChannelPropertyItem,
ControlItem,
ConnectorControlItem,
DeviceControlItem,
ChannelControlItem,
ConfigurationItem,
DeviceConfigurationItem,
ChannelConfigurationItem,
)
from devices_module.models import (
DeviceEntity,
ChannelEntity,
DevicePropertyEntity,
ChannelPropertyEntity,
ConnectorEntity,
FbBusConnectorEntity,
FbMqttConnectorEntity,
ConnectorControlEntity,
DeviceControlEntity,
ChannelControlEntity,
DeviceConfigurationEntity,
ChannelConfigurationEntity,
)
@inject
class DevicesRepository:
"""
Devices repository
@package FastyBird:DevicesModule!
@module repositories
@author Adam Kadlec <[email protected]>
"""
__items: Dict[str, DeviceItem] or None = None
__iterator_index = 0
__event_dispatcher: EventDispatcher
# -----------------------------------------------------------------------------
def __init__(
self,
event_dispatcher: EventDispatcher,
) -> None:
self.__event_dispatcher = event_dispatcher
self.__event_dispatcher.add_listener(
event_id=ModelEntityCreatedEvent.EVENT_NAME,
listener=self.__entity_created,
)
self.__event_dispatcher.add_listener(
event_id=ModelEntityUpdatedEvent.EVENT_NAME,
listener=self.__entity_updated,
)
self.__event_dispatcher.add_listener(
event_id=ModelEntityDeletedEvent.EVENT_NAME,
listener=self.__entity_deleted,
)
# -----------------------------------------------------------------------------
def get_by_id(self, device_id: uuid.UUID) -> DeviceItem or None:
"""Find device in cache by provided identifier"""
if self.__items is None:
self.initialize()
if device_id.__str__() in self.__items:
return self.__items[device_id.__str__()]
return None
# -----------------------------------------------------------------------------
def get_by_key(self, device_key: str) -> DeviceItem or None:
"""Find device in cache by provided key"""
if self.__items is None:
self.initialize()
for record in self.__items.values():
if record.key == device_key:
return record
return None
# -----------------------------------------------------------------------------
def get_by_identifier(self, device_identifier: str) -> DeviceItem or None:
"""Find device in cache by provided identifier"""
if self.__items is None:
self.initialize()
for record in self.__items.values():
if record.identifier == device_identifier:
return record
return None
# -----------------------------------------------------------------------------
def get_all_by_parent(self, device_id: uuid.UUID) -> List[DeviceItem]:
"""Find all devices in cache for parent device identifier"""
if self.__items is None:
self.initialize()
items: List[DeviceItem] = []
for record in self.__items.values():
if record.parent is not None and record.parent.__eq__(device_id):
items.append(record)
return items
# -----------------------------------------------------------------------------
def clear(self) -> None:
"""Clear items cache"""
self.__items = None
# -----------------------------------------------------------------------------
@orm.db_session
def create_from_exchange(self, routing_key: RoutingKey, data: Dict) -> bool:
"""Process received device message from exchange when entity was created"""
if routing_key != RoutingKey.DEVICES_ENTITY_CREATED:
return False
if self.__items is None:
self.initialize()
return True
data: Dict = validate_exchange_data(ModuleOrigin(ModuleOrigin.DEVICES_MODULE), routing_key, data)
entity: DeviceEntity or None = DeviceEntity.get(
device_id=uuid.UUID(data.get("id"), version=4),
)
if entity is not None:
self.__items[entity.device_id.__str__()] = self.__create_item(entity)
return True
return False
# -----------------------------------------------------------------------------
@orm.db_session
def update_from_exchange(self, routing_key: RoutingKey, data: Dict) -> bool:
"""Process received device message from exchange when entity was updated"""
if routing_key != RoutingKey.DEVICES_ENTITY_UPDATED:
return False
if self.__items is None:
self.initialize()
return True
validated_data: Dict = validate_exchange_data(ModuleOrigin(ModuleOrigin.DEVICES_MODULE), routing_key, data)
if validated_data.get("id") not in self.__items:
entity: DeviceEntity or None = DeviceEntity.get(
device_id=uuid.UUID(validated_data.get("id"), version=4)
)
if entity is not None:
self.__items[entity.device_id.__str__()] = self.__create_item(entity)
return True
return False
item = self.__update_item(
self.get_by_id(uuid.UUID(validated_data.get("id"), version=4)),
validated_data,
)
if item is not None:
self.__items[validated_data.get("id")] = item
return True
return False
# -----------------------------------------------------------------------------
@orm.db_session
def delete_from_exchange(self, routing_key: RoutingKey, data: Dict) -> bool:
"""Process received device message from exchange when entity was updated"""
if routing_key != RoutingKey.DEVICES_ENTITY_DELETED:
return False
if data.get("id") in self.__items:
del self.__items[data.get("id")]
return True
return False
# -----------------------------------------------------------------------------
@orm.db_session
def initialize(self) -> None:
"""Initialize devices properties repository by fetching entities from database"""
items: Dict[str, DeviceItem] = {}
for entity in DeviceEntity.select():
if self.__items is None or entity.device_id.__str__() not in self.__items:
item = self.__create_item(entity)
else:
item = self.__update_item(self.get_by_id(entity.device_id), entity.to_dict())
if item is not None:
items[entity.device_id.__str__()] = item
self.__items = items
# -----------------------------------------------------------------------------
@staticmethod
def __create_item(entity: DeviceEntity) -> DeviceItem or None:
return DeviceItem(
device_id=entity.device_id,
device_identifier=entity.identifier,
device_key=entity.key,
device_name=entity.name,
device_comment=entity.comment,
device_enabled=entity.enabled,
hardware_manufacturer=entity.hardware_manufacturer,
hardware_model=entity.hardware_model,
hardware_version=entity.hardware_version,
hardware_mac_address=entity.hardware_mac_address,
firmware_manufacturer=entity.firmware_manufacturer,
firmware_version=entity.firmware_version,
connector_id=entity.connector.connector_id if entity.connector is not None else None,
connector_data=entity.connector.params \
if entity.connector is not None and entity.connector.params is not None else {},
parent_device=entity.parent.device_id if entity.parent is not None else None,
)
# -----------------------------------------------------------------------------
@staticmethod
def __update_item(item: DeviceItem, data: Dict) -> DeviceItem or None:
return DeviceItem(
device_id=item.device_id,
device_identifier=item.identifier,
device_key=item.key,
device_name=data.get("name", item.name),
device_comment=data.get("comment", item.comment),
device_enabled=data.get("enabled", item.enabled),
hardware_manufacturer=data.get("hardware_manufacturer", item.hardware_manufacturer),
hardware_model=data.get("hardware_model", item.hardware_model),
hardware_version=data.get("hardware_version", item.hardware_version),
hardware_mac_address=data.get("hardware_mac_address", item.hardware_mac_address),
firmware_manufacturer=data.get("firmware_manufacturer", item.firmware_manufacturer),
firmware_version=data.get("firmware_version", item.firmware_version),
connector_id=item.connector_id,
connector_data=item.connector_data,
parent_device=item.parent,
)
# -----------------------------------------------------------------------------
def __entity_created(self, event: IEvent) -> None:
if not isinstance(event, ModelEntityCreatedEvent) or not isinstance(event.entity, DeviceEntity):
return
self.initialize()
# -----------------------------------------------------------------------------
def __entity_updated(self, event: IEvent) -> None:
if not isinstance(event, ModelEntityUpdatedEvent) or not isinstance(event.entity, DeviceEntity):
return
self.initialize()
# -----------------------------------------------------------------------------
def __entity_deleted(self, event: IEvent) -> None:
if not isinstance(event, ModelEntityDeletedEvent) or not isinstance(event.entity, DeviceEntity):
return
self.initialize()
# -----------------------------------------------------------------------------
def __iter__(self) -> "DevicesRepository":
        # Reset index for next iteration
self.__iterator_index = 0
return self
# -----------------------------------------------------------------------------
def __len__(self):
if self.__items is None:
self.initialize()
return len(self.__items.values())
# -----------------------------------------------------------------------------
def __next__(self) -> DeviceItem:
if self.__items is None:
self.initialize()
if self.__iterator_index < len(self.__items.values()):
items: List[DeviceItem] = list(self.__items.values())
result: DeviceItem = items[self.__iterator_index]
self.__iterator_index += 1
return result
        # Reset index for next iteration
self.__iterator_index = 0
# End of iteration
raise StopIteration
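# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the module): the exchange handlers above
# are typically driven by consumed messages. The payload below is
# hypothetical; in practice the repository is resolved through kink's DI
# container rather than constructed by hand.
#
#     repository = DevicesRepository(event_dispatcher=dispatcher)
#     repository.update_from_exchange(
#         routing_key=RoutingKey.DEVICES_ENTITY_UPDATED,
#         data={"id": "<device uuid>", "name": "Renamed device"},
#     )
# ---------------------------------------------------------------------------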
@inject
class ChannelsRepository:
"""
Channels repository
@package FastyBird:DevicesModule!
@module repositories
@author Adam Kadlec <[email protected]>
"""
__items: Dict[str, ChannelItem] or None = None
__iterator_index = 0
__event_dispatcher: EventDispatcher
# -----------------------------------------------------------------------------
def __init__(
self,
event_dispatcher: EventDispatcher,
) -> None:
self.__event_dispatcher = event_dispatcher
self.__event_dispatcher.add_listener(
event_id=ModelEntityCreatedEvent.EVENT_NAME,
listener=self.__entity_created,
)
self.__event_dispatcher.add_listener(
event_id=ModelEntityUpdatedEvent.EVENT_NAME,
listener=self.__entity_updated,
)
self.__event_dispatcher.add_listener(
event_id=ModelEntityDeletedEvent.EVENT_NAME,
listener=self.__entity_deleted,
)
# -----------------------------------------------------------------------------
def get_by_id(self, channel_id: uuid.UUID) -> ChannelItem or None:
"""Find channel in cache by provided identifier"""
if self.__items is None:
self.initialize()
if channel_id.__str__() in self.__items:
return self.__items[channel_id.__str__()]
return None
# -----------------------------------------------------------------------------
def get_by_key(self, channel_key: str) -> ChannelItem or None:
"""Find channel in cache by provided key"""
if self.__items is None:
self.initialize()
for record in self.__items.values():
if record.key == channel_key:
return record
return None
# -----------------------------------------------------------------------------
def get_by_identifier(self, device_id: uuid.UUID, channel_identifier: str) -> ChannelItem or None:
"""Find channel in cache by provided identifier"""
if self.__items is None:
self.initialize()
for record in self.__items.values():
if record.device_id.__eq__(device_id) and record.identifier == channel_identifier:
return record
return None
# -----------------------------------------------------------------------------
def get_all_by_device(self, device_id: uuid.UUID) -> List[ChannelItem]:
"""Find all channels in cache for device identifier"""
if self.__items is None:
self.initialize()
items: List[ChannelItem] = []
for record in self.__items.values():
if record.device_id.__eq__(device_id):
items.append(record)
return items
# -----------------------------------------------------------------------------
def clear(self) -> None:
"""Clear items cache"""
self.__items = None
# -----------------------------------------------------------------------------
@orm.db_session
def create_from_exchange(self, routing_key: RoutingKey, data: Dict) -> bool:
"""Process received channel message from exchange when entity was created"""
if routing_key != RoutingKey.CHANNELS_ENTITY_CREATED:
return False
if self.__items is None:
self.initialize()
return True
data: Dict = validate_exchange_data(ModuleOrigin(ModuleOrigin.DEVICES_MODULE), routing_key, data)
entity: ChannelEntity or None = ChannelEntity.get(
channel_id=uuid.UUID(data.get("id"), version=4),
)
if entity is not None:
self.__items[entity.channel_id.__str__()] = self.__create_item(entity)
return True
return False
# -----------------------------------------------------------------------------
@orm.db_session
def update_from_exchange(self, routing_key: RoutingKey, data: Dict) -> bool:
"""Process received channel message from exchange when entity was updated"""
if routing_key != RoutingKey.CHANNELS_ENTITY_UPDATED:
return False
if self.__items is None:
self.initialize()
return True
validated_data: Dict = validate_exchange_data(ModuleOrigin(ModuleOrigin.DEVICES_MODULE), routing_key, data)
if validated_data.get("id") not in self.__items:
entity: ChannelEntity or None = ChannelEntity.get(
channel_id=uuid.UUID(validated_data.get("id"), version=4)
)
if entity is not None:
self.__items[entity.channel_id.__str__()] = self.__create_item(entity)
return True
return False
item = self.__update_item(
self.get_by_id(uuid.UUID(validated_data.get("id"), version=4)),
validated_data,
)
if item is not None:
self.__items[validated_data.get("id")] = item
return True
return False
# -----------------------------------------------------------------------------
@orm.db_session
def delete_from_exchange(self, routing_key: RoutingKey, data: Dict) -> bool:
"""Process received channel message from exchange when entity was updated"""
if routing_key != RoutingKey.CHANNELS_ENTITY_DELETED:
return False
if data.get("id") in self.__items:
del self.__items[data.get("id")]
return True
return False
# -----------------------------------------------------------------------------
@orm.db_session
def initialize(self) -> None:
"""Initialize channels properties repository by fetching entities from database"""
items: Dict[str, ChannelItem] = {}
for entity in ChannelEntity.select():
if self.__items is None or entity.channel_id.__str__() not in self.__items:
item = self.__create_item(entity)
else:
item = self.__update_item(self.get_by_id(entity.channel_id), entity.to_dict())
if item is not None:
items[entity.channel_id.__str__()] = item
self.__items = items
# -----------------------------------------------------------------------------
def __entity_created(self, event: IEvent) -> None:
if not isinstance(event, ModelEntityCreatedEvent) or not isinstance(event.entity, ChannelEntity):
return
self.initialize()
# -----------------------------------------------------------------------------
def __entity_updated(self, event: IEvent) -> None:
if not isinstance(event, ModelEntityUpdatedEvent) or not isinstance(event.entity, ChannelEntity):
return
self.initialize()
# -----------------------------------------------------------------------------
def __entity_deleted(self, event: IEvent) -> None:
if not isinstance(event, ModelEntityDeletedEvent) or not isinstance(event.entity, ChannelEntity):
return
self.initialize()
# -----------------------------------------------------------------------------
@staticmethod
def __create_item(entity: ChannelEntity) -> ChannelItem or None:
return ChannelItem(
channel_id=entity.channel_id,
channel_identifier=entity.identifier,
channel_key=entity.key,
channel_name=entity.name,
channel_comment=entity.comment,
device_id=entity.device.device_id,
)
# -----------------------------------------------------------------------------
@staticmethod
def __update_item(item: ChannelItem, data: Dict) -> ChannelItem or None:
return ChannelItem(
channel_id=item.channel_id,
channel_identifier=item.identifier,
channel_key=item.key,
channel_name=data.get("name", item.name),
channel_comment=data.get("comment", item.comment),
device_id=item.device_id,
)
# -----------------------------------------------------------------------------
def __iter__(self) -> "ChannelsRepository":
        # Reset index for next iteration
self.__iterator_index = 0
return self
# -----------------------------------------------------------------------------
def __len__(self):
if self.__items is None:
self.initialize()
return len(self.__items.values())
# -----------------------------------------------------------------------------
def __next__(self) -> ChannelItem:
if self.__items is None:
self.initialize()
if self.__iterator_index < len(self.__items.values()):
items: List[ChannelItem] = list(self.__items.values())
result: ChannelItem = items[self.__iterator_index]
self.__iterator_index += 1
return result
        # Reset index for next iteration
self.__iterator_index = 0
# End of iteration
raise StopIteration
@inject
class PropertiesRepository(ABC):
"""
Base properties repository
@package FastyBird:DevicesModule!
@module repositories
@author Adam Kadlec <[email protected]>
"""
_items: Dict[str, ChannelPropertyItem or DevicePropertyItem] or None = None
__iterator_index = 0
_event_dispatcher: EventDispatcher
# -----------------------------------------------------------------------------
def __init__(
self,
event_dispatcher: EventDispatcher,
) -> None:
        self._event_dispatcher = event_dispatcher
        self._event_dispatcher.add_listener(
            event_id=ModelEntityCreatedEvent.EVENT_NAME,
            listener=self._entity_created,
        )
        self._event_dispatcher.add_listener(
            event_id=ModelEntityUpdatedEvent.EVENT_NAME,
            listener=self._entity_updated,
        )
        self._event_dispatcher.add_listener(
            event_id=ModelEntityDeletedEvent.EVENT_NAME,
            listener=self._entity_deleted,
        )
# -----------------------------------------------------------------------------
def get_by_id(self, property_id: uuid.UUID) -> DevicePropertyItem or ChannelPropertyItem or None:
"""Find property in cache by provided identifier"""
if self._items is None:
self.initialize()
if property_id.__str__() in self._items:
return self._items[property_id.__str__()]
return None
# -----------------------------------------------------------------------------
def get_by_key(self, property_key: str) -> DevicePropertyItem or ChannelPropertyItem or None:
"""Find property in cache by provided key"""
if self._items is None:
self.initialize()
for record in self._items.values():
if record.key == property_key:
return record
return None
# -----------------------------------------------------------------------------
def clear(self) -> None:
"""Clear items cache"""
self._items = None
# -----------------------------------------------------------------------------
@abstractmethod
def initialize(self) -> None:
"""Initialize repository by fetching entities from database"""
# -----------------------------------------------------------------------------
def _entity_created(self, event: IEvent) -> None:
if (
not isinstance(event, ModelEntityCreatedEvent)
or not isinstance(event.entity, (DevicePropertyEntity, ChannelPropertyEntity))
):
return
self.initialize()
# -----------------------------------------------------------------------------
def _entity_updated(self, event: IEvent) -> None:
if (
not isinstance(event, ModelEntityUpdatedEvent)
or not isinstance(event.entity, (DevicePropertyEntity, ChannelPropertyEntity))
):
return
self.initialize()
# -----------------------------------------------------------------------------
def _entity_deleted(self, event: IEvent) -> None:
if (
not isinstance(event, ModelEntityDeletedEvent)
or not isinstance(event.entity, (DevicePropertyEntity, ChannelPropertyEntity))
):
return
self.initialize()
# -----------------------------------------------------------------------------
@staticmethod
def _create_item(entity: DevicePropertyEntity or ChannelPropertyEntity) -> PropertyItem or None:
if isinstance(entity, DevicePropertyEntity):
return DevicePropertyItem(
property_id=entity.property_id,
property_name=entity.name,
property_identifier=entity.identifier,
property_key=entity.key,
property_settable=entity.settable,
property_queryable=entity.queryable,
property_data_type=entity.data_type,
property_format=entity.format,
property_unit=entity.unit,
device_id=entity.device.device_id,
)
if isinstance(entity, ChannelPropertyEntity):
return ChannelPropertyItem(
property_id=entity.property_id,
property_name=entity.name,
property_identifier=entity.identifier,
property_key=entity.key,
property_settable=entity.settable,
property_queryable=entity.queryable,
property_data_type=entity.data_type,
property_format=entity.format,
property_unit=entity.unit,
device_id=entity.channel.device.device_id,
channel_id=entity.channel.channel_id,
)
return None
# -----------------------------------------------------------------------------
@staticmethod
def _update_item(item: PropertyItem, data: Dict) -> PropertyItem or None:
data_type = data.get("data_type", item.data_type.value if item.data_type is not None else None)
data_type = DataType(data_type) if data_type is not None else None
if isinstance(item, DevicePropertyItem):
return DevicePropertyItem(
property_id=item.property_id,
property_name=data.get("name", item.name),
property_identifier=item.identifier,
property_key=item.key,
property_settable=data.get("settable", item.settable),
property_queryable=data.get("queryable", item.queryable),
property_data_type=data_type,
property_format=data.get("format", item.format),
property_unit=data.get("unit", item.unit),
device_id=item.device_id,
)
if isinstance(item, ChannelPropertyItem):
return ChannelPropertyItem(
property_id=item.property_id,
property_name=data.get("name", item.name),
property_identifier=item.identifier,
property_key=item.key,
property_settable=data.get("settable", item.settable),
property_queryable=data.get("queryable", item.queryable),
property_data_type=data_type,
property_format=data.get("format", item.format),
property_unit=data.get("unit", item.unit),
device_id=item.device_id,
channel_id=item.channel_id,
)
return None
# -----------------------------------------------------------------------------
def __iter__(self) -> "PropertiesRepository":
        # Reset index for next iteration
self.__iterator_index = 0
return self
# -----------------------------------------------------------------------------
def __len__(self):
if self._items is None:
self.initialize()
return len(self._items.values())
# -----------------------------------------------------------------------------
def __next__(self) -> DevicePropertyItem or ChannelPropertyItem:
if self._items is None:
self.initialize()
if self.__iterator_index < len(self._items.values()):
items: List[DevicePropertyItem or ChannelPropertyItem] = list(self._items.values())
result: DevicePropertyItem or ChannelPropertyItem = items[self.__iterator_index]
self.__iterator_index += 1
return result
        # Reset index for next iteration
self.__iterator_index = 0
# End of iteration
raise StopIteration
class DevicesPropertiesRepository(PropertiesRepository):
"""
Devices properties repository
@package FastyBird:DevicesModule!
@module repositories
@author Adam Kadlec <[email protected]>
"""
def get_by_identifier(self, device_id: uuid.UUID, property_identifier: str) -> DevicePropertyItem or None:
"""Find property in cache by provided identifier"""
if self._items is None:
self.initialize()
for record in self._items.values():
if record.device_id.__eq__(device_id) and record.identifier == property_identifier:
return record
return None
# -----------------------------------------------------------------------------
def get_all_by_device(self, device_id: uuid.UUID) -> List[DevicePropertyItem]:
"""Find all devices properties in cache for device identifier"""
if self._items is None:
self.initialize()
items: List[DevicePropertyItem] = []
for record in self._items.values():
if record.device_id.__eq__(device_id):
items.append(record)
return items
# -----------------------------------------------------------------------------
@orm.db_session
def create_from_exchange(self, routing_key: RoutingKey, data: Dict) -> bool:
"""Process received device property message from exchange when entity was created"""
if routing_key != RoutingKey.DEVICES_PROPERTY_ENTITY_CREATED:
return False
if self._items is None:
self.initialize()
return True
data: Dict = validate_exchange_data(ModuleOrigin(ModuleOrigin.DEVICES_MODULE), routing_key, data)
entity: DevicePropertyEntity or None = DevicePropertyEntity.get(
property_id=uuid.UUID(data.get("id"), version=4),
)
if entity is not None:
self._items[entity.property_id.__str__()] = self._create_item(entity)
return True
return False
# -----------------------------------------------------------------------------
@orm.db_session
def update_from_exchange(self, routing_key: RoutingKey, data: Dict) -> bool:
"""Process received device property message from exchange when entity was updated"""
if routing_key != RoutingKey.DEVICES_PROPERTY_ENTITY_UPDATED:
return False
if self._items is None:
self.initialize()
return True
validated_data: Dict = validate_exchange_data(ModuleOrigin(ModuleOrigin.DEVICES_MODULE), routing_key, data)
if validated_data.get("id") not in self._items:
entity: DevicePropertyEntity or None = DevicePropertyEntity.get(
property_id=uuid.UUID(validated_data.get("id"), version=4)
)
if entity is not None:
self._items[entity.property_id.__str__()] = self._create_item(entity)
return True
return False
item = self._update_item(
self.get_by_id(uuid.UUID(validated_data.get("id"), version=4)),
validated_data,
)
if item is not None:
self._items[validated_data.get("id")] = item
return True
return False
# -----------------------------------------------------------------------------
@orm.db_session
def delete_from_exchange(self, routing_key: RoutingKey, data: Dict) -> bool:
"""Process received device property message from exchange when entity was updated"""
if routing_key != RoutingKey.DEVICES_PROPERTY_ENTITY_DELETED:
return False
if data.get("id") in self._items:
del self._items[data.get("id")]
return True
return False
# -----------------------------------------------------------------------------
@orm.db_session
def initialize(self) -> None:
"""Initialize devices properties repository by fetching entities from database"""
items: Dict[str, DevicePropertyItem] = {}
for entity in DevicePropertyEntity.select():
if self._items is None or entity.property_id.__str__() not in self._items:
item = self._create_item(entity)
else:
item = self._update_item(self.get_by_id(entity.property_id), entity.to_dict())
if item is not None:
items[entity.property_id.__str__()] = item
self._items = items
class ChannelsPropertiesRepository(PropertiesRepository):
"""
Channel property repository
@package FastyBird:DevicesModule!
@module repositories
@author Adam Kadlec <[email protected]>
"""
def get_by_identifier(self, channel_id: uuid.UUID, property_identifier: str) -> ChannelPropertyItem or None:
"""Find property in cache by provided identifier"""
if self._items is None:
self.initialize()
for record in self._items.values():
if record.channel_id.__eq__(channel_id) and record.identifier == property_identifier:
return record
return None
# -----------------------------------------------------------------------------
def get_all_by_channel(self, channel_id: uuid.UUID) -> List[ChannelPropertyItem]:
"""Find all channels properties in cache for channel identifier"""
if self._items is None:
self.initialize()
items: List[ChannelPropertyItem] = []
for record in self._items.values():
if record.channel_id.__eq__(channel_id):
items.append(record)
return items
# -----------------------------------------------------------------------------
@orm.db_session
def create_from_exchange(self, routing_key: RoutingKey, data: Dict) -> bool:
"""Process received channel property message from exchange when entity was created"""
if routing_key != RoutingKey.CHANNELS_PROPERTY_ENTITY_CREATED:
return False
if self._items is None:
self.initialize()
return True
data: Dict = validate_exchange_data(ModuleOrigin(ModuleOrigin.DEVICES_MODULE), routing_key, data)
entity: ChannelPropertyEntity or None = ChannelPropertyEntity.get(
property_id=uuid.UUID(data.get("id"), version=4),
)
if entity is not None:
self._items[entity.property_id.__str__()] = self._create_item(entity)
return True
return False
# -----------------------------------------------------------------------------
@orm.db_session
def update_from_exchange(self, routing_key: RoutingKey, data: Dict) -> bool:
"""Process received channel property message from exchange when entity was updated"""
if routing_key != RoutingKey.CHANNELS_PROPERTY_ENTITY_UPDATED:
return False
if self._items is None:
self.initialize()
return True
validated_data: Dict = validate_exchange_data(ModuleOrigin(ModuleOrigin.DEVICES_MODULE), routing_key, data)
if validated_data.get("id") not in self._items:
entity: ChannelPropertyEntity or None = ChannelPropertyEntity.get(
property_id=uuid.UUID(validated_data.get("id"), version=4)
)
if entity is not None:
self._items[entity.property_id.__str__()] = self._create_item(entity)
return True
return False
item = self._update_item(
self.get_by_id(uuid.UUID(validated_data.get("id"), version=4)),
validated_data,
)
if item is not None:
self._items[validated_data.get("id")] = item
return True
return False
# -----------------------------------------------------------------------------
@orm.db_session
def delete_from_exchange(self, routing_key: RoutingKey, data: Dict) -> bool:
"""Process received channel property message from exchange when entity was updated"""
if routing_key != RoutingKey.CHANNELS_PROPERTY_ENTITY_DELETED:
return False
if data.get("id") in self._items:
del self._items[data.get("id")]
return True
return False
# -----------------------------------------------------------------------------
@orm.db_session
def initialize(self) -> None:
"""Initialize channel properties repository by fetching entities from database"""
items: Dict[str, ChannelPropertyItem] = {}
for entity in ChannelPropertyEntity.select():
if self._items is None or entity.property_id.__str__() not in self._items:
item = self._create_item(entity)
else:
item = self._update_item(self.get_by_id(entity.property_id), entity.to_dict())
if item is not None:
items[entity.property_id.__str__()] = item
self._items = items
@inject
class ConnectorsRepository(ABC):
"""
Connectors repository
@package FastyBird:DevicesModule!
@module repositories
@author Adam Kadlec <[email protected]>
"""
__items: Dict[str, ConnectorItem] or None = None
__iterator_index = 0
__event_dispatcher: EventDispatcher
# -----------------------------------------------------------------------------
def __init__(
self,
event_dispatcher: EventDispatcher,
) -> None:
self.__event_dispatcher = event_dispatcher
self.__event_dispatcher.add_listener(
event_id=ModelEntityCreatedEvent.EVENT_NAME,
listener=self.__entity_created,
)
self.__event_dispatcher.add_listener(
event_id=ModelEntityUpdatedEvent.EVENT_NAME,
listener=self.__entity_updated,
)
self.__event_dispatcher.add_listener(
event_id=ModelEntityDeletedEvent.EVENT_NAME,
listener=self.__entity_deleted,
)
# -----------------------------------------------------------------------------
def get_by_id(self, connector_id: uuid.UUID) -> ConnectorItem or None:
"""Find connector in cache by provided identifier"""
if self.__items is None:
self.initialize()
if connector_id.__str__() in self.__items:
return self.__items[connector_id.__str__()]
return None
# -----------------------------------------------------------------------------
def get_by_key(self, connector_key: str) -> ConnectorItem or None:
"""Find connector in cache by provided key"""
if self.__items is None:
self.initialize()
for record in self.__items.values():
if record.key == connector_key:
return record
return None
# -----------------------------------------------------------------------------
@orm.db_session
def create_from_exchange(self, routing_key: RoutingKey, data: Dict) -> bool:
"""Process received connector message from exchange when entity was created"""
if routing_key != RoutingKey.CONNECTORS_ENTITY_CREATED:
return False
if self.__items is None:
self.initialize()
return True
data: Dict = validate_exchange_data(ModuleOrigin(ModuleOrigin.DEVICES_MODULE), routing_key, data)
entity: ConnectorEntity or None = ConnectorEntity.get(connector_id=uuid.UUID(data.get("id"), version=4))
if entity is not None:
self.__items[entity.connector_id.__str__()] = self.__create_item(entity)
return True
return False
# -----------------------------------------------------------------------------
@orm.db_session
def update_from_exchange(self, routing_key: RoutingKey, data: Dict) -> bool:
"""Process received connector message from exchange when entity was updated"""
if routing_key != RoutingKey.CONNECTORS_ENTITY_UPDATED:
return False
if self.__items is None:
self.initialize()
return True
validated_data: Dict = validate_exchange_data(ModuleOrigin(ModuleOrigin.DEVICES_MODULE), routing_key, data)
if validated_data.get("id") not in self.__items:
entity: ConnectorEntity or None = ConnectorEntity.get(
connector_id=uuid.UUID(validated_data.get("id"), version=4)
)
if entity is not None:
self.__items[entity.connector_id.__str__()] = self.__create_item(entity)
return True
return False
item = self.__update_item(
self.get_by_id(uuid.UUID(validated_data.get("id"), version=4)),
validated_data,
)
if item is not None:
self.__items[validated_data.get("id")] = item
return True
return False
# -----------------------------------------------------------------------------
@orm.db_session
def delete_from_exchange(self, routing_key: RoutingKey, data: Dict) -> bool:
"""Process received connector message from exchange when entity was updated"""
if routing_key != RoutingKey.CONNECTORS_ENTITY_DELETED:
return False
if data.get("id") in self.__items:
del self.__items[data.get("id")]
return True
return False
# -----------------------------------------------------------------------------
def clear(self) -> None:
"""Clear items cache"""
self.__items = None
# -----------------------------------------------------------------------------
@orm.db_session
def initialize(self) -> None:
"""Initialize repository by fetching entities from database"""
items: Dict[str, ConnectorItem] = {}
for entity in ConnectorEntity.select():
if self.__items is None or entity.connector_id.__str__() not in self.__items:
item = self.__create_item(entity)
else:
item = self.__update_item(self.get_by_id(entity.connector_id), entity.to_dict())
if item is not None:
items[entity.connector_id.__str__()] = item
self.__items = items
# -----------------------------------------------------------------------------
def __entity_created(self, event: IEvent) -> None:
if not isinstance(event, ModelEntityCreatedEvent) or not isinstance(event.entity, ConnectorEntity):
return
self.initialize()
# -----------------------------------------------------------------------------
def __entity_updated(self, event: IEvent) -> None:
if not isinstance(event, ModelEntityUpdatedEvent) or not isinstance(event.entity, ConnectorEntity):
return
self.initialize()
# -----------------------------------------------------------------------------
def __entity_deleted(self, event: IEvent) -> None:
if not isinstance(event, ModelEntityDeletedEvent) or not isinstance(event.entity, ConnectorEntity):
return
self.initialize()
# -----------------------------------------------------------------------------
@staticmethod
def __create_item(entity: ConnectorEntity) -> ConnectorItem or None:
if isinstance(entity, FbBusConnectorEntity):
return FbBusConnectorItem(
connector_id=entity.connector_id,
connector_name=entity.name,
connector_key=entity.key,
connector_enabled=entity.enabled,
connector_type=entity.type,
connector_params=entity.params,
)
if isinstance(entity, FbMqttConnectorEntity):
return FbMqttV1ConnectorItem(
connector_id=entity.connector_id,
connector_name=entity.name,
connector_key=entity.key,
connector_enabled=entity.enabled,
connector_type=entity.type,
connector_params=entity.params,
)
return None
# -----------------------------------------------------------------------------
@staticmethod
def __update_item(item: ConnectorItem, data: Dict) -> ConnectorItem or None:
if isinstance(item, FbBusConnectorItem):
params: Dict = item.params
params["address"] = data.get("address", item.address)
params["serial_interface"] = data.get("serial_interface", item.serial_interface)
params["baud_rate"] = data.get("baud_rate", item.baud_rate)
return FbBusConnectorItem(
connector_id=item.connector_id,
connector_name=data.get("name", item.name),
connector_key=item.key,
connector_enabled=bool(data.get("enabled", item.enabled)),
connector_type=item.type,
connector_params=params,
)
if isinstance(item, FbMqttV1ConnectorItem):
params: Dict = item.params
params["server"] = data.get("server", item.server)
params["port"] = data.get("port", item.port)
params["secured_port"] = data.get("secured_port", item.secured_port)
params["username"] = data.get("username", item.username)
return FbMqttV1ConnectorItem(
connector_id=item.connector_id,
connector_name=data.get("name", item.name),
connector_key=item.key,
connector_enabled=bool(data.get("enabled", item.enabled)),
connector_type=item.type,
connector_params=params,
)
return None
# -----------------------------------------------------------------------------
def __iter__(self) -> "ConnectorsRepository":
        # Reset index for next iteration
self.__iterator_index = 0
return self
# -----------------------------------------------------------------------------
def __len__(self):
if self.__items is None:
self.initialize()
return len(self.__items.values())
# -----------------------------------------------------------------------------
def __next__(self) -> ConnectorItem:
if self.__items is None:
self.initialize()
if self.__iterator_index < len(self.__items.values()):
items: List[ConnectorItem] = list(self.__items.values())
result: ConnectorItem = items[self.__iterator_index]
self.__iterator_index += 1
return result
        # Reset index for next iteration
self.__iterator_index = 0
# End of iteration
raise StopIteration
@inject
class ControlsRepository(ABC):
"""
Base controls repository
@package FastyBird:DevicesModule!
@module repositories
@author Adam Kadlec <[email protected]>
"""
_items: Dict[str, ControlItem] or None = None
__iterator_index = 0
_event_dispatcher: EventDispatcher
# -----------------------------------------------------------------------------
def __init__(
self,
event_dispatcher: EventDispatcher,
) -> None:
self._event_dispatcher = event_dispatcher
self._event_dispatcher.add_listener(
event_id=ModelEntityCreatedEvent.EVENT_NAME,
listener=self._entity_created,
)
self._event_dispatcher.add_listener(
event_id=ModelEntityUpdatedEvent.EVENT_NAME,
listener=self._entity_updated,
)
self._event_dispatcher.add_listener(
event_id=ModelEntityDeletedEvent.EVENT_NAME,
listener=self._entity_deleted,
)
# -----------------------------------------------------------------------------
def get_by_id(
self,
control_id: uuid.UUID,
) -> DeviceControlItem or ChannelControlItem or ConnectorControlItem or None:
"""Find control in cache by provided identifier"""
if self._items is None:
self.initialize()
if control_id.__str__() in self._items:
return self._items[control_id.__str__()]
return None
# -----------------------------------------------------------------------------
def clear(self) -> None:
"""Clear items cache"""
self._items = None
# -----------------------------------------------------------------------------
@abstractmethod
def initialize(self) -> None:
"""Initialize repository by fetching entities from database"""
# -----------------------------------------------------------------------------
def _entity_created(self, event: IEvent) -> None:
if (
not isinstance(event, ModelEntityCreatedEvent)
            or not isinstance(event.entity, (DeviceControlEntity, ChannelControlEntity, ConnectorControlEntity))
):
return
self.initialize()
# -----------------------------------------------------------------------------
def _entity_updated(self, event: IEvent) -> None:
if (
not isinstance(event, ModelEntityUpdatedEvent)
            or not isinstance(event.entity, (DeviceControlEntity, ChannelControlEntity, ConnectorControlEntity))
):
return
self.initialize()
# -----------------------------------------------------------------------------
def _entity_deleted(self, event: IEvent) -> None:
if (
not isinstance(event, ModelEntityDeletedEvent)
            or not isinstance(event.entity, (DeviceControlEntity, ChannelControlEntity, ConnectorControlEntity))
):
return
self.initialize()
# -----------------------------------------------------------------------------
@staticmethod
def _create_item(
entity: DeviceControlEntity or ChannelControlEntity or ConnectorControlEntity
) -> ControlItem or None:
if isinstance(entity, DeviceControlEntity):
return DeviceControlItem(
control_id=entity.control_id,
control_name=entity.name,
device_id=entity.device.device_id,
)
if isinstance(entity, ChannelControlEntity):
return ChannelControlItem(
control_id=entity.control_id,
control_name=entity.name,
device_id=entity.channel.device.device_id,
channel_id=entity.channel.channel_id,
)
if isinstance(entity, ConnectorControlEntity):
return ConnectorControlItem(
control_id=entity.control_id,
control_name=entity.name,
connector_id=entity.connector.connector_id,
)
return None
# -----------------------------------------------------------------------------
@staticmethod
def _update_item(item: ControlItem) -> ControlItem or None:
if isinstance(item, DeviceControlItem):
return DeviceControlItem(
control_id=item.control_id,
control_name=item.name,
device_id=item.device_id,
)
if isinstance(item, ChannelControlItem):
return ChannelControlItem(
control_id=item.control_id,
control_name=item.name,
device_id=item.device_id,
channel_id=item.channel_id,
)
if isinstance(item, ConnectorControlItem):
return ConnectorControlItem(
control_id=item.control_id,
control_name=item.name,
connector_id=item.connector_id,
)
return None
# -----------------------------------------------------------------------------
def __iter__(self) -> "ControlsRepository":
        # Reset index for next iteration
self.__iterator_index = 0
return self
# -----------------------------------------------------------------------------
def __len__(self):
if self._items is None:
self.initialize()
return len(self._items.values())
# -----------------------------------------------------------------------------
def __next__(self) -> ControlItem:
if self._items is None:
self.initialize()
if self.__iterator_index < len(self._items.values()):
items: List[ControlItem] = list(self._items.values())
result: ControlItem = items[self.__iterator_index]
self.__iterator_index += 1
return result
        # Reset index for next iteration
self.__iterator_index = 0
# End of iteration
raise StopIteration
class DevicesControlsRepository(ControlsRepository):
"""
Devices controls repository
@package FastyBird:DevicesModule!
@module repositories
@author Adam Kadlec <[email protected]>
"""
def get_by_name(self, device_id: uuid.UUID, control_name: str) -> DeviceControlItem or None:
"""Find control in cache by provided name"""
if self._items is None:
self.initialize()
for record in self._items.values():
if record.device_id.__eq__(device_id) and record.name == control_name:
return record
return None
# -----------------------------------------------------------------------------
def get_all_by_device(self, device_id: uuid.UUID) -> List[DeviceControlItem]:
"""Find all devices controls in cache for device identifier"""
if self._items is None:
self.initialize()
items: List[DeviceControlItem] = []
for record in self._items.values():
if record.device_id.__eq__(device_id):
items.append(record)
return items
# -----------------------------------------------------------------------------
@orm.db_session
def create_from_exchange(self, routing_key: RoutingKey, data: Dict) -> bool:
"""Process received device control message from exchange when entity was created"""
if routing_key != RoutingKey.DEVICES_CONTROL_ENTITY_CREATED:
return False
if self._items is None:
self.initialize()
return True
data: Dict = validate_exchange_data(ModuleOrigin(ModuleOrigin.DEVICES_MODULE), routing_key, data)
entity: DeviceControlEntity or None = DeviceControlEntity.get(
control_id=uuid.UUID(data.get("id"), version=4),
)
if entity is not None:
self._items[entity.control_id.__str__()] = self._create_item(entity)
return True
return False
# -----------------------------------------------------------------------------
@orm.db_session
def update_from_exchange(self, routing_key: RoutingKey, data: Dict) -> bool:
"""Process received device control message from exchange when entity was updated"""
if routing_key != RoutingKey.DEVICES_CONTROL_ENTITY_UPDATED:
return False
if self._items is None:
self.initialize()
return True
validated_data: Dict = validate_exchange_data(ModuleOrigin(ModuleOrigin.DEVICES_MODULE), routing_key, data)
if validated_data.get("id") not in self._items:
entity: DeviceControlEntity or None = DeviceControlEntity.get(
control_id=uuid.UUID(validated_data.get("id"), version=4)
)
if entity is not None:
self._items[entity.control_id.__str__()] = self._create_item(entity)
return True
return False
item = self._update_item(self.get_by_id(uuid.UUID(validated_data.get("id"), version=4)))
if item is not None:
self._items[validated_data.get("id")] = item
return True
return False
# -----------------------------------------------------------------------------
@orm.db_session
def delete_from_exchange(self, routing_key: RoutingKey, data: Dict) -> bool:
"""Process received device control message from exchange when entity was updated"""
if routing_key != RoutingKey.DEVICES_CONTROL_ENTITY_DELETED:
return False
if data.get("id") in self._items:
del self._items[data.get("id")]
return True
return False
# -----------------------------------------------------------------------------
@orm.db_session
def initialize(self) -> None:
"""Initialize devices controls repository by fetching entities from database"""
items: Dict[str, DeviceControlItem] = {}
for entity in DeviceControlEntity.select():
if self._items is None or entity.control_id.__str__() not in self._items:
item = self._create_item(entity)
else:
item = self._update_item(self.get_by_id(entity.control_id))
if item is not None:
items[entity.control_id.__str__()] = item
self._items = items
class ChannelsControlsRepository(ControlsRepository):
"""
Channels controls repository
@package FastyBird:DevicesModule!
@module repositories
@author Adam Kadlec <[email protected]>
"""
def get_by_name(self, channel_id: uuid.UUID, control_name: str) -> ChannelControlItem or None:
"""Find control in cache by provided name"""
if self._items is None:
self.initialize()
for record in self._items.values():
if record.channel_id.__eq__(channel_id) and record.name == control_name:
return record
return None
# -----------------------------------------------------------------------------
def get_all_by_channel(self, channel_id: uuid.UUID) -> List[ChannelControlItem]:
"""Find all channels controls in cache for channel identifier"""
if self._items is None:
self.initialize()
items: List[ChannelControlItem] = []
for record in self._items.values():
if record.channel_id.__eq__(channel_id):
items.append(record)
return items
# -----------------------------------------------------------------------------
@orm.db_session
def create_from_exchange(self, routing_key: RoutingKey, data: Dict) -> bool:
"""Process received channel control message from exchange when entity was created"""
if routing_key != RoutingKey.CHANNELS_CONTROL_ENTITY_CREATED:
return False
if self._items is None:
self.initialize()
return True
data: Dict = validate_exchange_data(ModuleOrigin(ModuleOrigin.DEVICES_MODULE), routing_key, data)
entity: ChannelControlEntity or None = ChannelControlEntity.get(
control_id=uuid.UUID(data.get("id"), version=4),
)
if entity is not None:
self._items[entity.control_id.__str__()] = self._create_item(entity)
return True
return False
# -----------------------------------------------------------------------------
@orm.db_session
def update_from_exchange(self, routing_key: RoutingKey, data: Dict) -> bool:
"""Process received channel control message from exchange when entity was updated"""
if routing_key != RoutingKey.CHANNELS_CONTROL_ENTITY_UPDATED:
return False
if self._items is None:
self.initialize()
return True
validated_data: Dict = validate_exchange_data(ModuleOrigin(ModuleOrigin.DEVICES_MODULE), routing_key, data)
if validated_data.get("id") not in self._items:
entity: ChannelControlEntity or None = ChannelControlEntity.get(
control_id=uuid.UUID(validated_data.get("id"), version=4)
)
if entity is not None:
self._items[entity.control_id.__str__()] = self._create_item(entity)
return True
return False
item = self._update_item(self.get_by_id(uuid.UUID(validated_data.get("id"), version=4)))
if item is not None:
self._items[validated_data.get("id")] = item
return True
return False
# -----------------------------------------------------------------------------
@orm.db_session
def delete_from_exchange(self, routing_key: RoutingKey, data: Dict) -> bool:
"""Process received channel control message from exchange when entity was updated"""
if routing_key != RoutingKey.CHANNELS_CONTROL_ENTITY_DELETED:
return False
if data.get("id") in self._items:
del self._items[data.get("id")]
return True
return False
# -----------------------------------------------------------------------------
@orm.db_session
def initialize(self) -> None:
"""Initialize channel controls repository by fetching entities from database"""
items: Dict[str, ChannelControlItem] = {}
for entity in ChannelControlEntity.select():
if self._items is None or entity.control_id.__str__() not in self._items:
item = self._create_item(entity)
else:
item = self._update_item(self.get_by_id(entity.control_id))
if item is not None:
items[entity.control_id.__str__()] = item
self._items = items
class ConnectorsControlsRepository(ControlsRepository):
"""
Connectors controls repository
@package FastyBird:DevicesModule!
@module repositories
@author Adam Kadlec <[email protected]>
"""
def get_by_name(self, connector_id: uuid.UUID, control_name: str) -> ConnectorControlItem or None:
"""Find control in cache by provided name"""
if self._items is None:
self.initialize()
for record in self._items.values():
if record.connector_id.__eq__(connector_id) and record.name == control_name:
return record
return None
# -----------------------------------------------------------------------------
    def get_all_by_connector(self, connector_id: uuid.UUID) -> List[ConnectorControlItem]:
        """Find all connector controls in cache for connector identifier"""
if self._items is None:
self.initialize()
items: List[ConnectorControlItem] = []
for record in self._items.values():
if record.connector_id.__eq__(connector_id):
items.append(record)
return items
# -----------------------------------------------------------------------------
@orm.db_session
def create_from_exchange(self, routing_key: RoutingKey, data: Dict) -> bool:
"""Process received connector control message from exchange when entity was created"""
if routing_key != RoutingKey.CONNECTORS_CONTROL_ENTITY_CREATED:
return False
if self._items is None:
self.initialize()
return True
data: Dict = validate_exchange_data(ModuleOrigin(ModuleOrigin.DEVICES_MODULE), routing_key, data)
entity: ConnectorControlEntity or None = ConnectorControlEntity.get(
control_id=uuid.UUID(data.get("id"), version=4),
)
if entity is not None:
self._items[entity.control_id.__str__()] = self._create_item(entity)
return True
return False
# -----------------------------------------------------------------------------
@orm.db_session
def update_from_exchange(self, routing_key: RoutingKey, data: Dict) -> bool:
"""Process received connector control message from exchange when entity was updated"""
if routing_key != RoutingKey.CONNECTORS_CONTROL_ENTITY_UPDATED:
return False
if self._items is None:
self.initialize()
return True
validated_data: Dict = validate_exchange_data(ModuleOrigin(ModuleOrigin.DEVICES_MODULE), routing_key, data)
if validated_data.get("id") not in self._items:
entity: ConnectorControlEntity or None = ConnectorControlEntity.get(
control_id=uuid.UUID(validated_data.get("id"), version=4)
)
if entity is not None:
self._items[entity.control_id.__str__()] = self._create_item(entity)
return True
return False
item = self._update_item(self.get_by_id(uuid.UUID(validated_data.get("id"), version=4)))
if item is not None:
self._items[validated_data.get("id")] = item
return True
return False
# -----------------------------------------------------------------------------
@orm.db_session
def delete_from_exchange(self, routing_key: RoutingKey, data: Dict) -> bool:
"""Process received connector control message from exchange when entity was updated"""
if routing_key != RoutingKey.CONNECTORS_CONTROL_ENTITY_DELETED:
return False
if data.get("id") in self._items:
del self._items[data.get("id")]
return True
return False
# -----------------------------------------------------------------------------
@orm.db_session
def initialize(self) -> None:
"""Initialize connector controls repository by fetching entities from database"""
items: Dict[str, ConnectorControlItem] = {}
for entity in ConnectorControlEntity.select():
if self._items is None or entity.control_id.__str__() not in self._items:
item = self._create_item(entity)
else:
item = self._update_item(self.get_by_id(entity.control_id))
if item is not None:
items[entity.control_id.__str__()] = item
self._items = items
@inject
class ConfigurationRepository(ABC):
"""
Base configuration repository
@package FastyBird:DevicesModule!
@module repositories
@author Adam Kadlec <[email protected]>
"""
_items: Dict[str, DeviceConfigurationItem or ChannelConfigurationItem] or None = None
__iterator_index = 0
_event_dispatcher: EventDispatcher
# -----------------------------------------------------------------------------
def __init__(
self,
event_dispatcher: EventDispatcher,
) -> None:
        self._event_dispatcher = event_dispatcher
        self._event_dispatcher.add_listener(
            event_id=ModelEntityCreatedEvent.EVENT_NAME,
            listener=self._entity_created,
        )
        self._event_dispatcher.add_listener(
            event_id=ModelEntityUpdatedEvent.EVENT_NAME,
            listener=self._entity_updated,
        )
        self._event_dispatcher.add_listener(
            event_id=ModelEntityDeletedEvent.EVENT_NAME,
            listener=self._entity_deleted,
        )
# -----------------------------------------------------------------------------
def get_by_id(self, configuration_id: uuid.UUID) -> DeviceConfigurationItem or ChannelConfigurationItem or None:
"""Find configuration in cache by provided identifier"""
if self._items is None:
self.initialize()
if configuration_id.__str__() in self._items:
return self._items[configuration_id.__str__()]
return None
# -----------------------------------------------------------------------------
def get_by_key(self, configuration_key: str) -> DeviceConfigurationItem or ChannelConfigurationItem or None:
"""Find configuration in cache by provided key"""
if self._items is None:
self.initialize()
for record in self._items.values():
if record.key == configuration_key:
return record
return None
# -----------------------------------------------------------------------------
def clear(self) -> None:
"""Clear items cache"""
self._items = None
# -----------------------------------------------------------------------------
@abstractmethod
def initialize(self) -> None:
"""Initialize repository by fetching entities from database"""
# -----------------------------------------------------------------------------
def _entity_created(self, event: IEvent) -> None:
if (
not isinstance(event, ModelEntityCreatedEvent)
or not isinstance(event.entity, (DeviceConfigurationEntity, ChannelConfigurationEntity))
):
return
self.initialize()
# -----------------------------------------------------------------------------
def _entity_updated(self, event: IEvent) -> None:
if (
not isinstance(event, ModelEntityUpdatedEvent)
or not isinstance(event.entity, (DeviceConfigurationEntity, ChannelConfigurationEntity))
):
return
self.initialize()
# -----------------------------------------------------------------------------
def _entity_deleted(self, event: IEvent) -> None:
if (
not isinstance(event, ModelEntityDeletedEvent)
or not isinstance(event.entity, (DeviceConfigurationEntity, ChannelConfigurationEntity))
):
return
self.initialize()
# -----------------------------------------------------------------------------
@staticmethod
def _create_item(entity: DeviceConfigurationEntity or ChannelConfigurationEntity) -> ConfigurationItem or None:
if isinstance(entity, DeviceConfigurationEntity):
return DeviceConfigurationItem(
configuration_id=entity.configuration_id,
configuration_key=entity.key,
configuration_identifier=entity.identifier,
configuration_name=entity.name,
configuration_comment=entity.comment,
configuration_data_type=entity.data_type,
configuration_value=entity.value,
configuration_default=entity.default,
configuration_params=entity.params if entity.params is not None else {},
device_id=entity.device.device_id,
)
if isinstance(entity, ChannelConfigurationEntity):
return ChannelConfigurationItem(
configuration_id=entity.configuration_id,
configuration_key=entity.key,
configuration_identifier=entity.identifier,
configuration_name=entity.name,
configuration_comment=entity.comment,
configuration_data_type=entity.data_type,
configuration_value=entity.value,
configuration_default=entity.default,
configuration_params=entity.params if entity.params is not None else {},
device_id=entity.channel.device.device_id,
channel_id=entity.channel.channel_id,
)
return None
# -----------------------------------------------------------------------------
@staticmethod
def _update_item(item: ConfigurationItem, data: Dict) -> ConfigurationItem or None:
data_type = data.get("data_type", item.data_type.value if item.data_type is not None else None)
data_type = DataType(data_type) if data_type is not None else None
params: Dict[str, str or int or float or bool or List or None] = {}
if "min" in data.keys():
params["min"] = data.get("min", item.min_value)
if "max" in data.keys():
params["max"] = data.get("max", item.max_value)
if "step" in data.keys():
params["step"] = data.get("step", item.step_value)
if "values" in data.keys():
params["values"] = data.get("values", item.values)
if isinstance(item, DeviceConfigurationItem):
return DeviceConfigurationItem(
configuration_id=item.configuration_id,
configuration_key=item.key,
configuration_identifier=item.identifier,
configuration_name=data.get("name", item.name),
configuration_comment=data.get("comment", item.comment),
configuration_data_type=data_type,
configuration_value=data.get("value", item.value),
configuration_default=data.get("default", item.default),
configuration_params={**item.params, **params},
device_id=item.device_id,
)
if isinstance(item, ChannelConfigurationItem):
return ChannelConfigurationItem(
configuration_id=item.configuration_id,
configuration_key=item.key,
configuration_identifier=item.identifier,
configuration_name=data.get("name", item.name),
configuration_comment=data.get("comment", item.comment),
configuration_data_type=data_type,
configuration_value=data.get("value", item.value),
configuration_default=data.get("default", item.default),
configuration_params={**item.params, **params},
device_id=item.device_id,
channel_id=item.channel_id,
)
return None
# -----------------------------------------------------------------------------
def __iter__(self) -> "ConfigurationRepository":
        # Reset index for next iteration
self.__iterator_index = 0
return self
# -----------------------------------------------------------------------------
def __len__(self):
if self._items is None:
self.initialize()
return len(self._items.values())
# -----------------------------------------------------------------------------
def __next__(self) -> DeviceConfigurationItem or ChannelConfigurationItem:
if self._items is None:
self.initialize()
if self.__iterator_index < len(self._items.values()):
items: List[DeviceConfigurationItem or ChannelConfigurationItem] = list(self._items.values())
result: DeviceConfigurationItem or ChannelConfigurationItem = items[self.__iterator_index]
self.__iterator_index += 1
return result
        # Reset index for next iteration
self.__iterator_index = 0
# End of iteration
raise StopIteration
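# A minimal usage sketch (assuming an injected EventDispatcher instance and an
# active Pony ORM database binding; the names below are illustrative only):
#
#   repository = DevicesConfigurationRepository(event_dispatcher=dispatcher)
#   for configuration in repository:          # lazily calls initialize()
#       print(configuration.identifier, configuration.value)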
class DevicesConfigurationRepository(ConfigurationRepository):
"""
Devices configuration repository
@package FastyBird:DevicesModule!
@module repositories
@author Adam Kadlec <[email protected]>
"""
def get_by_identifier(self, device_id: uuid.UUID, configuration_identifier: str) -> DeviceConfigurationItem or None:
"""Find configuration in cache by provided identifier"""
if self._items is None:
self.initialize()
for record in self._items.values():
if record.device_id.__eq__(device_id) and record.identifier == configuration_identifier:
return record
return None
# -----------------------------------------------------------------------------
def get_all_by_device(self, device_id: uuid.UUID) -> List[DeviceConfigurationItem]:
"""Find all devices properties in cache for device identifier"""
if self._items is None:
self.initialize()
items: List[DeviceConfigurationItem] = []
for record in self._items.values():
if record.device_id.__eq__(device_id):
items.append(record)
return items
# -----------------------------------------------------------------------------
@orm.db_session
def create_from_exchange(self, routing_key: RoutingKey, data: Dict) -> bool:
"""Process received device configuration message from exchange when entity was created"""
if routing_key != RoutingKey.DEVICES_CONFIGURATION_ENTITY_CREATED:
return False
if self._items is None:
self.initialize()
return True
        validated_data: Dict = validate_exchange_data(ModuleOrigin(ModuleOrigin.DEVICES_MODULE), routing_key, data)
        entity: DeviceConfigurationEntity or None = DeviceConfigurationEntity.get(
            configuration_id=uuid.UUID(validated_data.get("id"), version=4),
)
if entity is not None:
self._items[entity.configuration_id.__str__()] = self._create_item(entity)
return True
return False
# -----------------------------------------------------------------------------
@orm.db_session
def update_from_exchange(self, routing_key: RoutingKey, data: Dict) -> bool:
"""Process received device configuration message from exchange when entity was updated"""
if routing_key != RoutingKey.DEVICES_CONFIGURATION_ENTITY_UPDATED:
return False
if self._items is None:
self.initialize()
return True
validated_data: Dict = validate_exchange_data(ModuleOrigin(ModuleOrigin.DEVICES_MODULE), routing_key, data)
if validated_data.get("id") not in self._items:
entity: DeviceConfigurationEntity or None = DeviceConfigurationEntity.get(
configuration_id=uuid.UUID(validated_data.get("id"), version=4)
)
if entity is not None:
self._items[entity.configuration_id.__str__()] = self._create_item(entity)
return True
return False
item = self._update_item(
self.get_by_id(uuid.UUID(validated_data.get("id"), version=4)),
validated_data,
)
if item is not None:
self._items[validated_data.get("id")] = item
return True
return False
# -----------------------------------------------------------------------------
@orm.db_session
def delete_from_exchange(self, routing_key: RoutingKey, data: Dict) -> bool:
"""Process received device configuration message from exchange when entity was updated"""
if routing_key != RoutingKey.DEVICES_CONFIGURATION_ENTITY_DELETED:
return False
if data.get("id") in self._items:
del self._items[data.get("id")]
return True
return False
# -----------------------------------------------------------------------------
@orm.db_session
def initialize(self) -> None:
"""Initialize devices properties repository by fetching entities from database"""
items: Dict[str, DeviceConfigurationItem] = {}
for entity in DeviceConfigurationEntity.select():
if self._items is None or entity.configuration_id.__str__() not in self._items:
item = self._create_item(entity)
else:
item = self._update_item(self.get_by_id(entity.configuration_id), entity.to_dict())
if item is not None:
items[entity.configuration_id.__str__()] = item
self._items = items
class ChannelsConfigurationRepository(ConfigurationRepository):
"""
Channel configuration repository
@package FastyBird:DevicesModule!
@module repositories
@author Adam Kadlec <[email protected]>
"""
def get_by_identifier(
self,
channel_id: uuid.UUID,
configuration_identifier: str,
) -> ChannelConfigurationItem or None:
"""Find configuration in cache by provided identifier"""
if self._items is None:
self.initialize()
for record in self._items.values():
if record.channel_id.__eq__(channel_id) and record.identifier == configuration_identifier:
return record
return None
# -----------------------------------------------------------------------------
def get_all_by_channel(self, channel_id: uuid.UUID) -> List[ChannelConfigurationItem]:
"""Find all channels properties in cache for channel identifier"""
if self._items is None:
self.initialize()
items: List[ChannelConfigurationItem] = []
for record in self._items.values():
if record.channel_id.__eq__(channel_id):
items.append(record)
return items
# -----------------------------------------------------------------------------
@orm.db_session
def create_from_exchange(self, routing_key: RoutingKey, data: Dict) -> bool:
"""Process received channel configuration message from exchange when entity was created"""
if routing_key != RoutingKey.CHANNELS_CONFIGURATION_ENTITY_CREATED:
return False
if self._items is None:
self.initialize()
return True
        validated_data: Dict = validate_exchange_data(ModuleOrigin(ModuleOrigin.DEVICES_MODULE), routing_key, data)
        entity: ChannelConfigurationEntity or None = ChannelConfigurationEntity.get(
            configuration_id=uuid.UUID(validated_data.get("id"), version=4),
)
if entity is not None:
self._items[entity.configuration_id.__str__()] = self._create_item(entity)
return True
return False
# -----------------------------------------------------------------------------
@orm.db_session
def update_from_exchange(self, routing_key: RoutingKey, data: Dict) -> bool:
"""Process received channel configuration message from exchange when entity was updated"""
if routing_key != RoutingKey.CHANNELS_CONFIGURATION_ENTITY_UPDATED:
return False
if self._items is None:
self.initialize()
return True
validated_data: Dict = validate_exchange_data(ModuleOrigin(ModuleOrigin.DEVICES_MODULE), routing_key, data)
if validated_data.get("id") not in self._items:
entity: ChannelConfigurationEntity or None = ChannelConfigurationEntity.get(
configuration_id=uuid.UUID(validated_data.get("id"), version=4)
)
if entity is not None:
self._items[entity.configuration_id.__str__()] = self._create_item(entity)
return True
return False
item = self._update_item(
self.get_by_id(uuid.UUID(validated_data.get("id"), version=4)),
validated_data,
)
if item is not None:
self._items[validated_data.get("id")] = item
return True
return False
# -----------------------------------------------------------------------------
@orm.db_session
def delete_from_exchange(self, routing_key: RoutingKey, data: Dict) -> bool:
"""Process received channel configuration message from exchange when entity was updated"""
if routing_key != RoutingKey.CHANNELS_CONFIGURATION_ENTITY_DELETED:
return False
if data.get("id") in self._items:
del self._items[data.get("id")]
return True
return False
# -----------------------------------------------------------------------------
@orm.db_session
def initialize(self) -> None:
"""Initialize channel properties repository by fetching entities from database"""
items: Dict[str, ChannelConfigurationItem] = {}
for entity in ChannelConfigurationEntity.select():
if self._items is None or entity.configuration_id.__str__() not in self._items:
item = self._create_item(entity)
else:
item = self._update_item(self.get_by_id(entity.configuration_id), entity.to_dict())
if item is not None:
items[entity.configuration_id.__str__()] = item
self._items = items
def validate_exchange_data(origin: ModuleOrigin, routing_key: RoutingKey, data: Dict) -> Dict:
"""
Validate received RPC message against defined schema
"""
try:
schema: str = load_schema(origin, routing_key)
except metadata_exceptions.FileNotFoundException as ex:
raise HandleExchangeDataException("Provided data could not be validated") from ex
except metadata_exceptions.InvalidArgumentException as ex:
raise HandleExchangeDataException("Provided data could not be validated") from ex
try:
return validate(json.dumps(data), schema)
except metadata_exceptions.MalformedInputException as ex:
raise HandleExchangeDataException("Provided data are not in valid json format") from ex
except metadata_exceptions.LogicException as ex:
raise HandleExchangeDataException("Provided data could not be validated") from ex
except metadata_exceptions.InvalidDataException as ex:
raise HandleExchangeDataException("Provided data are not valid") from ex
|
import unittest
from unittest import mock
import authenticator as au
class TestAuthenticator(unittest.TestCase):
def test_gen_16(self):
"""gen_auth_string must return an expecteds formatted string"""
name = 'foo'
secret = 'random_string'
res = au.gen_auth_string(name, secret)
template = "otpauth://totp/{}?secret={}".format(name, secret)
assert res == template
@mock.patch('authenticator.gen_16', return_value='egg')
    def test_gen_16_None(self, gen_16):
"""gen_auth_string must return a correctly formatted string with no args"""
res = au.gen_auth_string(None, None)
template = "otpauth://totp/authenticator?secret=egg"
assert res == template, f"Failure for {res}"
@mock.patch('authenticator.otp.get_totp', return_value='egg')
def test_gen_time_token(self, get_totp):
"""Ensure the get_totp function is called with the correct args."""
secret = 'banana'
res = au.gen_time_token(secret)
get_totp.assert_called_with(secret)
assert res == 'egg'
@mock.patch('authenticator.otp.valid_totp', return_value='sploge')
    def test_validate_time_auth(self, valid_totp):
        """Ensure the valid_totp function is called with the correct args."""
secret = 'banana'
token = 'orange'
res = au.validate_time_auth(token, secret)
        valid_totp.assert_called_with(token=token, secret=secret)
assert res == 'sploge'
@mock.patch('pyqrcode.create', return_value='URLO')
    def test_create_qr(self, create):
        """Ensure pyqrcode.create is called with the correct URL."""
secret = 'banana'
name = 'fred'
url = au.create_qr(secret, name)
#text = url.text(quiet_zone=1)
create.assert_called_with('otpauth://totp/fred?secret=banana')
assert 'URLO' == url, url
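# Allow running this test module directly (the module filename is an
# assumption; adjust to match your layout):
#   python -m unittest test_authenticator
if __name__ == '__main__':
    unittest.main()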
|
__version__="1.5.7"
|
# -*- coding: utf-8 -*-
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = '[email protected] (Eric Bidelman)'
import collections
import logging
import datetime
import email
import json
import os
import re
import webapp2
from google.appengine.ext import db
from google.appengine.api import mail
from google.appengine.api import urlfetch
from google.appengine.api import users
from google.appengine.api import taskqueue
from google.appengine.ext.webapp.mail_handlers import BounceNotificationHandler
from django.template.loader import render_to_string
from django.utils.html import conditional_escape as escape
import common
import settings
import models
def get_default_headers():
headers = {
'Authorization': 'key=%s' % settings.FIREBASE_SERVER_KEY,
'Content-Type': 'application/json'
}
return headers
def format_email_body(is_update, feature, changes):
"""Return an HTML string for a notification email body."""
if feature.shipped_milestone:
milestone_str = feature.shipped_milestone
elif feature.shipped_milestone is None and feature.shipped_android_milestone:
milestone_str = '%s (android)' % feature.shipped_android_milestone
else:
milestone_str = 'not yet assigned'
moz_link_urls = [link for link in feature.doc_links
if 'developer.mozilla.org' in link]
formatted_changes = ''
for prop in changes:
prop_name = prop['prop_name']
new_val = prop['new_val']
old_val = prop['old_val']
formatted_changes += ('<li>%s: <br/><b>old:</b> %s <br/>'
'<b>new:</b> %s<br/></li>\n\n' %
(prop_name, escape(old_val), escape(new_val)))
if not formatted_changes:
formatted_changes = '<li>None</li>'
body_data = {
'feature': feature,
'id': feature.key().id(),
'milestone': milestone_str,
'status': models.IMPLEMENTATION_STATUS[feature.impl_status_chrome],
'formatted_changes': formatted_changes,
'moz_link_urls': moz_link_urls,
}
template_path = ('update-feature-email.html' if is_update
else 'new-feature-email.html')
body = render_to_string(template_path, body_data)
return body
def accumulate_reasons(addr_reasons, user_list, reason):
"""Add a reason string for each user."""
for user in user_list:
addr_reasons[user.email].append(reason)
def convert_reasons_to_task(addr, reasons, email_html, subject):
"""Add a task dict to task_list for each user who has not already got one."""
assert reasons, 'We are emailing someone without any reason'
footer_lines = ['<p>You are receiving this email because:</p>', '<ul>']
for reason in sorted(set(reasons)):
footer_lines.append('<li>%s</li>' % reason)
footer_lines.append('</ul>')
footer_lines.append('<p><a href="%ssettings">Unsubscribe</a></p>' %
settings.SITE_URL)
email_html_with_footer = email_html + '\n\n' + '\n'.join(footer_lines)
one_email_task = {
'to': addr,
'subject': subject,
'html': email_html_with_footer
}
return one_email_task
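# A task dict produced above is JSON-encoded onto the task queue and later
# consumed by OutboundEmailHandler; illustrative shape only:
#   {'to': 'user@example.com', 'subject': 'new feature: ...', 'html': '<p>...</p>'}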
def make_email_tasks(feature, is_update=False, changes=None):
  """Return a list of task dicts to notify users of feature changes."""
  changes = changes or []
feature_watchers = models.FeatureOwner.all().filter(
'watching_all_features = ', True).fetch(None)
email_html = format_email_body(is_update, feature, changes)
if is_update:
subject = 'updated feature: %s' % feature.name
else:
subject = 'new feature: %s' % feature.name
addr_reasons = collections.defaultdict(list) # [{email_addr: [reason,...]}]
accumulate_reasons(
addr_reasons, feature_watchers,
'You are watching all feature changes')
# There will always be at least one component.
for component_name in feature.blink_components:
component = models.BlinkComponent.get_by_name(component_name)
if not component:
      logging.warn('Blink component "%s" not found. '
                   'Not sending email to subscribers' % component_name)
continue
accumulate_reasons(
addr_reasons, component.owners,
'You are an owner of this feature\'s component')
accumulate_reasons(
addr_reasons, component.subscribers,
'You subscribe to this feature\'s component')
starrers = FeatureStar.get_feature_starrers(feature.key().id())
accumulate_reasons(addr_reasons, starrers, 'You starred this feature')
all_tasks = [convert_reasons_to_task(addr, reasons, email_html, subject)
for addr, reasons in sorted(addr_reasons.items())]
return all_tasks
class PushSubscription(models.DictModel):
subscription_id = db.StringProperty(required=True)
class FeatureStar(models.DictModel):
"""A FeatureStar represent one user's interest in one feature."""
email = db.EmailProperty(required=True)
feature_id = db.IntegerProperty(required=True)
# This is so that we do not sync a bell to a star that the user has removed.
starred = db.BooleanProperty(default=True)
@classmethod
  def get_star(cls, email, feature_id):
"""If that user starred that feature, return the model or None."""
q = FeatureStar.all()
q.filter('email =', email)
q.filter('feature_id =', feature_id)
return q.get()
@classmethod
  def set_star(cls, email, feature_id, starred=True):
    """Set/clear a star for the specified user and feature."""
    feature_star = cls.get_star(email, feature_id)
if not feature_star and starred:
feature_star = FeatureStar(email=email, feature_id=feature_id)
feature_star.put()
elif feature_star and feature_star.starred != starred:
feature_star.starred = starred
feature_star.put()
else:
return # No need to update anything in datastore
feature = models.Feature.get_by_id(feature_id)
feature.star_count += 1 if starred else -1
if feature.star_count < 0:
logging.error('count would be < 0: %r', (email, feature_id, starred))
return
feature.put(notify=False)
@classmethod
  def get_user_stars(cls, email):
"""Return a list of feature_ids of all features that the user starred."""
q = FeatureStar.all()
q.filter('email =', email)
q.filter('starred =', True)
feature_stars = q.fetch(None)
logging.info('found %d stars for %r', len(feature_stars), email)
feature_ids = [fs.feature_id for fs in feature_stars]
logging.info('returning %r', feature_ids)
return feature_ids
@classmethod
  def get_feature_starrers(cls, feature_id):
"""Return list of UserPref objects for starrers that want notifications."""
q = FeatureStar.all()
q.filter('feature_id =', feature_id)
q.filter('starred =', True)
feature_stars = q.fetch(None)
logging.info('found %d stars for %r', len(feature_stars), feature_id)
emails = [fs.email for fs in feature_stars]
logging.info('looking up %r', emails)
user_prefs = models.UserPref.get_prefs_for_emails(emails)
user_prefs = [up for up in user_prefs
if up.notify_as_starrer and not up.bounced]
return user_prefs
class FeatureChangeHandler(webapp2.RequestHandler):
"""This task handles a feature creation or update by making email tasks."""
def post(self):
json_body = json.loads(self.request.body)
feature = json_body.get('feature') or None
is_update = json_body.get('is_update') or False
changes = json_body.get('changes') or []
# Email feature subscribers if the feature exists and there were
# actually changes to it.
feature = models.Feature.get_by_id(feature['id'])
if feature and (is_update and len(changes) or not is_update):
email_tasks = make_email_tasks(
feature, is_update=is_update, changes=changes)
for one_email_dict in email_tasks:
payload = json.dumps(one_email_dict)
task = taskqueue.Task(
method='POST', url='/tasks/outbound-email', payload=payload,
target='notifier')
taskqueue.Queue().add(task)
class OutboundEmailHandler(webapp2.RequestHandler):
"""Task to send a notification email to one recipient."""
def post(self):
json_body = json.loads(self.request.body)
to = json_body['to']
subject = json_body['subject']
email_html = json_body['html']
if settings.SEND_ALL_EMAIL_TO:
to_user, to_domain = to.split('@')
to = settings.SEND_ALL_EMAIL_TO % {'user': to_user, 'domain': to_domain}
message = mail.EmailMessage(
sender='Chromestatus <admin@%s.appspotmail.com>' % settings.APP_ID,
to=to, subject=subject, html=email_html)
message.check_initialized()
logging.info('Will send the following email:\n')
logging.info('To: %s', message.to)
logging.info('Subject: %s', message.subject)
logging.info('Body:\n%s', message.html)
if settings.SEND_EMAIL:
message.send()
logging.info('Email sent')
else:
logging.info('Email not sent because of settings.SEND_EMAIL')
class NotificationNewSubscriptionHandler(webapp2.RequestHandler):
def post(self):
json_body = json.loads(self.request.body)
subscription_id = json_body.get('subscriptionId') or None
if subscription_id is None:
return
# Don't add duplicate tokens.
query = PushSubscription.all(keys_only=True).filter(
'subscription_id =', subscription_id)
found_token = query.get()
if found_token is None:
subscription = PushSubscription(subscription_id=subscription_id)
subscription.put()
class SetStarHandler(webapp2.RequestHandler):
"""Handle JSON API requests to set/clear a star."""
def post(self):
"""Stars or unstars a feature for the signed in user."""
json_body = json.loads(self.request.body)
feature_id = json_body.get('featureId')
starred = json_body.get('starred', True)
if type(feature_id) != int:
logging.info('Invalid feature_id: %r', feature_id)
self.abort(400)
feature = models.Feature.get_feature(feature_id)
if not feature:
logging.info('feature not found: %r', feature_id)
self.abort(404)
user = users.get_current_user()
if not user:
logging.info('User must be signed in before starring')
self.abort(400)
FeatureStar.set_star(user.email(), feature_id, starred)
data = {}
self.response.headers['Content-Type'] = 'application/json;charset=utf-8'
    self.response.write(json.dumps(data, separators=(',',':')))
class GetUserStarsHandler(webapp2.RequestHandler):
"""Handle JSON API requests list all stars for current user."""
def post(self):
"""Returns a list of starred feature_ids for the signed in user."""
# Note: the post body is not used.
user = users.get_current_user()
if user:
feature_ids = FeatureStar.get_user_stars(user.email())
else:
feature_ids = [] # Anon users cannot star features.
data = {
'featureIds': feature_ids,
}
self.response.headers['Content-Type'] = 'application/json;charset=utf-8'
    self.response.write(json.dumps(data, separators=(',',':')))
class NotificationSubscribeHandler(webapp2.RequestHandler):
def post(self, feature_id=None):
"""Subscribes or unsubscribes a token to a topic."""
json_body = json.loads(self.request.body)
subscription_id = json_body.get('subscriptionId') or None
remove = json_body.get('remove') or False
if subscription_id is None or feature_id is None:
return
data = {}
topic_id = feature_id if feature_id else 'new-feature'
url = ('https://iid.googleapis.com/iid/v1/%s/rel/topics/%s' %
(subscription_id, topic_id))
if remove:
url = 'https://iid.googleapis.com/iid/v1:batchRemove'
data = """{{
"to": "/topics/{topic_id}",
"registration_tokens": ["{token}"]
}}""".format(topic_id=topic_id, token=subscription_id)
result = urlfetch.fetch(url=url, payload=data, method=urlfetch.POST,
headers=get_default_headers())
if result.status_code != 200:
logging.error('Error: subscribing %s to topic: %s' %
(subscription_id, topic_id))
return
class NotificationSendHandler(webapp2.RequestHandler):
def _send_notification_to_feature_subscribers(self, feature, is_update=False):
"""Sends a notification to users when new features are added or updated.
Args:
feature: Feature that was added/modified.
is_update: True if this was an update to the feature. False if
it was newly added.
"""
if not settings.SEND_PUSH_NOTIFICATIONS:
return
feature_id = feature.key().id()
topic_id = feature_id if is_update else 'new-feature'
data = """{{
"notification": {{
"title": "{title}",
"body": "{added_str}. Click here for more information.",
"icon": "/static/img/crstatus_192.png",
"click_action": "https://www.chromestatus.com/feature/{id}"
}},
"to": "/topics/{topic_id}"
}}""".format(title=feature.name, id=feature_id, topic_id=topic_id,
added_str=('Was updated' if is_update else 'New feature added'))
result = urlfetch.fetch(url='https://fcm.googleapis.com/fcm/send',
payload=data, method=urlfetch.POST, headers=get_default_headers())
if result.status_code != 200:
logging.error('Error sending notification to topic %s. %s' %
(topic_id, result.content))
return
def post(self):
json_body = json.loads(self.request.body)
feature = json_body.get('feature') or None
is_update = json_body.get('is_update') or False
changes = json_body.get('changes') or []
# Email feature subscribers if the feature exists and
# there were changes to it.
feature = models.Feature.get_by_id(feature['id'])
if feature and (is_update and len(changes) or not is_update):
self._send_notification_to_feature_subscribers(
feature=feature, is_update=is_update)
class NotificationSubscriptionInfoHandler(webapp2.RequestHandler):
def post(self):
json_body = json.loads(self.request.body)
subscription_id = json_body.get('subscriptionId') or None
if subscription_id is None:
return
url = 'https://iid.googleapis.com/iid/info/%s?details=true' % subscription_id
result = urlfetch.fetch(
url=url, method=urlfetch.GET, headers=get_default_headers())
if result.status_code != 200:
logging.error('Error: fetching info for subscription %s' % subscription_id)
self.response.set_status(400, message=result.content)
self.response.write(result.content)
return
self.response.write(result.content)
class NotificationsListHandler(common.ContentHandler):
def get(self):
subscriptions = PushSubscription.all().fetch(None)
template_data = {
'FIREBASE_SERVER_KEY': settings.FIREBASE_SERVER_KEY,
'subscriptions': json.dumps([s.subscription_id for s in subscriptions])
}
self.render(data=template_data, template_path=os.path.join('admin/notifications/list.html'))
class BouncedEmailHandler(BounceNotificationHandler):
  """Handler to notice when email to a given user is bouncing."""

  BAD_WRAP_RE = re.compile('=\r\n')
  BAD_EQ_RE = re.compile('=3D')
# For docs on AppEngine's bounce email handling, see:
# https://cloud.google.com/appengine/docs/python/mail/bounce
# Source code is in file:
# google_appengine/google/appengine/ext/webapp/mail_handlers.py
def post(self):
try:
super(BouncedEmailHandler, self).post()
except AttributeError:
# Work-around for
# https://code.google.com/p/googleappengine/issues/detail?id=13512
raw_message = self.request.POST.get('raw-message')
logging.info('raw_message %r', raw_message)
raw_message = self.BAD_WRAP_RE.sub('', raw_message)
raw_message = self.BAD_EQ_RE.sub('=', raw_message)
logging.info('fixed raw_message %r', raw_message)
mime_message = email.message_from_string(raw_message)
logging.info('get_payload gives %r', mime_message.get_payload())
self.request.POST['raw-message'] = mime_message
      super(BouncedEmailHandler, self).post()  # Retry with mime_message
def receive(self, bounce_message):
email_addr = bounce_message.original.get('to')
subject = 'Mail to %r bounced' % email_addr
logging.info(subject)
pref_list = models.UserPref.get_prefs_for_emails([email_addr])
user_pref = pref_list[0]
user_pref.bounced = True
user_pref.put()
# Escalate to someone who might do something about it, e.g.
# find a new owner for a component.
body = ('The following message bounced.\n'
'=================\n'
'From: {from}\n'
'To: {to}\n'
'Subject: {subject}\n\n'
'{text}\n'.format(**bounce_message.original))
logging.info(body)
message = mail.EmailMessage(
sender='Chromestatus <admin@%s.appspotmail.com>' % settings.APP_ID,
to=settings.BOUNCE_ESCALATION_ADDR, subject=subject, body=body)
message.check_initialized()
if settings.SEND_EMAIL:
message.send()
app = webapp2.WSGIApplication([
('/admin/notifications/list', NotificationsListHandler),
('/tasks/email-subscribers', FeatureChangeHandler),
('/tasks/outbound-email', OutboundEmailHandler),
('/tasks/send_notifications', NotificationSendHandler),
('/features/push/new', NotificationNewSubscriptionHandler),
('/features/push/info', NotificationSubscriptionInfoHandler),
('/features/push/subscribe/([0-9]*)', NotificationSubscribeHandler),
('/features/star/set', SetStarHandler),
('/features/star/list', GetUserStarsHandler),
('/_ah/bounce', BouncedEmailHandler),
], debug=settings.DEBUG)
app.error_handlers[404] = common.handle_404
if settings.PROD and not settings.DEBUG:
app.error_handlers[500] = common.handle_500
|
# importing libraries
import tkinter as tk
from tkinter import Message, Text
import cv2
import os
import shutil
import csv
import numpy as np
from PIL import Image, ImageTk
import pandas as pd
import datetime
import time
import tkinter.ttk as ttk
import tkinter.font as font
from pathlib import Path
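# Expected working-directory layout (paths taken from the code below):
#   data\haarcascade_frontalface_default.xml  - Haar cascade used for detection
#   TrainingImage\                            - captured face samples
#   TrainingImageLabel\Trainner.yml           - trained LBPH model
#   UserDetails\UserDetails.csv               - Id, Name records
#   ImagesUnknown\                            - snapshots of unrecognised faces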
window = tk.Tk()
window.title("Face_Recogniser")
window.configure(background ='white')
window.grid_rowconfigure(0, weight = 1)
window.grid_columnconfigure(0, weight = 1)
message = tk.Label(
window, text ="Face-Recognition-System",
bg ="green", fg = "white", width = 50,
height = 3, font = ('times', 30, 'bold'))
message.place(x = 200, y = 20)
lbl = tk.Label(window, text = "No.",
width = 20, height = 2, fg ="green",
bg = "white", font = ('times', 15, ' bold ') )
lbl.place(x = 400, y = 200)
txt = tk.Entry(window,
width = 20, bg ="white",
fg ="green", font = ('times', 15, ' bold '))
txt.place(x = 700, y = 215)
lbl2 = tk.Label(window, text ="Name",
width = 20, fg ="green", bg ="white",
height = 2, font =('times', 15, ' bold '))
lbl2.place(x = 400, y = 300)
txt2 = tk.Entry(window, width = 20,
bg ="white", fg ="green",
font = ('times', 15, ' bold ') )
txt2.place(x = 700, y = 315)
# The function below checks whether
# the given text is a number or not.
def is_number(s):
try:
float(s)
return True
except ValueError:
pass
try:
import unicodedata
unicodedata.numeric(s)
return True
except (TypeError, ValueError):
pass
return False
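# For example: is_number('3.14') -> True, is_number(u'½') -> True (via
# unicodedata.numeric), is_number('abc') -> False.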
# TakeImages captures the sample images that
# are used for training the model. It takes
# 60 images of every new user.
def TakeImages():
    # Both ID and Name are used for recognising the image
Id =(txt.get())
name =(txt2.get())
# Checking if the ID is numeric and name is Alphabetical
if(is_number(Id) and name.isalpha()):
        # Opening the primary camera; to access a
        # secondary camera, pass 1 instead of 0
        # inside the parentheses
cam = cv2.VideoCapture(0)
# Specifying the path to haarcascade file
harcascadePath = "data\\haarcascade_frontalface_default.xml"
        # Creating the classifier based on the haarcascade file.
detector = cv2.CascadeClassifier(harcascadePath)
# Initializing the sample number(No. of images) as 0
sampleNum = 0
while(True):
            # Reading the video captured by the camera frame by frame
ret, img = cam.read()
            # Converting the image into grayscale as most of
            # the processing is done in grayscale format
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
            # 1.3 is the scale factor (the image is shrunk by
            # 30% at each scale) and 5 is minNeighbors, the number
            # of neighbouring detections required to keep a face
faces = detector.detectMultiScale(gray, 1.3, 5)
# For creating a rectangle around the image
for (x, y, w, h) in faces:
# Specifying the coordinates of the image as well
# as color and thickness of the rectangle.
# incrementing sample number for each image
cv2.rectangle(img, (x, y), (
x + w, y + h), (255, 0, 0), 2)
sampleNum = sampleNum + 1
                # saving the captured face in the dataset folder
                # TrainingImage, as the images that need to be
                # trained are saved in this folder
                cv2.imwrite(
                    "TrainingImage\\" + name + "." + Id + '.' + str(
                        sampleNum) + ".jpg", gray[y:y + h, x:x + w])
            # display the frame that has been captured
            # with the rectangle drawn around the face
cv2.imshow('frame', img)
            # wait for 100 milliseconds
if cv2.waitKey(100) & 0xFF == ord('q'):
break
# break if the sample number is more than 60
elif sampleNum>60:
break
# releasing the resources
cam.release()
# closing all the windows
cv2.destroyAllWindows()
# Displaying message for the user
res = "Images Saved for ID : " + Id +" Name : "+ name
# Creating the entry for the user in a csv file
row = [Id, name]
with open('UserDetails\\UserDetails.csv', 'a+') as csvFile:
writer = csv.writer(csvFile)
# Entry of the row in csv file
writer.writerow(row)
message.configure(text = res)
else:
if(is_number(Id)):
res = "Enter Alphabetical Name"
message.configure(text = res)
if(name.isalpha()):
res = "Enter Numeric Id"
message.configure(text = res)
# Training the images saved in training image folder
def TrainImages():
    # Local Binary Pattern Histogram is a face recognizer
    # algorithm in the OpenCV module, used for training the image dataset
recognizer = cv2.face.LBPHFaceRecognizer_create()
# Specifying the path for HaarCascade file
harcascadePath = "data\\haarcascade_frontalface_default.xml"
# creating detector for faces
detector = cv2.CascadeClassifier(harcascadePath)
# Saving the detected faces in variables
faces, Id = getImagesAndLabels("TrainingImage")
# Saving the trained faces and their respective ID's
# in a model named as "trainner.yml".
recognizer.train(faces, np.array(Id))
recognizer.save("TrainingImageLabel\\Trainner.yml")
# Displaying the message
res = "Image Trained"
message.configure(text = res)
def getImagesAndLabels(path):
# get the path of all the files in the folder
imagePaths =[os.path.join(path, f) for f in os.listdir(path)]
faces =[]
# creating empty ID list
Ids =[]
# now looping through all the image paths and loading the
# Ids and the images saved in the folder
for imagePath in imagePaths:
# loading the image and converting it to gray scale
pilImage = Image.open(imagePath).convert('L')
# Now we are converting the PIL image into numpy array
imageNp = np.array(pilImage, 'uint8')
# getting the Id from the image
Id = int(os.path.split(imagePath)[-1].split(".")[1])
        # adding the face image (already cropped at capture time) to the list
faces.append(imageNp)
Ids.append(Id)
return faces, Ids
# For testing phase
def TrackImages():
recognizer = cv2.face.LBPHFaceRecognizer_create()
# Reading the trained model
recognizer.read("TrainingImageLabel\\Trainner.yml")
harcascadePath = "data\\haarcascade_frontalface_default.xml"
faceCascade = cv2.CascadeClassifier(harcascadePath)
# getting the name from "userdetails.csv"
df = pd.read_csv("UserDetails\\UserDetails.csv")
cam = cv2.VideoCapture(0)
font = cv2.FONT_HERSHEY_SIMPLEX
while True:
ret, im = cam.read()
gray = cv2.cvtColor(im, cv2.COLOR_BGR2GRAY)
faces = faceCascade.detectMultiScale(gray, 1.2, 5)
for(x, y, w, h) in faces:
cv2.rectangle(im, (x, y), (x + w, y + h), (225, 0, 0), 2)
Id, conf = recognizer.predict(gray[y:y + h, x:x + w])
if(conf < 50):
                aa = df.loc[df['Id'] == Id]['Name'].values
                tt = str(Id) + "-" + aa[0]
else:
Id ='Unknown'
tt = str(Id)
if(conf > 75):
noOfFile = len(os.listdir("ImagesUnknown"))+1
cv2.imwrite("ImagesUnknown\\Image"+
str(noOfFile) + ".jpg", im[y:y + h, x:x + w])
cv2.putText(im, str(tt), (x, y + h),
font, 1, (255, 255, 255), 2)
cv2.imshow('im', im)
if (cv2.waitKey(1)== ord('q')):
break
cam.release()
cv2.destroyAllWindows()
takeImg = tk.Button(window, text ="Sample",
command = TakeImages, fg ="white", bg ="green",
width = 20, height = 3, activebackground = "Red",
font =('times', 15, ' bold '))
takeImg.place(x = 200, y = 500)
trainImg = tk.Button(window, text ="Training",
command = TrainImages, fg ="white", bg ="green",
width = 20, height = 3, activebackground = "Red",
font =('times', 15, ' bold '))
trainImg.place(x = 500, y = 500)
trackImg = tk.Button(window, text ="Testing",
command = TrackImages, fg ="white", bg ="green",
width = 20, height = 3, activebackground = "Red",
font =('times', 15, ' bold '))
trackImg.place(x = 800, y = 500)
quitWindow = tk.Button(window, text ="Quit",
command = window.destroy, fg ="white", bg ="green",
width = 20, height = 3, activebackground = "Red",
font =('times', 15, ' bold '))
quitWindow.place(x = 1100, y = 500)
window.mainloop()
|
from datetime import datetime
import os
import time
import boto3
import pg8000
import requests
S3_BUCKET_NAME = os.environ.get('S3_BUCKET_NAME', 'busshaming-timetable-dumps')
GTFS_API_KEY = os.environ.get('TRANSPORT_NSW_API_KEY')
FEED_SLUG = 'nsw-buses'
DB_NAME = os.environ.get('DATABASE_NAME')
DB_HOST = os.environ.get('DATABASE_HOST')
DB_PASSWORD = os.environ.get('DATABASE_PASSWORD')
DB_USER = os.environ.get('DATABASE_USER')
DB_PORT = int(os.environ.get('DATABASE_PORT', 5432))
FETCH_URLS = '''
SELECT ft.id, timetable_url, fetch_last_modified
FROM busshaming_feedtimetable ft
JOIN busshaming_feed f ON (f.id = ft.feed_id)
WHERE f.slug = %s
'''
UPDATE_LMT = '''
UPDATE busshaming_feedtimetable SET fetch_last_modified = %s
WHERE id = %s
'''
def upload_s3(filename, content):
print(filename)
print('Uploading to S3.')
s3 = boto3.resource('s3')
s3.Bucket(S3_BUCKET_NAME).put_object(Key=filename, Body=content)
def main(event, context):
conn = pg8000.connect(database=DB_NAME, user=DB_USER, password=DB_PASSWORD, host=DB_HOST, port=DB_PORT)
cur = conn.cursor()
cur.execute(FETCH_URLS, (FEED_SLUG,))
curtime = datetime.utcnow()
headers = {'Authorization': 'apikey ' + GTFS_API_KEY}
for tf_id, url, last_modified in cur.fetchall():
time.sleep(1)
print(f'Checking {url} ...')
response = requests.head(url, headers=headers)
if response.status_code == 200:
lmt = response.headers['last-modified']
if lmt != last_modified:
print('Downloading...')
response = requests.get(url, headers=headers)
print('Fetching complete.')
if response.status_code == 200:
filename = f'{FEED_SLUG}/{tf_id}/{curtime.isoformat()}.zip'
upload_s3(filename, response.content)
new_lmt = response.headers['last-modified']
cur.execute(UPDATE_LMT, (new_lmt, tf_id))
conn.commit()
else:
print('Fetch failed:')
print(response.status_code)
print(response.content)
else:
print('Head fetch failed:')
print(response.status_code)
print(response.content)
conn.close()
if __name__ == '__main__':
main(None, None)
|
import re
from typing import Any
from meiga import Result, Error, Failure, Success
from petisco.domain.errors.given_input_is_not_valid_error import (
GivenInputIsNotValidError,
)
from petisco.domain.errors.input_exceed_lenght_limit_error import (
InputExceedLengthLimitError,
)
class Name(str):
def __new__(cls, name, length_limit: int = 50):
name = None if name == "None" else name
cls.length_limit = length_limit
return str.__new__(cls, name)
def to_result(self) -> Result[Any, Error]:
name = None if self == "None" else self
if name is not None:
if len(self) > self.length_limit:
return Failure(InputExceedLengthLimitError(message=name))
else:
if not re.search(r"^[a-zA-Z]*(([',. -][a-zA-Z ])?[a-zA-Z]*)*$", name):
return Failure(GivenInputIsNotValidError(message=name))
return Success(name)
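# A minimal usage sketch of the meiga Result API (values are illustrative):
#
#   result = Name("Alice").to_result()
#   assert result.is_success and result.value == "Alice"
#
#   too_long = Name("x" * 51).to_result()
#   assert too_long.is_failure  # InputExceedLengthLimitError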
|
# coding: utf-8
def sizeof_fmt(num, suffix='B'):
"""
Supports:
all currently known binary prefixes, https://en.wikipedia.org/wiki/Binary_prefix#Specific_units_of_IEC_60027-2_A.2_and_ISO.2FIEC_80000
negative and positive numbers
numbers larger than 1000 Yobibytes
arbitrary units (maybe you like to count in Gibibits!)
"""
for unit in ['','Ki','Mi','Gi','Ti','Pi','Ei','Zi']:
if abs(num) < 1024.0:
return "%3.1f%s%s" % (num, unit, suffix)
num /= 1024.0
return "%.1f%s%s" % (num, 'Yi', suffix)
if __name__ == '__main__':
    print(sizeof_fmt(168963795964))  # '157.4GiB'
|
import os
import sys
import numpy as np
from ..mbase import BaseModel
from ..pakbase import Package
from ..utils import mfreadnam
from .mtbtn import Mt3dBtn
from .mtadv import Mt3dAdv
from .mtdsp import Mt3dDsp
from .mtssm import Mt3dSsm
from .mtrct import Mt3dRct
from .mtgcg import Mt3dGcg
from .mttob import Mt3dTob
from .mtphc import Mt3dPhc
from .mtuzt import Mt3dUzt
from .mtsft import Mt3dSft
from .mtlkt import Mt3dLkt
from ..discretization.structuredgrid import StructuredGrid
from flopy.discretization.modeltime import ModelTime
class Mt3dList(Package):
"""
List package class
"""
def __init__(self, model, extension="list", listunit=7):
# Call ancestor's init to set self.parent, extension, name and
# unit number
Package.__init__(self, model, extension, "LIST", listunit)
# self.parent.add_package(self) This package is not added to the base
# model so that it is not included in get_name_file_entries()
return
def __repr__(self):
return "List package class"
def write_file(self):
# Not implemented for list class
return
"""
class Mt3dms(BaseModel):
'MT3DMS base class'
def __init__(self, modelname='mt3dmstest', namefile_ext='nam',
modflowmodel=None, ftlfilename=None,
model_ws=None, external_path=None, verbose=False,
load=True, listunit=7, exe_name='mt3dms.exe', ):
BaseModel.__init__(self, modelname, namefile_ext, model_ws=model_ws,
exe_name=exe_name)
self.heading = '# Name file for MT3DMS, generated by Flopy.'
self.__mf = modflowmodel
self.lst = Mt3dList(self, listunit=listunit)
self.ftlfilename = ftlfilename
self.__adv = None
self.__btn = None
self.__dsp = None
self.__gcg = None
self.__rct = None
self.__ssm = None
self.array_free_format = False
self.external_path = external_path
self.external = False
self.external_fnames = []
self.external_units = []
self.external_binflag = []
self.load = load
self.__next_ext_unit = 500
if external_path is not None:
if os.path.exists(external_path):
print("Note: external_path " + str(external_path) + \
" already exists")
# assert os.path.exists(external_path),'external_path does not exist'
else:
os.mkdir(external_path)
self.external = True
self.verbose = verbose
return
def __repr__(self):
return 'MT3DMS model'
def get_ncomp(self):
btn = self.get_package('BTN')
if (btn):
return btn.ncomp
else:
return 1
# function to encapsulate next_ext_unit attribute
def next_ext_unit(self):
self.__next_ext_unit += 1
return self.__next_ext_unit
def getadv(self):
if (self.__adv == None):
for p in (self.packagelist):
if isinstance(p, Mt3dAdv):
self.__adv = p
return self.__adv
def getbtn(self):
if (self.__btn == None):
for p in (self.packagelist):
if isinstance(p, Mt3dBtn):
self.__btn = p
return self.__btn
def getdsp(self):
if (self.__dsp == None):
for p in (self.packagelist):
if isinstance(p, Mt3dDsp):
self.__dsp = p
return self.__dsp
def getgcg(self):
if (self.__gcg == None):
for p in (self.packagelist):
if isinstance(p, Mt3dGcg):
self.__gcg = p
return self.__gcg
def getmf(self):
return self.__mf
def getrct(self):
if (self.__rct == None):
for p in (self.packagelist):
if isinstance(p, Mt3dRct):
self.__rct = p
return self.__rct
def getssm(self):
if (self.__ssm == None):
for p in (self.packagelist):
if isinstance(p, Mt3dSsm):
self.__ssm = p
return self.__ssm
def write_name_file(self):
fn_path = os.path.join(self.model_ws, self.namefile)
f_nam = open(fn_path, 'w')
f_nam.write('%s\n' % (self.heading))
f_nam.write('%s %3i %s\n' % (self.lst.name[0], self.lst.unit_number[0],
self.lst.file_name[0]))
if self.ftlfilename is not None:
f_nam.write('%s %3i %s\n' % ('FTL', 39, self.ftlfilename))
f_nam.write('%s' % self.get_name_file_entries())
for u, f in zip(self.external_units, self.external_fnames):
f_nam.write('DATA {0:3d} '.format(u) + f + '\n')
f_nam.close()
adv = property(getadv) # Property has no setter, so read-only
btn = property(getbtn) # Property has no setter, so read-only
dsp = property(getdsp) # Property has no setter, so read-only
gcg = property(getgcg) # Property has no setter, so read-only
mf = property(getmf) # Property has no setter, so read-only
rct = property(getrct) # Property has no setter, so read-only
ssm = property(getssm) # Property has no setter, so read-only
ncomp = property(get_ncomp)
"""
class Mt3dms(BaseModel):
"""
MT3DMS Model Class.
Parameters
----------
modelname : string, optional
Name of model. This string will be used to name the MODFLOW input
that are created with write_model. (the default is 'mt3dtest')
namefile_ext : string, optional
Extension for the namefile (the default is 'nam')
modflowmodel : flopy.modflow.mf.Modflow
This is a flopy Modflow model object upon which this Mt3dms model
is based. (the default is None)
version : string, optional
Version of MT3DMS to use (the default is 'mt3dms').
exe_name : string, optional
The name of the executable to use (the default is
'mt3dms.exe').
listunit : integer, optional
        Unit number for the list file (the default is 16).
model_ws : string, optional
model workspace. Directory name to create model data sets.
(default is the present working directory).
external_path : string
Location for external files (default is None).
verbose : boolean, optional
Print additional information to the screen (default is False).
load : boolean, optional
(default is True).
silent : integer
(default is 0)
Attributes
----------
Methods
-------
See Also
--------
Notes
-----
Examples
--------
>>> import flopy
>>> m = flopy.mt3d.mt.Mt3dms()
"""
def __init__(
self,
modelname="mt3dtest",
namefile_ext="nam",
modflowmodel=None,
ftlfilename="mt3d_link.ftl",
ftlfree=False,
version="mt3dms",
exe_name="mt3dms.exe",
structured=True,
listunit=None,
ftlunit=None,
model_ws=".",
external_path=None,
verbose=False,
load=True,
silent=0,
):
# Call constructor for parent object
BaseModel.__init__(
self,
modelname,
namefile_ext,
exe_name,
model_ws,
structured=structured,
verbose=verbose,
)
# Set attributes
self.version_types = {"mt3dms": "MT3DMS", "mt3d-usgs": "MT3D-USGS"}
self.set_version(version.lower())
if listunit is None:
listunit = 16
if ftlunit is None:
ftlunit = 10
self.lst = Mt3dList(self, listunit=listunit)
self.mf = modflowmodel
self.ftlfilename = ftlfilename
self.ftlfree = ftlfree
self.ftlunit = ftlunit
self.free_format = None
# Check whether specified ftlfile exists in model directory; if not,
# warn user
if os.path.isfile(
os.path.join(self.model_ws, str(modelname + "." + namefile_ext))
):
with open(
os.path.join(
self.model_ws, str(modelname + "." + namefile_ext)
)
) as nm_file:
for line in nm_file:
if line[0:3] == "FTL":
ftlfilename = line.strip().split()[2]
break
if ftlfilename is None:
print("User specified FTL file does not exist in model directory")
print("MT3D will not work without a linker file")
else:
if os.path.isfile(os.path.join(self.model_ws, ftlfilename)):
# Check that the FTL present in the directory is of the format
# specified by the user, i.e., is same as ftlfree
# Do this by checking whether the first non-blank character is
# an apostrophe.
# If code lands here, then ftlfilename exists, open and read
# first 4 characters
f = open(os.path.join(self.model_ws, ftlfilename), "rb")
c = f.read(4)
if isinstance(c, bytes):
c = c.decode()
# if first non-blank char is an apostrophe, then formatted,
# otherwise binary
if (c.strip()[0] == "'" and self.ftlfree) or (
c.strip()[0] != "'" and not self.ftlfree
):
pass
else:
msg = (
"Specified value of ftlfree conflicts with FTL "
+ "file format"
)
print(msg)
msg = (
"Switching ftlfree from "
+ "{} ".format(str(self.ftlfree))
+ "to {}".format(str(not self.ftlfree))
)
print(msg)
self.ftlfree = not self.ftlfree # Flip the bool
# external option stuff
self.array_free_format = False
self.array_format = "mt3d"
self.external_fnames = []
self.external_units = []
self.external_binflag = []
self.external = False
self.load = load
# the starting external data unit number
self._next_ext_unit = 2000
if external_path is not None:
# assert model_ws == '.', "ERROR: external cannot be used " + \
# "with model_ws"
# external_path = os.path.join(model_ws, external_path)
if os.path.exists(external_path):
print(
"Note: external_path "
+ str(external_path)
+ " already exists"
)
# assert os.path.exists(external_path),'external_path does not exist'
else:
os.mkdir(external_path)
self.external = True
self.external_path = external_path
self.verbose = verbose
self.silent = silent
# Create a dictionary to map package with package object.
# This is used for loading models.
self.mfnam_packages = {
"btn": Mt3dBtn,
"adv": Mt3dAdv,
"dsp": Mt3dDsp,
"ssm": Mt3dSsm,
"rct": Mt3dRct,
"gcg": Mt3dGcg,
"tob": Mt3dTob,
"phc": Mt3dPhc,
"lkt": Mt3dLkt,
"sft": Mt3dSft,
"uzt2": Mt3dUzt,
}
return
def __repr__(self):
return "MT3DMS model"
@property
def modeltime(self):
# build model time
data_frame = {
"perlen": self.mf.dis.perlen.array,
"nstp": self.mf.dis.nstp.array,
"tsmult": self.mf.dis.tsmult.array,
}
self._model_time = ModelTime(
data_frame,
self.mf.dis.itmuni_dict[self.mf.dis.itmuni],
self.dis.start_datetime,
self.dis.steady.array,
)
return self._model_time
@property
def modelgrid(self):
if not self._mg_resync:
return self._modelgrid
if self.btn is not None:
ibound = self.btn.icbund.array
delc = self.btn.delc.array
delr = self.btn.delr.array
top = self.btn.htop.array
botm = np.subtract(top, self.btn.dz.array.cumsum(axis=0))
nlay = self.btn.nlay
else:
delc = self.mf.dis.delc.array
delr = self.mf.dis.delr.array
top = self.mf.dis.top.array
botm = self.mf.dis.botm.array
nlay = self.mf.nlay
if self.mf.bas6 is not None:
ibound = self.mf.bas6.ibound.array
else:
ibound = None
# build grid
self._modelgrid = StructuredGrid(
delc=delc,
delr=delr,
top=top,
botm=botm,
idomain=ibound,
proj4=self._modelgrid.proj4,
epsg=self._modelgrid.epsg,
xoff=self._modelgrid.xoffset,
yoff=self._modelgrid.yoffset,
angrot=self._modelgrid.angrot,
nlay=nlay,
)
# resolve offsets
xoff = self._modelgrid.xoffset
if xoff is None:
if self._xul is not None:
xoff = self._modelgrid._xul_to_xll(self._xul)
else:
xoff = self.mf._modelgrid.xoffset
if xoff is None:
                    # in case mf._modelgrid.xoffset is not set but mf._xul is
if self.mf._xul is not None:
xoff = self._modelgrid._xul_to_xll(self.mf._xul)
else:
xoff = 0.0
yoff = self._modelgrid.yoffset
if yoff is None:
if self._yul is not None:
yoff = self._modelgrid._yul_to_yll(self._yul)
else:
yoff = self.mf._modelgrid.yoffset
if yoff is None:
                    # in case mf._modelgrid.yoffset is not set but mf._yul is
if self.mf._yul is not None:
yoff = self._modelgrid._yul_to_yll(self.mf._yul)
else:
yoff = 0.0
proj4 = self._modelgrid.proj4
if proj4 is None:
proj4 = self.mf._modelgrid.proj4
epsg = self._modelgrid.epsg
if epsg is None:
epsg = self.mf._modelgrid.epsg
angrot = self._modelgrid.angrot
if angrot is None or angrot == 0.0: # angrot normally defaulted to 0.0
if self.mf._modelgrid.angrot is not None:
angrot = self.mf._modelgrid.angrot
else:
angrot = 0.0
self._modelgrid.set_coord_info(xoff, yoff, angrot, epsg, proj4)
self._mg_resync = not self._modelgrid.is_complete
return self._modelgrid
@property
def solver_tols(self):
if self.gcg is not None:
return self.gcg.cclose, -999
return None
@property
def sr(self):
if self.mf is not None:
return self.mf.sr
return None
@property
def nlay(self):
if self.btn:
return self.btn.nlay
else:
return 0
@property
def nrow(self):
if self.btn:
return self.btn.nrow
else:
return 0
@property
def ncol(self):
if self.btn:
return self.btn.ncol
else:
return 0
@property
def nper(self):
if self.btn:
return self.btn.nper
else:
return 0
@property
def ncomp(self):
if self.btn:
return self.btn.ncomp
else:
return 1
@property
def mcomp(self):
if self.btn:
return self.btn.mcomp
else:
return 1
def get_nrow_ncol_nlay_nper(self):
if self.btn:
return self.btn.nrow, self.btn.ncol, self.btn.nlay, self.btn.nper
else:
return 0, 0, 0, 0
# Property has no setter, so read-only
nrow_ncol_nlay_nper = property(get_nrow_ncol_nlay_nper)
def write_name_file(self):
"""
Write the name file.
"""
fn_path = os.path.join(self.model_ws, self.namefile)
f_nam = open(fn_path, "w")
f_nam.write("{}\n".format(self.heading))
f_nam.write(
"{:14s} {:5d} {}\n".format(
self.lst.name[0],
self.lst.unit_number[0],
self.lst.file_name[0],
)
)
if self.ftlfilename is not None:
ftlfmt = ""
if self.ftlfree:
ftlfmt = "FREE"
f_nam.write(
"{:14s} {:5d} {} {}\n".format(
"FTL", self.ftlunit, self.ftlfilename, ftlfmt
)
)
# write file entries in name file
f_nam.write("{}".format(self.get_name_file_entries()))
# write the external files
for u, f in zip(self.external_units, self.external_fnames):
f_nam.write("DATA {0:5d} ".format(u) + f + "\n")
# write the output files
for u, f, b in zip(
self.output_units, self.output_fnames, self.output_binflag
):
if u == 0:
continue
if b:
f_nam.write(
"DATA(BINARY) {0:5d} ".format(u) + f + " REPLACE\n"
)
else:
f_nam.write("DATA {0:5d} ".format(u) + f + "\n")
f_nam.close()
return
def load_results(self, **kwargs):
return
@classmethod
def load(
cls,
f,
version="mt3dms",
exe_name="mt3dms.exe",
verbose=False,
model_ws=".",
load_only=None,
forgive=False,
modflowmodel=None,
):
"""
Load an existing model.
Parameters
----------
f : string
Full path and name of MT3D name file.
version : string
The version of MT3D (mt3dms, or mt3d-usgs)
(default is mt3dms)
exe_name : string
The name of the executable to use if this loaded model is run.
(default is mt3dms.exe)
verbose : bool
Write information on the load process if True.
(default is False)
model_ws : string
The path for the model workspace.
(default is the current working directory '.')
load_only : list of strings
Filetype(s) to load (e.g. ['btn', 'adv'])
(default is None, which means that all will be loaded)
        forgive : bool, optional
            If True, exceptions raised while loading a package are caught
            and the load continues, which can be useful for debugging.
            Default False.
modflowmodel : flopy.modflow.mf.Modflow
This is a flopy Modflow model object upon which this Mt3dms
model is based. (the default is None)
Returns
-------
mt : flopy.mt3d.mt.Mt3dms
flopy Mt3d model object
Notes
-----
The load method does not retain the name for the MODFLOW-generated
FTL file. This can be added manually after the MT3D model has been
loaded. The syntax for doing this manually is
mt.ftlfilename = 'example.ftl'
Examples
--------
>>> import flopy
>>> f = 'example.nam'
>>> mt = flopy.mt3d.mt.Mt3dms.load(f)
>>> mt.ftlfilename = 'example.ftl'
"""
modelname, ext = os.path.splitext(f)
modelname_extension = ext[1:] # without '.'
if verbose:
sys.stdout.write(
"\nCreating new model with name: {}\n{}\n\n".format(
modelname, 50 * "-"
)
)
mt = cls(
modelname=modelname,
namefile_ext=modelname_extension,
version=version,
exe_name=exe_name,
verbose=verbose,
model_ws=model_ws,
modflowmodel=modflowmodel,
)
files_successfully_loaded = []
files_not_loaded = []
# read name file
namefile_path = os.path.join(mt.model_ws, f)
if not os.path.isfile(namefile_path):
raise IOError("cannot find name file: " + str(namefile_path))
try:
ext_unit_dict = mfreadnam.parsenamefile(
namefile_path, mt.mfnam_packages, verbose=verbose
)
except Exception as e:
# print("error loading name file entries from file")
# print(str(e))
# return None
raise Exception(
"error loading name file entries from file:\n" + str(e)
)
if mt.verbose:
print(
"\n{}\nExternal unit dictionary:\n{}\n{}\n".format(
50 * "-", ext_unit_dict, 50 * "-"
)
)
# reset unit number for list file
unitnumber = None
for key, value in ext_unit_dict.items():
if value.filetype == "LIST":
unitnumber = key
filepth = os.path.basename(value.filename)
if unitnumber == "LIST":
unitnumber = 16
if unitnumber is not None:
mt.lst.unit_number = [unitnumber]
mt.lst.file_name = [filepth]
# set ftl information
unitnumber = None
for key, value in ext_unit_dict.items():
if value.filetype == "FTL":
unitnumber = key
filepth = os.path.basename(value.filename)
if unitnumber == "FTL":
unitnumber = 10
if unitnumber is not None:
mt.ftlunit = unitnumber
mt.ftlfilename = filepth
# load btn
btn = None
btn_key = None
for key, item in ext_unit_dict.items():
if item.filetype.lower() == "btn":
btn = item
btn_key = key
break
if btn is None:
return None
try:
pck = btn.package.load(
btn.filename, mt, ext_unit_dict=ext_unit_dict
)
except Exception as e:
raise Exception("error loading BTN: {0}".format(str(e)))
files_successfully_loaded.append(btn.filename)
if mt.verbose:
sys.stdout.write(
" {:4s} package load...success\n".format(pck.name[0])
)
ext_unit_dict.pop(btn_key).filehandle.close()
ncomp = mt.btn.ncomp
# reserved unit numbers for .ucn, s.ucn, .obs, .mas, .cnf
poss_output_units = set(
list(range(201, 201 + ncomp))
+ list(range(301, 301 + ncomp))
+ list(range(401, 401 + ncomp))
+ list(range(601, 601 + ncomp))
+ [17]
)
if load_only is None:
load_only = []
for key, item in ext_unit_dict.items():
load_only.append(item.filetype)
else:
if not isinstance(load_only, list):
load_only = [load_only]
not_found = []
for i, filetype in enumerate(load_only):
filetype = filetype.upper()
if filetype != "BTN":
load_only[i] = filetype
found = False
for key, item in ext_unit_dict.items():
if item.filetype == filetype:
found = True
break
if not found:
not_found.append(filetype)
if len(not_found) > 0:
raise Exception(
"the following load_only entries were not found "
"in the ext_unit_dict: " + ",".join(not_found)
)
# try loading packages in ext_unit_dict
for key, item in ext_unit_dict.items():
if item.package is not None:
if item.filetype in load_only:
if forgive:
try:
pck = item.package.load(
item.filehandle,
mt,
ext_unit_dict=ext_unit_dict,
)
files_successfully_loaded.append(item.filename)
if mt.verbose:
sys.stdout.write(
" {:4s} package load...success\n".format(
pck.name[0]
)
)
except BaseException as o:
if mt.verbose:
sys.stdout.write(
" {:4s} package load...failed\n {!s}\n".format(
item.filetype, o
)
)
files_not_loaded.append(item.filename)
else:
pck = item.package.load(
item.filehandle, mt, ext_unit_dict=ext_unit_dict
)
files_successfully_loaded.append(item.filename)
if mt.verbose:
sys.stdout.write(
" {:4s} package load...success\n".format(
pck.name[0]
)
)
else:
if mt.verbose:
sys.stdout.write(
" {:4s} package load...skipped\n".format(
item.filetype
)
)
files_not_loaded.append(item.filename)
elif "data" not in item.filetype.lower():
files_not_loaded.append(item.filename)
if mt.verbose:
sys.stdout.write(
" {:4s} package load...skipped\n".format(
item.filetype
)
)
elif "data" in item.filetype.lower():
if mt.verbose:
sys.stdout.write(
" {} file load...skipped\n {}\n".format(
item.filetype, os.path.basename(item.filename)
)
)
if key in poss_output_units:
                    # identify files assigned to output unit numbers and
                    # allow them to pass through
mt.output_fnames.append(os.path.basename(item.filename))
mt.output_units.append(key)
mt.output_binflag.append("binary" in item.filetype.lower())
elif key not in mt.pop_key_list:
mt.external_fnames.append(item.filename)
mt.external_units.append(key)
mt.external_binflag.append(
"binary" in item.filetype.lower()
)
mt.external_output.append(False)
# pop binary output keys and any external file units that are now
# internal
for key in mt.pop_key_list:
try:
mt.remove_external(unit=key)
item = ext_unit_dict.pop(key)
if hasattr(item.filehandle, "close"):
item.filehandle.close()
except KeyError:
if mt.verbose:
msg = (
"\nWARNING:\n External file unit "
+ "{} does not exist in ext_unit_dict.\n".format(key)
)
sys.stdout.write(msg)
# write message indicating packages that were successfully loaded
if mt.verbose:
print(1 * "\n")
s = " The following {0} packages were successfully loaded.".format(
len(files_successfully_loaded)
)
print(s)
for fname in files_successfully_loaded:
print(" " + os.path.basename(fname))
if len(files_not_loaded) > 0:
s = " The following {0} packages were not loaded.".format(
len(files_not_loaded)
)
print(s)
for fname in files_not_loaded:
print(" " + os.path.basename(fname))
print("\n")
# return model object
return mt
@staticmethod
def load_mas(fname):
"""
Load an mt3d mas file and return a numpy recarray
Parameters
----------
fname : str
name of MT3D mas file
Returns
-------
r : np.ndarray
"""
if not os.path.isfile(fname):
raise Exception("Could not find file: {}".format(fname))
dtype = [
("time", float),
("total_in", float),
("total_out", float),
("sources", float),
("sinks", float),
("fluid_storage", float),
("total_mass", float),
("error_in-out", float),
("error_alt", float),
]
r = np.loadtxt(fname, skiprows=2, dtype=dtype)
r = r.view(np.recarray)
return r
@staticmethod
def load_obs(fname):
"""
Load an mt3d obs file and return a numpy recarray
Parameters
----------
fname : str
name of MT3D obs file
Returns
-------
r : np.ndarray
"""
firstline = "STEP TOTAL TIME LOCATION OF OBSERVATION POINTS (K,I,J)"
dtype = [("step", int), ("time", float)]
nobs = 0
obs = []
if not os.path.isfile(fname):
raise Exception("Could not find file: {}".format(fname))
with open(fname, "r") as f:
line = f.readline()
if line.strip() != firstline:
msg = "First line in file must be \n{}\nFound {}".format(
firstline, line.strip()
)
msg += (
"\n{} does not appear to be a valid MT3D OBS file".format(
fname
)
)
raise Exception(msg)
            # Read obs names (when the loop breaks, line holds the first data line)
nlineperrec = 0
while True:
line = f.readline()
if line[0:7].strip() == "1":
break
nlineperrec += 1
ll = line.strip().split()
while len(ll) > 0:
k = int(ll.pop(0))
i = int(ll.pop(0))
j = int(ll.pop(0))
obsnam = "({}, {}, {})".format(k, i, j)
if obsnam in obs:
obsnam += str(len(obs) + 1) # make obs name unique
obs.append(obsnam)
icount = 0
r = []
while True:
ll = []
for n in range(nlineperrec):
icount += 1
if icount > 1:
line = f.readline()
ll.extend(line.strip().split())
if not line:
break
rec = [int(ll[0])]
for val in ll[1:]:
rec.append(float(val))
r.append(tuple(rec))
# add obs names to dtype
for nameob in obs:
dtype.append((nameob, float))
r = np.array(r, dtype=dtype)
r = r.view(np.recarray)
return r
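# Illustrative usage of the two static loaders above; the output file names
# here are hypothetical examples, not defaults of the flopy API.
#   r_mas = Mt3dms.load_mas('MT3D001.MAS')   # recarray with 'time', 'total_mass', ...
#   r_obs = Mt3dms.load_obs('MT3D001.OBS')   # recarray with 'step', 'time', one column per obs point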
|
from copy import deepcopy
from math import floor
import re
from pprint import PrettyPrinter
from patterns import PatternBehavior
from monsters import get_monster, alias_monster
from sidebar import get_sidebar
from bibliography import get_citation
from magicitems import get_magicitem
from spells import get_spell
from npc import get_npc
from advutils import *
from logger import *
class InlineTextProcesser(PatternBehavior):
def citation(self, cite):
if cite.strip() not in self.bibenteries:
self.bibenteries.append(cite)
self.bibliography.append(get_citation(cite))
def cash(self, pieces):
entry = pieces[0].strip()
entry = entry[0].upper() + entry[1:]
try:
self.cash.append([entry, float(pieces[2].strip())])
self.totalcash += float(pieces[2])
except Exception as exp:
raise Exception("Expected a number at end of loot tag.")
def rev_cash(self, pieces):
entry = pieces[2].strip()
entry = entry[0].upper() + entry[1:]
try:
self.cash.append([entry, float(pieces[0].strip())])
self.totalcash += float(pieces[0])
except Exception as exp:
raise Exception("Expected a number at start of gpfirst tag.")
def noshowcash(self, pieces):
entry = pieces[0].strip()
entry = entry[0].upper() + entry[1:]
try:
self.cash.append([entry, float(pieces[1].strip())])
#self.totalcash += format_cash(pieces[1])
self.totalcash += float(pieces[1])
except Exception as exp:
raise Exception("Expected a number at end of noshowloot tag.")
def npc_ref(self, name):
name = name.strip()
if name not in self.npcsenteries:
self.npcsenteries.append(name)
npc = get_npc(name)
self.npcs.append(npc)
def quest_xp(self, pieces):
tag = pieces[0].strip()
xp = int(pieces[1].strip())
self.quests.append([tag,xp])
self.totalquestxp = self.totalquestxp + xp
def imagehere(self,pieces):
ifile = pieces.strip()
self.artwork.append(ifile)
def add_label(self,pieces):
self.exlabels.append(pieces.strip())
def chapref(self,pieces):
self.chaprefs.append(pieces.strip())
def stref(self,pieces):
self.strefs.append(pieces.strip())
def encref(self,pieces):
self.encrefs.append(pieces.strip())
def appref(self,pieces):
self.apprefs.append(pieces.strip())
def exref(self,pieces):
self.exrefs.append(pieces.strip())
def mundane(self, pieces):
entry = pieces.strip()
entry = entry[0].upper() + entry[1:]
self.mundane.append(entry)
def bold(self, pieces):
self.bold_count += 1
def italic(self, pieces):
self.it_count += 1
def lstmarker(self, pieces):
self.list_count += 1
def numlist(self, pieces):
self.numlist_count += 1
def add_table(self, pieces):
if self.table_open:
raise Exception("Error: Tables may not be nested.")
self.table_open = True
def end_table(self, pieces):
if not self.table_open:
raise Exception("Error: Attempt to close a table when no table open.\n Starting a table requires alignment specs and a table name.")
self.table_open = False
def __init__(self):
PatternBehavior.__init__(self,[
['/b/', self.bold],
['/i/', self.italic],
['/l/', self.lstmarker],
['/nl/', self.numlist],
['/table ([clrCLR]+):([A-Za-z0-9\'\-\,\. \;:]*)+/', self.add_table],
['/table/', self.end_table],
['\[\[citation:\W*(\w*)\]\]', self.citation],
['\[\[imagehere:([A-Za-z0-9\'\-\,\. \:]*)\]\]', self.imagehere],
['\[\[label:([A-Za-z0-9\'\-\, \:\.]*)\]\]', self.add_label],
['\[\[encounter:([A-Za-z0-9\'\-\,\. \:]*)\]\]', self.encref],
['\[\[storyaward:([A-Za-z0-9\'\-\,\. \:]*)\]\]', self.stref],
['\[\[chapter:([A-Za-z0-9\'\-\,\. \:]*)\]\]', self.chapref],
['\[\[appendix:([A-Za-z0-9\'\-\,\. \:]*)\]\]', self.appref],
['\[\[reference:([A-Za-z0-9\'\-\,\. \:]*)\]\]', self.exref],
['\[\[loot:([A-Za-z0-9\-\(\)\/\' ]+):([A-Za-z0-9\-\(\)\/\' ]+)*:\W?(\d*\.?\d*)\]\]', self.cash],
['\[\[gpfirst:\W?(\d*\.?\d*):([A-Za-z0-9\-\(\)\/\' ]+):([A-Za-z0-9\-\(\)\/\' ]+)*\]\]', self.rev_cash],
['\[\[noshowloot:\W*([A-Za-z0-9\-\(\)\/\' ]+):\W*(\d*\.?\d*)\]\]', self.noshowcash],
['\[\[NPC:([A-Za-z0-9\'\-\,\. \:]*)\]\]', self.npc_ref],
['\[\[xp:([A-Za-z0-9\'\-\,\. ]*):\W*(\d*)\]\]', self.quest_xp],
['\[\[mundane:([A-Za-z0-9\'\-\,\. \:]*)\]\]', self.mundane]
], False)
self.bibenteries = []
self.bibliography = []
self.npcs = []
self.npcsenteries = []
        self.cash = []  # rebinding shadows the cash() method already captured in the pattern table
self.quests = []
self.totalcash = 0.0
self.totalquestxp = 0
self.artwork = []
self.chaprefs = []
self.strefs = []
self.encrefs = []
self.apprefs = []
self.exlabels = []
self.exrefs = []
self.bold_count = 0
self.it_count = 0
self.table_open = False
self.list_count = 0
self.numlist_count = 0
        self.mundane = []  # likewise shadows the mundane() method captured above
class SubDocument:
def __init__(self, name):
self.docline = []
self.variables = {'name' : name.strip()}
self.txttype = 0
self.inpara = False
self.inlist = False
self.name = name
def start_paragraph(self):
self.docline.append({
'category' : 'paragraph',
'text' : ''
})
self.inpara = True
self.inlist = False
def endparagraph(self):
self.inpara = False
self.inlist = False
def set_txttype(self,nxttype, header=None):
if (nxttype == self.txttype): return
if self.txttype > 0:
self.docline.append({
'category' : 'txttype_end',
'txttype' : self.txttype
})
if nxttype > 0:
self.docline.append({
'category' : 'txttype_begin',
'txttype' : nxttype,
'header' : header
})
self.txttype = nxttype
self.inpara = False
self.inlist = False
    # call with add_bulletpoint('my bullet text', 0/1)
def add_bulletpoint(self, point, typ):
self.set_txttype(0)
if self.inlist == False:
self.docline.append({
'category' : 'bulletlist',
'bullets' : [point],
'btyp' : typ
})
self.inlist = True
self.inpara = False
else:
self.docline[-1]['bullets'].append(point)
def append_bullet(self, txt):
self.docline[-1]['bullets'][-1] += ' ' + txt
def add_line(self,line,typ):
self.set_txttype(typ)
if not self.inpara: self.start_paragraph()
else: self.docline[-1]['text'] += ' '
self.docline[-1]['text'] += line
def add_header(self, txt, depth):
self.set_txttype(0)
self.endparagraph()
self.docline.append({
'category' : 'header',
'depth' : depth,
'text' : txt
})
def add_table(self, tab):
self.set_txttype(0)
self.endparagraph()
self.docline.append({
'category' : 'table',
'table' : tab
})
def docvariable(self, key, value):
self.variables[key] = value
def label(self):
return format_label(self.name)
def close(self):
self.set_txttype(0)
self.endparagraph()
def contents(self):
self.close()
for l in self.docline:
if l['category'] == 'table':
l['table'] = l['table'].contents()
return {
'variables' : self.variables,
'text' : self.docline,
'name' : self.name,
}
def special(self, pieces):
        print(pieces)
raise Exception('Special processing line used out of context.')
ENCOUNTER_DIFFICULTY_LIST = ['Vweak', 'Weak', 'Normal', 'Strong', 'Vstrong']
MONSTER_ENTRY_PATTERN = re.compile(" ?(\d\d?)? ?(.*)")
def max_figures(flst, elst):
nlst = deepcopy(flst)
for fig in elst.keys():
if fig in nlst:
nlst[fig] = max(nlst[fig],elst[fig])
else:
nlst[fig] = elst[fig]
return nlst
class Spellbook(SubDocument):
def __init__(self, name):
SubDocument.__init__(self,name)
self.splst = [[],[],[],[],[],[],[],[],[]]
def contents(self):
cnt = SubDocument.contents(self)
for lst in self.splst:
lst.sort()
cnt['spells'] = self.splst
return cnt
def add_spell(self, spname):
        sp = get_spell(spname)
lvl = sp['level']
lvl -= 1
self.splst[lvl].append(sp['name'])
def special(self, pieces):
if pieces[0] != 'spells' :
raise Exception("Invalid - line in spell book %s." % self.name)
sps = pieces[1].split(",")
for s in sps:
self.add_spell(s.strip())
class Encounter(SubDocument):
#encounter_table = {difficulty : normal, totalxp : 0, roster: [{name : foo, number: 1, xp : 10}]}
def contents(self):
cnt = SubDocument.contents(self)
if self.reorder:
cnt['stat_blocks'] = sorted(self.stat_blocks, key=lambda sb: sb['blocklength'])
else:
cnt['stat_blocks'] = self.stat_blocks
cnt['encounter_table'] = self.encounter_table
cnt['sidebars'] = [get_sidebar(sb) for sb in self.sidebars]
cnt['label'] = self.label()
cnt['map'] = self.map
cnt['evenstart'] = self.evenstart
cnt['extra_critters'] = self.extra_critters
return cnt
def encounter_difficulty(self, difficulty, mlst):
difficulty = difficulty.strip().capitalize()
if difficulty not in ENCOUNTER_DIFFICULTY_LIST and difficulty != 'Noxp':
raise Exception("Encounter difficulty must be one of %s." % ", ".join(ENCOUNTER_DIFFICULTY_LIST))
totalxp = 0
roster = []
clist = mlst.split(",")
diff_figs = {}
sprevbar = None
for crit in clist:
critter = crit.strip()
mo = MONSTER_ENTRY_PATTERN.match(critter).groups()
num = int(mo[0]) if mo[0] else 1
mname = mo[1]
sblock = get_monster(mname)
            print_name = sblock['name']  # get normalized name
xp = sblock['xp']
totalxp += xp * num
roster.append({'name' : print_name, 'number' : num, 'xp' : xp})
diff_figs[print_name] = num
if mname not in self.appearing:
self.appearing.append(mname)
self.stat_blocks.append(sblock)
if 'sidebar' in sblock:
sb = sblock['sidebar']
if sb not in self.sidebars:
self.sidebars.append(sb)
if difficulty != 'Noxp':
self.encounter_table.append({
"difficulty" : difficulty,
"totalxp" : totalxp,
"roster" : roster
})
if difficulty == 'Noxp':
self.extra_critters += roster
self.figures = max_figures(self.figures, diff_figs)
def special(self, pieces):
if pieces[0] == 'map':
self.map = pieces[1].strip()
elif pieces[0] == 'order':
tmp = []
for k in pieces[1].split(','):
split_block = False
k = k.strip()
if k[0] == '!':
k = k[1:]
split_block = True
for st in self.stat_blocks:
if k == st['name']:
dup = deepcopy(st)
dup['split_block'] = split_block
tmp.append(dup)
self.stat_blocks = tmp
self.reorder = False
else:
self.encounter_difficulty(pieces[0],pieces[1])
def __init__(self, name):
SubDocument.__init__(self, name)
self.encounter_table = []
self.appearing = []
self.stat_blocks = []
self.figures = {}
self.sidebars = []
self.reorder = True
self.evenstart = True
self.map = None
self.extra_critters = []
class Chapter(SubDocument):
def contents(self):
cnt = SubDocument.contents(self)
enc_contents = []
for en in self.encounters:
enc_contents.append(en.contents())
cnt['encounters'] = enc_contents
cnt['label'] = self.label()
return cnt
def new_encounter(self, name):
if len(self.encounters): self.encounters[-1].close()
enc = Encounter(name)
self.encounters.append(enc)
return enc
def __init__(self, name):
SubDocument.__init__(self, name)
self.encounters = []
class StoryAward(SubDocument):
def __init__(self, name):
SubDocument.__init__(self, name)
def switch_part(self):
self.close()
self.dmtext = self.docline
self.docline = []
def special(self, pieces):
self.switch_part()
def contents(self):
cnt = SubDocument.contents(self)
cnt['text2'] = self.dmtext
cnt['label'] = "story_" + self.label()
return cnt
class MagicItem(SubDocument):
def value_by_rarity(self,rarity):
if rarity == 'Common':
return 50
elif rarity == 'Uncommon':
return 500
elif rarity == 'Rare':
return 5000
elif rarity == 'Very Rare':
return 50000
elif rarity == 'Legendary':
return 150000
elif rarity == 'Unique':
return 500000
def adjust_scroll(self,it):
sp = it['spell']
level = sp['level']
addline = ''
scrollentry = sp['name']
        if 'level' in self.variables and int(self.variables['level']) > sp['level']:
level = int(self.variables['level'])
addline = 'When used, this scroll casts @spell as a level %d spell. ' % level
scrollentry += ' (at %d level)' % level
if level < 3 : dc = 13
elif level < 5 : dc = 15
elif level < 7 : dc = 17
elif level < 9 : dc = 18
else : dc = 19
tohit = dc - 8
if level < 2 : it['rarity'] = 'Common'
elif level < 4 : it['rarity'] = 'Uncommon'
elif level < 6 : it['rarity'] = 'Rare'
elif level < 9 : it['rarity'] = 'Very Rare'
else : it['rarity'] = 'Legendary'
if 'mechanism' in sp:
if sp['mechanism'] == 'attack' or sp['mechanism'] == 'both' :
addline += 'The attack bonus when you use this scroll is +%d.' % tohit
if sp['mechanism'] == 'save' or sp['mechanism'] == 'both':
addline += 'The save DC when you use this scroll is %d.' % dc
if addline != '':
it['description'] += '!!'
it['description'] += addline
it['scrolltag'] = scrollentry
return it
def spawn_item(self):
rarity = ['Common', 'Uncommon', 'Rare', 'Very Rare', 'Legendary']
key = self.variables['item'] if 'item' in self.variables else 'default'
it = get_magicitem(key)
if it['spell'] :
self.variables['spell'] = it['spell']
if 'spell' in self.variables:
it['spell'] = get_spell(self.variables['spell'])
if key == 'Spell Scroll' : it = self.adjust_scroll(it)
for n in it['need']:
if n not in self.variables:
raise Exception("Item %s needs variable %s set." % (key, n))
rep = '@' + n
it['name'] = it['name'].replace(rep,self.variables[n].capitalize())
it['description'] = it['description'].replace(rep,self.variables[n].lower())
it['category'] = it['category'].replace(rep,self.variables[n].capitalize())
if 'fg_subtype' in it:
it['fg_subtype'] = it['fg_subtype'].replace(rep,self.variables[n].capitalize())
it['text2'] = deepcopy(self.docline)
dl = []
if 'description' in it and it['description']:
dl = self.docline
self.docline = []
self.add_line(it['description'],0)
self.endparagraph()
self.docline = self.docline + dl
if it['rarity'] == 'byplus':
it['rarity'] = rarity[int(self.variables['plus'])]
elif it['rarity'] == 'rarebyplus':
it['rarity'] = rarity[int(self.variables['plus'])+1]
if 'called' in self.variables:
it['name'] = self.variables['called']
if 'available' not in self.variables: self.variables['available'] = 1
if 'attuneby' not in it: it['attuneby'] = None
it['available'] = int(self.variables['available'])
it['castvalue'] = self.value_by_rarity(it['rarity'])
return it
def contents(self):
it = self.spawn_item()
cnt = SubDocument.contents(self)
cnt.update(it)
return cnt
def special(self, pieces):
self.variables[pieces[0]] = pieces[1].strip()
def __init__(self, name):
SubDocument.__init__(self, name)
class MainDocument(SubDocument):
def __init__(self):
SubDocument.__init__(self,'Main')
self.chapters = []
self.appendix = []
self.storyawards = []
self.magicitems = []
self.monsters = []
self.spellbooks = []
self.variables = {
'title' : 'Set the title variable',
'code' : 'CCC-Unknown',
'tier' : 1,
'playtime' : 2
}
def contents(self):
cnt = SubDocument.contents(self)
cnt['chapters'] = [c.contents() for c in self.chapters]
cnt['figures'] = self.module_figure_list()
        cnt['xpdata'] = self.xp_for_difficulty_table()
cnt['storyawards'] = [s.contents() for s in self.storyawards]
mtmp = [m.contents() for m in self.magicitems]
cnt['magicitems'] = []
cnt['consumables'] = []
cnt['scrolls'] = []
for m in mtmp:
m['variables']['advtitle'] = self.variables['title']
m['variables']['code'] = self.variables['code']
if 'scrolltag' in m:
cnt['scrolls'].append(m)
elif m['consumable']:
cnt['consumables'].append(m)
else:
cnt['magicitems'].append(m)
if len(cnt['scrolls']) == 1: #treat 1 scroll as an ordinary consumable
cnt['consumables'] += cnt['scrolls']
cnt['scrolls'] = []
cnt['extra_monsters'] = self.monsters
cnt['appendix'] = self.appendix
cnt['spellbooks'] = [s.contents() for s in self.spellbooks]
return cnt
def new_chapter(self, name):
if len(self.chapters): self.chapters[-1].close()
chp = Chapter(name)
self.chapters.append(chp)
return chp
def new_storyaward(self,name):
if len(self.storyawards): self.storyawards[-1].close()
stawd = StoryAward(name)
self.storyawards.append(stawd)
return stawd
def new_spellbook(self,name):
if len(self.spellbooks): self.spellbooks[-1].close()
book = Spellbook(name)
self.spellbooks.append(book)
return book
def new_magicitem(self,name):
if len(self.magicitems): self.magicitems[-1].close()
mi = MagicItem(name)
self.magicitems.append(mi)
return mi
def module_figure_list(self):
mod_figs = {}
for chp in self.chapters:
for enc in chp.encounters:
mod_figs = max_figures(mod_figs, enc.figures)
figs = []
for f in sorted(mod_figs.keys()):
figs.append([f,mod_figs[f]])
return figs
def add_monster(self, monster):
self.monsters.append(get_monster(monster))
def add_appendix_image(self, nm, image):
self.appendix.append({
'name' : nm.strip(),
'image' : image.strip(),
'label' : format_label(nm)
})
    def xp_for_difficulty_table(self):
has_diff = {}
for diff in ENCOUNTER_DIFFICULTY_LIST:
has_diff[diff] = False
for chp in self.chapters:
for enc in chp.encounters:
for diff in enc.encounter_table:
has_diff[diff['difficulty']] = True
        # has_diff now tells which difficulties are present in the adventure
cnt_diff = 1
for key in ENCOUNTER_DIFFICULTY_LIST:
if has_diff[key]:
has_diff[key] = cnt_diff
cnt_diff += 1
        # has_diff now maps each difficulty to its table row
xp_for_encounter_by_difficult = []
totalxp = ['Total XP'] + [0,0,0,0,0][:cnt_diff-1]
for chp in self.chapters:
for enc in chp.encounters:
row = [enc.name] + [0,0,0,0,0][:cnt_diff-1]
for d in enc.encounter_table:
row[has_diff[d['difficulty']]] = d['totalxp']
totalxp[has_diff[d['difficulty']]] += d['totalxp']
row = [str(x) for x in row]
xp_for_encounter_by_difficult.append(row)
has_difficulties = []
for key in ENCOUNTER_DIFFICULTY_LIST:
if has_diff[key]: has_difficulties.append(key)
xp_party_size = []
for diff in range(0,len(has_difficulties)):
row = [has_difficulties[diff], '0', '0', '0', '0', '0']
for idx in range(1,6):
row[idx] = str(totalxp[diff+1] / (idx + 2))
xp_party_size.append(row)
totalxp = [str(x) for x in totalxp]
#xp_for_encounter_by_difficult.append(totalxp)
return {
'difficulties' : has_difficulties,
'by_encounter' : xp_for_encounter_by_difficult,
'party_size' : xp_party_size,
'cols_in_diff' : len(has_difficulties)+1,
'totalxpbydiff' : totalxp
}
class AdvTable:
def __init__(self, headerline, name, w=False):
self.table = []
self.header = []
self.alignment = []
self.name = name if name else ''
self.maxlen = -1
self.longcol = -1
self.wide = w
parts = headerline.split("|")
for p in parts:
left = p[0] != ' '
right = p[-1] != ' '
center = left == right
if center: align = 'center'
elif left: align = 'left'
else: align = 'right'
self.alignment.append(align)
self.header.append(p.strip())
def add_line(self, line):
parts = line.split("|")
tab = []
idx = 0
for p in parts:
tmp = p.strip()
tab.append(tmp)
l = len(tmp)
if l > self.maxlen:
self.maxlen = l
self.longcol = idx
idx += 1
        if len(tab) != len(self.header):
            raise Exception("Table does not have the right number of columns.")
        self.table.append(tab)
def endparagraph(self):
return
def close(self):
return
def contents(self):
return {
'alignment' : self.alignment,
'header' : self.header,
'table' : self.table,
'name' : self.name,
'longestcol' : self.longcol,
'wide' : self.wide
}
class AdventureBuilder(PatternBehavior):
def set_current_doc(self, doc):
if isinstance(doc, Chapter):
self.docstack = self.docstack[0:1] #pop everything but main doc
elif isinstance(doc, Encounter):
if not isinstance(self.docstack[1], Chapter):
raise Exception('Cannot have an encounter outside of a chapter')
self.docstack = self.docstack[0:2]
self.docstack.append(doc)
self.curdoc = doc
def variable(self, pieces):
self.curdoc.docvariable(pieces[0],pieces[1])
def new_chapter(self, pieces):
if not isinstance(self.curdoc, MainDocument) and not isinstance(self.curdoc, Chapter):
raise Exception("Can not start a new chapter here.")
chp = self.maindoc.new_chapter(pieces)
self.set_current_doc(chp)
self.toclines += 2
def new_encounter(self, pieces):
if self.in_encounter :
raise Exception("Encounters may not be nested.")
en = self.curdoc.new_encounter(pieces)
self.set_current_doc(en)
self.in_encounter = True
self.total_encounters += 1
self.toclines += 1
def new_spellbook(self, pieces):
book = self.maindoc.new_spellbook(pieces)
self.set_current_doc(book)
def appendix_image(self, pieces):
p = pieces.split(":")
self.maindoc.add_appendix_image(p[0], p[1])
self.toclines += 2
def endsubdoc(self, pieces):
if isinstance(self.curdoc, Encounter):
self.in_encounter = False
self.curdoc.close()
self.docstack.pop()
self.curdoc = self.docstack[-1]
def namedsidebar(self, pieces):
self.curdoc.close()
self.curdoc.set_txttype(0)
self.curdoc.set_txttype(2, pieces.strip())
def textline(self, pieces):
nxttype = len(pieces[0]) if pieces[0] else 0
txt = pieces[1]
        txt = txt.strip()
        self.text_processer.match_and_process(txt)
if self.curdoc.inlist:
self.curdoc.append_bullet(txt)
else:
self.curdoc.add_line(txt, nxttype)
def heading(self, pieces):
lvl = len(pieces[0])
if isinstance(self.curdoc,Encounter):
if lvl < 3:
log_message("Use only *** headers in encounters.")
lvl = 3 #only subsections in encounters
self.curdoc.add_header(pieces[1],lvl)
if lvl <= 2:
self.toclines += 1
def noop(self,pieces):
return
def endparagraph(self, pieces):
self.curdoc.endparagraph()
if isinstance(self.curdoc,AdvTable):
self.endsubdoc(pieces)
def check_references(self, cnt):
labels = {
'appendix' : [format_label('Adventure Summary')] + [format_label(a['name']) for a in cnt['appendix']],
'chapter' : [],
'storyawards' : [format_label(s['name']) for s in cnt['storyawards']],
'encounter' : [],
'general' : [format_label(l) for l in self.text_processer.exlabels]
}
for chp in cnt['chapters']:
labels['chapter'].append(format_label(chp['name']))
for enc in chp['encounters']:
labels['encounter'].append(format_label(enc['name']))
for r in self.text_processer.apprefs:
l = format_label(r)
if l not in labels['appendix']:
log_message("Warning: Reference to unknown appendix %s." % r)
for r in self.text_processer.chaprefs:
l = format_label(r)
if l not in labels['chapter']:
log_message("Warning: Reference to unknown chapter %s." % r)
for r in self.text_processer.strefs:
l = format_label(r)
if l not in labels['storyawards']:
log_message("Warning: Reference to unknown story award %s." % r)
for r in self.text_processer.encrefs:
l = format_label(r)
if l not in labels['encounter']:
log_message("Warning: Reference to unknown encounter %s." % r)
for r in self.text_processer.exrefs:
l = format_label(r)
if l not in labels['general']:
log_message("Warning: Reference to unknown label %s." % r)
def check_markup(self):
valid = True
if self.text_processer.bold_count % 2 == 1:
log_message("Unterminiated bold markup (/b/).")
valid = False
if self.text_processer.it_count % 2 == 1:
log_message("Unterminiated italic markup (/i/).")
valid = False
if self.text_processer.numlist_count % 2 == 1:
log_message("Unterminated numeric list markup (/nl/).")
valid = False
if self.text_processer.list_count % 2 == 1:
log_message("Unterminated list markuped (/l/).")
valid = False
if self.text_processer.table_open:
log_message("Unterminated table (missing /table).")
valid = False
return valid
def validate_document(self, cnt):
self.check_references(cnt)
return self.check_markup()
def contents(self):
tiers = [0,'1-4','5-10','11-16','17-20']
optlvl = [0,'2nd', '7th', '13th', '18th']
dmxphour = [0, 75, 325, 800, 1675]
cnt = self.maindoc.contents()
variables = cnt['variables']
cnt['bibliography'] = self.text_processer.bibliography
cnt['cash'] = self.text_processer.cash
cnt['totalcash'] = format_cash(self.text_processer.totalcash)
cnt['npcs'] = sorted(self.text_processer.npcs,key=lambda npc: npc['name'])
cnt['quests'] = self.text_processer.quests
cnt['mundane'] = self.text_processer.mundane
cnt['spellbook'] = [get_spell(sp) for sp in self.spellbook]
cash = float(self.text_processer.totalcash)
cnt['totalquestxp'] = self.text_processer.totalquestxp
cnt['cashsplit'] = ["{0:0.2f}".format(cash / float(s)) for s in range(3,8)]
cnt['splices'] = {}
for k in self.splices:
cnt['splices'][k] = self.splices[k].contents()
certitems = False
tmp = cnt['magicitems']+cnt['consumables']+cnt['scrolls']
for it in tmp:
if not (it['variables'].get('nocert',False)):
certitems = True
break
cnt['artwork'] = [a['image'] for a in cnt['appendix']]
cnt['artwork'] += self.text_processer.artwork
if 'coverimage' in variables: cnt['artwork'].append(variables['coverimage'])
for chp in cnt['chapters']:
for enc in chp['encounters']:
if enc['map']: cnt['artwork'].append(enc['map'])
for it in cnt['magicitems']:
if 'image' in it['variables']:
cnt['artwork'].append(it['variables']['image'])
for it in cnt['consumables']:
if 'image' in it['variables']:
cnt['artwork'].append(it['variables']['image'])
try:
if 'tier' not in variables: raise Exception('no tier')
ti = int(variables['tier'])
if ti < 1 or ti > 4: raise Exception('out of bounds')
except Exception:
raise Exception('The tier variable must be a number from 1-4.')
try:
if 'playtime' not in variables: raise Exception('no playtime')
ptime = int(variables['playtime'])
tblocks = int(floor(ptime/2))
except Exception:
raise Exception('The playtime variable must be set and must be an integer.')
renown = 1
if ptime >= 8: renown = 2
if cnt['xpdata']['difficulties']:
if not 'minxp' in cnt['variables']:
minxp = int(cnt['xpdata']['party_size'][0][4]) + (cnt['totalquestxp'] // 2)
minxp = (minxp // 50) * 50
cnt['variables']['minxp'] = minxp
if not 'maxxp' in cnt['variables']:
mdif = len(cnt['xpdata']['party_size']) - 1
maxxp = int(cnt['xpdata']['party_size'][mdif][2]) + cnt['totalquestxp']
maxxp = ((maxxp // 50) + 1) * 50
cnt['variables']['maxxp'] = maxxp
if len(cnt['bibliography']) > 0: self.toclines += 2
if len(cnt['npcs']) > 0: self.toclines += 2
longtoc = self.toclines > 55
cnt['intvars'] = {
'encounters' : self.total_encounters,
'has_story_awards': self.has_story_awards,
'has_itemcerts': certitems,
'tierlevels' : tiers[ti],
'optlevels' : optlvl[ti],
'dmgold' : int(floor(dmxphour[ti] * int(variables['playtime']) / 2)),
'dmxp' : dmxphour[ti] * int(variables['playtime']),
'downtime' : 5 * tblocks,
'renown' : renown,
'longtoc' : longtoc,
}
return cnt
def add_monster(self, pieces):
self.maindoc.add_monster(pieces)
def named_monster(self, pieces): #monster is an individual
alias_monster(pieces[1].strip(), pieces[0].strip(), True)
def skin_monster(self, pieces): #monster is just reskinned
alias_monster(pieces[1].strip(), pieces[0].strip(), False)
def add_bullet(self, pieces):
numlist = False
if pieces[0][0] == '#': numlist = True
self.text_processer.match_and_process(pieces[1])
self.curdoc.add_bulletpoint(pieces[1], numlist)
def special(self, pieces):
self.curdoc.special(pieces)
def new_storyaward(self,pieces):
st = self.maindoc.new_storyaward(pieces)
self.set_current_doc(st)
self.has_story_awards = True
def splice(self,pieces):
sp = SubDocument(pieces)
self.set_current_doc(sp)
self.splices[pieces] = sp
def magic_item(self,pieces):
mi = self.maindoc.new_magicitem(pieces)
self.set_current_doc(mi)
def switch_part(self, tmp):
if isinstance(self.curdoc, StoryAward):
self.curdoc.switch_part()
else:
raise Exception('Found a !playertext command when not in a story award block.')
    def table_name(self,pieces):
        # __init__ later rebinds self.table_name to data, but the pattern
        # table captured this bound method first, so dispatch still works
        self.table_name = pieces
def wide_table_name(self,pieces):
self.table_name = pieces
self.table_wide = True
def spell_reference(self, pieces):
self.spellbook.append(pieces)
def table(self,pieces):
if isinstance(self.curdoc, AdvTable):
self.curdoc.add_line(pieces)
else:
table = AdvTable(pieces, self.table_name, self.table_wide)
self.curdoc.add_table(table)
self.set_current_doc(table)
self.table_name = None
self.table_wide = False
def __init__(self):
PatternBehavior.__init__(self,[
['^@(\w+) (.+)$', self.variable],
['^\* (.+)$', self.new_chapter],
['^(\*\*+) (.+)$', self.heading],
['^- (\w+):(.+)?$', self.special],
['^\|\|\|\| (.*)$', self.wide_table_name],
['^\|\|\| (.*)$', self.table_name],
['^\|(.*)\|$', self.table],
['^!encounter (.*)', self.new_encounter],
['^!storyaward (.*)', self.new_storyaward],
['^!magicitem (.*)', self.magic_item],
['^!spellbook (.*)', self.new_spellbook],
['^!end', self.endsubdoc],
['^!monster (.*)$', self.add_monster],
['^!namedmonster ([A-Za-z0-9\-\(\)\' ]+)/(.*)$', self.named_monster],
['^!skinmonster ([A-Za-z0-9\-\(\)\' ]+)/(.*)$', self.skin_monster],
['^!appendix_image (.*)$',self.appendix_image],
['^!spellreference (.*)$',self.spell_reference],
['^!splice (.*)$',self.splice],
['^!table (.*)$', self.table_name],
['^//.*', self.noop],
['^$', self.endparagraph],
['^>>#(.+)$', self.namedsidebar],
['^-(.) (.*)$', self.add_bullet],
['^(>*)?(.+)$', self.textline]
])
self.maindoc = MainDocument()
self.curdoc = self.maindoc
self.docstack = [self.maindoc]
self.text_processer = InlineTextProcesser()
self.splices = {}
self.table_name = None
self.table_wide = False
self.total_encounters = 0
self.has_story_awards = False
self.toclines = 2
self.spellbook = []
self.in_encounter = False
def ParseAdventureDocument(doc):
builder = AdventureBuilder()
pdoc = None
try:
builder.match_and_process(doc)
pdoc = builder.contents()
if builder.validate_document(pdoc): return pdoc
except Exception as exp:
log_message("Error: line %d: %s" % (builder.lineno,str(exp)))
return False
def DebugAdventureDocument(doc):
builder = AdventureBuilder()
pdoc = None
builder.match_and_process(doc)
pdoc = builder.contents()
fout = open('/tmp/advdoc','w')
pp = PrettyPrinter(indent=2, stream=fout)
pp.pprint(pdoc)
fout.close()
if builder.validate_document(pdoc): return pdoc
return False
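# Illustrative only: a tiny source document in the markup recognized by the
# patterns above (@variables, '*' chapter lines, '!encounter' blocks). The
# names and text are made up, and the call is left commented out because it
# requires the supporting data modules (monsters, spells, ...) to be present.
EXAMPLE_DOC = """@title Example Adventure
@code CCC-EX-01
@tier 1
@playtime 2
* Chapter One
Some read-aloud text for the opening scene.
"""
# pdoc = ParseAdventureDocument(EXAMPLE_DOC)  # dict of contents, or False on error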
|
import cv2, dlib
img = cv2.imread("fotos/grupo.0.jpg")
detector = dlib.cnn_face_detection_model_v1("recursos/mmod_human_face_detector.dat")
faces = detector(img, 1)
print("Faces detectadas:", len(faces))
for face in faces:
    # rectangle corners (left, top, right, bottom) and detection confidence
    e, t, d, b, co = (int(face.rect.left()), int(face.rect.top()), int(face.rect.right()), int(face.rect.bottom()),
                      face.confidence)
print(co)
cv2.rectangle(img, (e, t), (d, b), (255, 255, 0), 2)
cv2.imshow("Detector CNN", img)
cv2.waitKey(0)
cv2.destroyAllWindows()
|
"""Add sessions
Revision ID: a01d1258d7a7
Revises: 9ca629ddd362
Create Date: 2020-09-10 13:16:28.154448
"""
import sqlalchemy as sa # type: ignore
from alembic import op # type: ignore
# revision identifiers, used by Alembic.
revision = "a01d1258d7a7"
down_revision = "9ca629ddd362"
branch_labels = None
depends_on = None
def upgrade():
op.create_table(
"sessions",
sa.Column("key", sa.String(length=44), nullable=False),
sa.Column("expires", sa.DateTime(), nullable=True),
sa.Column("user", sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(["user"], ["users.id"], ondelete="CASCADE"),
sa.PrimaryKeyConstraint("key"),
)
def downgrade():
op.drop_table("sessions")
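# Illustrative: apply or roll back this revision from the command line,
# using the revision ids declared above.
#   alembic upgrade a01d1258d7a7
#   alembic downgrade 9ca629ddd362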
|
# Parse weather data from weatherbit.io
try:
import ujson as ujson
except ImportError:
import json as ujson
import weather_obs as wo
_weather_obs_fields = [
( 'time_ts', ('ts', ), None, ),
( 'summary', ('weather', 'description', ), None, ),
( 'code', ('weather', 'code', ), None, ),
( 'icon', ('weather', 'icon', ), None, ),
( 'temperature_C', ('temp', ), None, ),
( 'temperature_min_C', ('min_temp', ), None, ),
( 'temperature_max_C', ('max_temp', ), None, ),
( 'pressure_hPa', ('pres', ), None, ),
( 'wind_speed_m_s', ('wind_spd', ), None, ),
( 'wind_direction_deg', ('wind_dir', ), None, ),
( 'wind_direction_cardinal', ('wind_cdir', ), None, ),
( 'wind_gust_m_s', ('wind_gust_spd', ), None, ),
( 'cloud_cover_ratio', ('clouds', ), None, ),
( 'relative_humidity_ratio', ('rh', ), lambda x: x/100, ),
( 'precipitation_mm', ('precip', ), None, ),
( 'precipitation_probability_ratio', ('pop', ), lambda x: x/100, ),
( 'sun_rise_ts', ('sunrise_ts', ), None, ),
( 'sun_set_ts', ('sunset_ts', ), None, ),
( 'moon_rise_ts', ('moonrise_ts', ), None, ),
( 'moon_set_ts', ('moonset_ts', ), None, ),
( 'moon_phase', ('moon_phase', ), None, ),
]
def process_json(file):
obss = []
pdata = ujson.loads(file.read())
wdata = pdata['data']
wodata = {}
for o in wdata:
for f in _weather_obs_fields:
field_name = f[0]
path = f[1]
convert = f[2]
if path:
v = o
for p in path:
try:
v = v[p]
except KeyError:
v = None
break
else:
v = None
            if convert and v is not None:
v = convert(v)
wodata[field_name] = v
obs = wo.Weather_obs(**wodata)
obss.append(obs)
return obss
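# Illustrative usage; 'forecast.json' is a hypothetical file holding a saved
# weatherbit.io response with a top-level 'data' list.
#   with open('forecast.json') as f:
#       for obs in process_json(f):
#           print(obs)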
|
from mdns_client import MdnsListener
from printing import qprint, eprint
def do_connect(self, line):
""" connect TYPE TYPE_PARAMS Connect boards to shell49.
connect serial [port [baud]] Wired connection. Uses defaults from config file.
connect telnet [url [user [pwd]]] Wireless connection. If no url/ip address is
specified, connects to all known boards advertising repl service via mDNS.
Optional user (default: 'micro') and password (default: 'python').
Note: do not connect to the same board via serial AND telnet connections.
Doing so may block communication with the board.
"""
args = self.line_to_args(line)
if len(args) < 1:
eprint('Missing connection TYPE')
return
connect_type = args[0]
if connect_type == 'serial':
port = args[1] if len(args) > 1 else self.config.get(
0, 'port', '/dev/cu.SLAB_USBtoUART')
baud = args[2] if len(args) > 2 else self.config.get(
0, 'baudrate', '115200')
try:
baud = int(baud)
except ValueError:
eprint("Not a valid baudrate, '{}'".format(baud))
return
        # Note: the board may already be connected over telnet, but we have
        # no way to know; in that case, connect blocks
if self.boards.find_board(port):
eprint("board already connected on '{}'".format(port))
return
self.boards.connect_serial(port, baud)
elif connect_type == 'telnet':
if len(args) > 1:
user = args[2] if len(args) > 2 else 'micro'
pwd = args[3] if len(args) > 3 else 'python'
self.boards.connect_telnet(args[1], user, pwd)
else:
listener = MdnsListener()
adv = list(listener.listen(seconds=1))
if len(adv) == 0:
qprint("No boards detected via mDNS.")
for b in adv:
qprint("Heard from '{}' ({})".format(b.url, b.ip))
# connect only to boards in the config database
board_id = self.config.get_board_from_name(b.hostname)
if not board_id:
qprint(" not in db, skip!")
continue
                # skip boards we are already connected to
if self.boards.connected(b.hostname):
qprint(" already connected")
continue
# let's connect!
user = self.config.get(board_id, 'user', 'micro')
pwd = self.config.get(board_id, 'password', 'python')
self.boards.connect_telnet(b.url, user, pwd)
else:
eprint('Unrecognized connection TYPE: {}'.format(connect_type))
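# Illustrative command lines for do_connect (port, address, and credentials
# below are examples, not values baked into the code):
#   connect serial /dev/ttyUSB0 115200
#   connect telnet 192.168.1.42 micro python
#   connect telnet              # discover boards advertising repl via mDNS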
|
'''
def allow_tags(func):
def _decorate(_func):
_func.allow_tags = True
return _func
return _decorate(func)
''' |
from datetime import datetime,timezone,timedelta
import time
try:
import sys
import socket
import fcntl
import struct
def get_ip_address(ifname):
try:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
return socket.inet_ntoa(fcntl.ioctl(
s.fileno(),
0x8915, # SIOCGIFADDR
struct.pack('256s', bytes(ifname[:15], 'utf-8'))
)[20:24])
        except OSError:
return ""
except ImportError:
def get_ip_address(ifname):
return "No IP on PC"
def get_reading_age_minutes(timestamp):
delta = datetime.now(timezone.utc) - timestamp
minutes_old = int(delta.total_seconds() / 60)
return minutes_old
def now_plus_seconds(seconds):
return datetime.now(timezone.utc) + timedelta(seconds=seconds)
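# Quick illustration of the helpers above: a reading stamped 90 seconds ago
# is reported as 1 minute old, and now_plus_seconds gives a future deadline.
#   ts = datetime.now(timezone.utc) - timedelta(seconds=90)
#   assert get_reading_age_minutes(ts) == 1
#   assert now_plus_seconds(30) > datetime.now(timezone.utc)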
class Reading():
timestamp = None
value = 0
trend = 0
def __init__(self, timestamp, value, trend):
self.timestamp = timestamp
self.value = value
self.trend = trend |
if __name__ == '__main__':
n = int(input())
i = 1
numbers = []
while i <= n:
numbers.append(str(i))
i+=1
    # concatenate 1..n into one string and print it
    total = "".join(numbers)
    print(total)
|
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from tensorflow.keras import optimizers
import numpy as np
import csv
from typing import List
def open_csv() -> List:
"""This opens the data from a csv and returns a list of it as data"""
with open('./point-weighting-combined-new.csv', newline='') as csvfile:
data = list(csv.reader(csvfile))
return data
def get_data_Y():
"""Gets the value of A winning, this is the second column in the csv for
each match to use the Y value"""
data = open_csv()
# Get the parts of the data that are not the team (A, B) and the value
# showing whether or not they lost. Everything after the second column
useful_data = data[2:]
# The final list of Y values that will be turned into a numpy array and
# returned later in this script
final = []
# Sets the buffer location, if increased by two after the loop, it will go
# through each row two at a time
buff = 0
# Goes through each of the rows until it is at the limit
while buff < len(useful_data):
# Gets the value that shows if A has won the match
a = useful_data[buff][1]
# Adds the A values as a numpy array to the final list
final.append(np.array([int(a)]))
# Increases the buffer, goes to the next match
buff += 2
# Returns the final list as a numpy array
return np.array([*final])
def get_data_X():
"""Gets the rest of the data in each match and find the difference in the
two teams for each match and save that as a numpy array"""
data = open_csv()
# Get all the columns after the initial team letter value and the value
# that shows if A has won the match
useful_data = data[2:]
# Sets the final list to be returned as the X values
final = []
# Sets the location to read from in the list of matches
buff = 0
# Loops through each match
while buff < len(useful_data):
# Get all the fields for the team A after the first two columns
a = useful_data[buff]
# Get all the fields for the team B after the first two columns
b = useful_data[buff + 1]
# Each total list is the difference between A and B score per row
total = []
# Get the totals of the difference of each column after the second
for it_a, it_b in zip(a[2:], b[2:]):
# Add the difference to the list called total to relate the team A
# score with the team B score to better understand what winning a
# match would look like on a team by team basis
total.append(int(it_a) - int(it_b))
# Add each total list (the difference between A and B score per row) to
# the final list to have the total metrics for each match
final.append(np.array(total))
# Increases the buffer through the list
buff += 2
return np.array([*final])
if __name__ == "__main__":
# Gets the value for X and Y
Y = get_data_Y()
X = get_data_X()
    # Sets up the model and its structure
model = Sequential()
    # Adds the input/hidden layer and specifies that its activation function
    # is the sigmoid function. It also sets the input dimension to 16, meaning
    # we look at 16 independent variables per match across 64 matches; the
    # match data is the difference between team A's and team B's scores for
    # each of the sixteen variables.
model.add(Dense(units=64, activation='sigmoid', input_dim=16))
model.add(Dense(units=1, activation='sigmoid'))
# This sets up the optimizer
    sgd = optimizers.SGD(learning_rate=1)
    # This compiles the model with the specified optimizer and loss
model.compile(loss='mean_squared_error', optimizer=sgd)
# This fits the data of X to the Y output values
model.fit(X, Y, epochs=1500, verbose=False)
# This shows the summary of the model, the layers, shape, and parameters
print(model.summary())
    # This saves the model so it can be used without having to retrain it
model.save('neural_network.model')
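    # Illustrative follow-up (assumes the save above succeeded): reload the
    # model and score the first few matches; outputs near 1 mean the network
    # predicts a win for team A.
    #   from tensorflow.keras.models import load_model
    #   trained = load_model('neural_network.model')
    #   print(trained.predict(X[:5]))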
|
r"""
Examples of semigroups
"""
#*****************************************************************************
# Copyright (C) 2008-2009 Nicolas M. Thiery <nthiery at users.sf.net>
#
# Distributed under the terms of the GNU General Public License (GPL)
# http://www.gnu.org/licenses/
#*****************************************************************************
from sage.misc.cachefunc import cached_method
from sage.structure.parent import Parent
from sage.structure.unique_representation import UniqueRepresentation
from sage.structure.element_wrapper import ElementWrapper
from sage.categories.all import Semigroups
from sage.sets.family import Family
class LeftZeroSemigroup(UniqueRepresentation, Parent):
r"""
An example of a semigroup.
This class illustrates a minimal implementation of a semigroup.
EXAMPLES::
sage: S = Semigroups().example(); S
An example of a semigroup: the left zero semigroup
This is the semigroup that contains all sorts of objects::
sage: S.some_elements()
[3, 42, 'a', 3.4, 'raton laveur']
with product rule given by $a \times b = a$ for all $a, b$::
sage: S('hello') * S('world')
'hello'
sage: S(3)*S(1)*S(2)
3
sage: S(3)^12312321312321
3
TESTS::
sage: TestSuite(S).run(verbose = True)
running ._test_an_element() . . . pass
running ._test_associativity() . . . pass
running ._test_cardinality() . . . pass
running ._test_category() . . . pass
running ._test_elements() . . .
Running the test suite of self.an_element()
running ._test_category() . . . pass
running ._test_eq() . . . pass
running ._test_not_implemented_methods() . . . pass
running ._test_pickling() . . . pass
pass
running ._test_elements_eq_reflexive() . . . pass
running ._test_elements_eq_symmetric() . . . pass
running ._test_elements_eq_transitive() . . . pass
running ._test_elements_neq() . . . pass
running ._test_eq() . . . pass
running ._test_not_implemented_methods() . . . pass
running ._test_pickling() . . . pass
running ._test_some_elements() . . . pass
"""
def __init__(self):
r"""
The left zero semigroup
EXAMPLES::
sage: S = Semigroups().example(); S
An example of a semigroup: the left zero semigroup
TESTS::
sage: TestSuite(S).run()
"""
Parent.__init__(self, category = Semigroups())
def _repr_(self):
r"""
EXAMPLES::
sage: Semigroups().example()._repr_()
'An example of a semigroup: the left zero semigroup'
"""
return "An example of a semigroup: the left zero semigroup"
def product(self, x, y):
r"""
Returns the product of ``x`` and ``y`` in the semigroup, as per
:meth:`Semigroups.ParentMethods.product`.
EXAMPLES::
sage: S = Semigroups().example()
sage: S('hello') * S('world')
'hello'
sage: S(3)*S(1)*S(2)
3
"""
assert x in self
assert y in self
return x
def an_element(self):
r"""
Returns an element of the semigroup.
EXAMPLES::
sage: Semigroups().example().an_element()
42
"""
return self(42)
def some_elements(self):
r"""
Returns a list of some elements of the semigroup.
EXAMPLES::
sage: Semigroups().example().some_elements()
[3, 42, 'a', 3.4, 'raton laveur']
"""
return [self(i) for i in [3, 42, "a", 3.4, "raton laveur"]]
class Element(ElementWrapper):
def is_idempotent(self):
r"""
Trivial implementation of ``Semigroups.Element.is_idempotent``
since all elements of this semigroup are idempotent!
EXAMPLES::
sage: S = Semigroups().example()
sage: S.an_element().is_idempotent()
True
sage: S(17).is_idempotent()
True
"""
return True
class FreeSemigroup(UniqueRepresentation, Parent):
r"""
An example of semigroup.
    The purpose of this class is to provide a minimal template for
    implementing a semigroup.
EXAMPLES::
sage: S = Semigroups().example("free"); S
An example of a semigroup: the free semigroup generated by ('a', 'b', 'c', 'd')
This is the free semigroup generated by::
sage: S.semigroup_generators()
Family ('a', 'b', 'c', 'd')
    and with product given by concatenation::
sage: S('dab') * S('acb')
'dabacb'
TESTS::
sage: TestSuite(S).run()
"""
def __init__(self, alphabet=('a','b','c','d')):
r"""
The free semigroup.
INPUT:
- ``alphabet`` -- a tuple of strings: the generators of the semigroup
EXAMPLES::
sage: from sage.categories.examples.semigroups import FreeSemigroup
sage: F = FreeSemigroup(('a','b','c')); F
An example of a semigroup: the free semigroup generated by ('a', 'b', 'c')
TESTS::
sage: F == loads(dumps(F))
True
"""
self.alphabet = alphabet
Parent.__init__(self, category = Semigroups().FinitelyGenerated())
def _repr_(self):
r"""
EXAMPLES::
sage: from sage.categories.examples.semigroups import FreeSemigroup
sage: FreeSemigroup(('a','b','c'))._repr_()
"An example of a semigroup: the free semigroup generated by ('a', 'b', 'c')"
"""
return "An example of a semigroup: the free semigroup generated by %s"%(self.alphabet,)
def product(self, x, y):
r"""
Returns the product of ``x`` and ``y`` in the semigroup, as per
:meth:`Semigroups.ParentMethods.product`.
EXAMPLES::
sage: F = Semigroups().example('free')
sage: F.an_element() * F('a')^5
'abcdaaaaa'
"""
assert x in self
assert y in self
return self(x.value + y.value)
@cached_method
def semigroup_generators(self):
r"""
Returns the generators of the semigroup.
EXAMPLES::
sage: F = Semigroups().example('free')
sage: F.semigroup_generators()
Family ('a', 'b', 'c', 'd')
"""
return Family([self(i) for i in self.alphabet])
def an_element(self):
r"""
Returns an element of the semigroup.
EXAMPLES::
sage: F = Semigroups().example('free')
sage: F.an_element()
'abcd'
"""
return self(''.join(self.alphabet))
def _element_constructor_(self, x):
r"""
Construct an element of this semigroup from the data ``x``.
INPUT:
- ``x`` -- a string
EXAMPLES::
sage: F = Semigroups().example('free'); F
An example of a semigroup: the free semigroup generated by ('a', 'b', 'c', 'd')
sage: F._element_constructor_('a')
'a'
sage: F._element_constructor_('bad')
'bad'
TESTS::
sage: F._element_constructor_('z')
Traceback (most recent call last):
...
assert a in self.alphabet
AssertionError
sage: bad = F._element_constructor_('bad'); bad
'bad'
sage: bad in F
True
TESTS::
sage: S = Semigroups().Subquotients().example()
sage: type(S._element_constructor_(17))
<class 'sage.categories.examples.semigroups.QuotientOfLeftZeroSemigroup_with_category.element_class'>
"""
for a in x:
assert a in self.alphabet
return self.element_class(self, x)
class Element(ElementWrapper):
r"""
The class for elements of the free semigroup.
"""
wrapped_class = str
class QuotientOfLeftZeroSemigroup(UniqueRepresentation, Parent):
r"""
Example of a quotient semigroup
EXAMPLES::
sage: S = Semigroups().Subquotients().example(); S
An example of a (sub)quotient semigroup: a quotient of the left zero semigroup
This is the quotient of::
sage: S.ambient()
An example of a semigroup: the left zero semigroup
obtained by setting `x=42` for any `x\geq 42`::
sage: S(100)
42
sage: S(100) == S(42)
True
The product is inherited from the ambient semigroup::
sage: S(1)*S(2) == S(1)
True
TESTS::
sage: TestSuite(S).run(verbose = True)
running ._test_an_element() . . . pass
running ._test_associativity() . . . pass
running ._test_cardinality() . . . pass
running ._test_category() . . . pass
running ._test_elements() . . .
Running the test suite of self.an_element()
running ._test_category() . . . pass
running ._test_eq() . . . pass
running ._test_not_implemented_methods() . . . pass
running ._test_pickling() . . . pass
pass
running ._test_elements_eq_reflexive() . . . pass
running ._test_elements_eq_symmetric() . . . pass
running ._test_elements_eq_transitive() . . . pass
running ._test_elements_neq() . . . pass
running ._test_eq() . . . pass
running ._test_not_implemented_methods() . . . pass
running ._test_pickling() . . . pass
running ._test_some_elements() . . . pass
"""
def _element_constructor_(self, x):
r"""
Convert ``x`` into an element of this semigroup.
EXAMPLES::
sage: S = Semigroups().Subquotients().example()
sage: S._element_constructor_(17)
17
TESTS::
sage: S = Semigroups().Subquotients().example()
sage: type(S._element_constructor_(17))
<class 'sage.categories.examples.semigroups.QuotientOfLeftZeroSemigroup_with_category.element_class'>
"""
return self.retract(self.ambient()(x))
def __init__(self, category = None):
r"""
        The quotient of the left zero semigroup of integers obtained by
        setting `x=42` for any `x\geq 42`.
EXAMPLES::
sage: S = Semigroups().Subquotients().example(); S
An example of a (sub)quotient semigroup: a quotient of the left zero semigroup
sage: S.ambient()
An example of a semigroup: the left zero semigroup
sage: S(100)
42
sage: S(100) == S(42)
True
sage: S(1)*S(2) == S(1)
True
TESTS::
sage: TestSuite(S).run()
"""
if category is None:
category = Semigroups().Quotients()
Parent.__init__(self, category = category)
def _repr_(self):
r"""
EXAMPLES::
sage: Semigroups().Subquotients().example()._repr_()
'An example of a (sub)quotient semigroup: a quotient of the left zero semigroup'
"""
return "An example of a (sub)quotient semigroup: a quotient of the left zero semigroup"
def ambient(self):
r"""
Returns the ambient semigroup.
EXAMPLES::
sage: S = Semigroups().Subquotients().example()
sage: S.ambient()
An example of a semigroup: the left zero semigroup
"""
return Semigroups().example()
def lift(self, x):
r"""
Lift the element ``x`` into the ambient semigroup.
INPUT:
- ``x`` -- an element of ``self``.
OUTPUT:
- an element of ``self.ambient()``.
EXAMPLES::
sage: S = Semigroups().Subquotients().example()
sage: x = S.an_element(); x
42
sage: S.lift(x)
42
sage: S.lift(x) in S.ambient()
True
sage: y = S.ambient()(100); y
100
sage: S.lift(S(y))
42
"""
assert x in self
return x.value
def the_answer(self):
r"""
Returns the Answer to Life, the Universe, and Everything as an
element of this semigroup.
EXAMPLES::
sage: S = Semigroups().Subquotients().example()
sage: S.the_answer()
42
"""
return self.retract(self.ambient()(42))
def an_element(self):
r"""
Returns an element of the semigroup.
EXAMPLES::
sage: S = Semigroups().Subquotients().example()
sage: S.an_element()
42
"""
return self.the_answer()
def some_elements(self):
r"""
Returns a list of some elements of the semigroup.
EXAMPLES::
sage: S = Semigroups().Subquotients().example()
sage: S.some_elements()
[1, 2, 3, 8, 42, 42]
"""
return [self.retract(self.ambient()(i))
for i in [1, 2, 3, 8, 42, 100]]
def retract(self, x):
r"""
        Retracts ``x`` onto an element of this semigroup.
INPUT:
- ``x`` -- an element of the ambient semigroup (``self.ambient()``).
OUTPUT:
- an element of ``self``.
EXAMPLES::
sage: S = Semigroups().Subquotients().example()
sage: L = S.ambient()
sage: S.retract(L(17))
17
sage: S.retract(L(42))
42
sage: S.retract(L(171))
42
TESTS::
sage: S.retract(L(171)) in S
True
"""
from sage.rings.integer_ring import ZZ
assert x in self.ambient() and x.value in ZZ
if x.value > 42:
return self.the_answer()
else:
return self.element_class(self, x)
class Element(ElementWrapper):
pass
class IncompleteSubquotientSemigroup(UniqueRepresentation,Parent):
def __init__(self, category = None):
r"""
An incompletely implemented subquotient semigroup, for testing purposes
EXAMPLES::
sage: S = sage.categories.examples.semigroups.IncompleteSubquotientSemigroup()
sage: S
A subquotient of An example of a semigroup: the left zero semigroup
TESTS::
sage: S._test_not_implemented_methods()
Traceback (most recent call last):
...
AssertionError: Not implemented method: lift
sage: TestSuite(S).run(verbose = True)
running ._test_an_element() . . . pass
running ._test_associativity() . . . fail
Traceback (most recent call last):
...
NotImplementedError: <abstract method retract at ...>
------------------------------------------------------------
running ._test_cardinality() . . . pass
running ._test_category() . . . pass
running ._test_elements() . . .
Running the test suite of self.an_element()
running ._test_category() . . . pass
running ._test_eq() . . . pass
running ._test_not_implemented_methods() . . . pass
running ._test_pickling() . . . pass
pass
running ._test_elements_eq_reflexive() . . . pass
running ._test_elements_eq_symmetric() . . . pass
running ._test_elements_eq_transitive() . . . pass
running ._test_elements_neq() . . . pass
running ._test_eq() . . . pass
running ._test_not_implemented_methods() . . . fail
Traceback (most recent call last):
...
AssertionError: Not implemented method: lift
------------------------------------------------------------
running ._test_pickling() . . . pass
running ._test_some_elements() . . . pass
The following tests failed: _test_associativity, _test_not_implemented_methods
"""
Parent.__init__(self, category=Semigroups().Subquotients().or_subcategory(category))
def ambient(self):
r"""
Returns the ambient semigroup.
EXAMPLES::
sage: S = Semigroups().Subquotients().example()
sage: S.ambient()
An example of a semigroup: the left zero semigroup
"""
return Semigroups().example()
class Element(ElementWrapper):
pass
|
""" Paths for different VTR flow executables and resources """
import pathlib
# Path to the root repository directory
root_path = pathlib.Path(__file__).absolute().parent.parent.parent.parent.parent
# VTR Paths
vtr_flow_path = root_path / "vtr_flow"
# ODIN paths
odin_path = root_path / "ODIN_II"
odin_exe_path = odin_path / "odin_II"
odin_cfg_path = vtr_flow_path / "misc" / "basic_odin_config_split.xml"
odin_verify_path = odin_path / "verify_odin.sh"
odin_benchmark_path = odin_path / "regression_test" / "benchmark"
odin_output_on_error_path = odin_path / "regression_test" / ".library" / "output_on_error.conf"
# YOSYS paths
yosys_exe_path = root_path / "libs" / "EXTERNAL" / "libyosys" / "yosys"
yosys_lib_path = vtr_flow_path / "misc" / "yosyslib"
yosys_script_path = yosys_lib_path / "synthesis.ys"
# ABC paths
abc_path = root_path / "abc"
abc_exe_path = abc_path / "abc"
abc_rc_path = abc_path / "abc.rc"
# ACE paths
ace_path = root_path / "ace2"
ace_exe_path = ace_path / "ace"
ace_extract_clk_from_blif_script_path = ace_path / "scripts" / "extract_clk_from_blif.py"
# VPR paths
vpr_path = root_path / "vpr"
vpr_exe_path = vpr_path / "vpr"
# Flow scripts
scripts_path = vtr_flow_path / "scripts"
run_vtr_flow_path = scripts_path / "run_vtr_flow.py"
flow_template_path = scripts_path / "flow_script_template.txt"
# Task files
tasks_path = vtr_flow_path / "tasks"
regression_tests_path = tasks_path / "regression_tests"
# Parsing files
parse_path = vtr_flow_path / "parse"
vtr_benchmarks_parse_path = parse_path / "parse_config" / "common" / "vtr_benchmarks.txt"
pass_requirements_path = parse_path / "pass_requirements"
# Other scripts
blackbox_latches_script_path = scripts_path / "blackbox_latches.pl"
restore_multiclock_latch_old_script_path = scripts_path / "restore_multiclock_latch_information.pl"
restore_multiclock_latch_script_path = scripts_path / "restore_multiclock_latch.pl"
valgrind_supp = vpr_path / "valgrind.supp"
lsan_supp = vpr_path / "lsan.supp"
asan_supp = vpr_path / "asan.supp"
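# Usage sketch (added for illustration; not part of the original module).
# When run directly, report whether a sample of the resolved tool binaries
# exists on disk. The particular executables checked here are an arbitrary
# choice.
if __name__ == "__main__":
    for name, exe in [("vpr", vpr_exe_path), ("abc", abc_exe_path), ("ace", ace_exe_path)]:
        status = "found" if exe.is_file() else "missing"
        print(f"{name}: {exe} ({status})")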
|
#!/usr/local/bin/python2.7
__author__ = 'Nick Driver - https://github.com/TheDr1ver'
'''
#### Overview ####
ioc2splunk.py takes a CSV result from ioc-parser and appends de-duped results to
a CSV file that can be processed by Splunk as a lookup table. ioc-parser results must be
saved in the following format for it to work as-coded:
<YYYYMMDD>.<ticket_number>.<report_name>.csv
ex: 20150918.1234567.ioc_report.csv
python ioc2splunk.py "./path/to/<report_name>.csv"
#### Requirements ####
- backup folder is defined by backup_folder (default: "./backup") and must be created before execution
- master_splunk_file is where the resulting Splunk lookup table will be created, and
the directory must exist before execution
- <report_name> must match the following regex: "((?![a-zA-Z0-9_\-\[\]]).)+"
( i.e. alpha-numeric characters and _-[] )
#### Flow ####
- Adds the following data/columns to csv after parsed
- date_added
- ticket_number
- report title
- status
- notes
- Parse out resulting IOC CSV from iocp.py into Splunk Lookup Table format
- Check backup folder for backup files older than 30 days and delete them
- Checks current Splunk table for IOCs older than 30 days and removes them
- Checks current Splunk table w/ new IOC results to de-dup
- Appends non-duplicates to Splunk table (with dates for future removal)
- Saves final Splunk table backup
'''
#### Imports ####
import os
import csv
from datetime import datetime, date, time
import shutil
import sys
#### Vars ####
test_ioc_csv = sys.argv[1]
temp_splunk_csv = "./new_splunk_iocs2.csv"
backup_folder = "./backup"
master_splunk_file = "./master_splunk_table/master.splunk_table.csv"
#### Filename Parser ####
def parseFilename(test_ioc_csv):
# This gets the important info from the filename being fed
parsed_results = {}
#Split for Linux
filename = test_ioc_csv.rsplit("/",1)[-1]
#Split for Windows
filename = filename.rsplit("\\",1)[-1]
#Split on periods
filename_split = filename.split(".")
    if len(filename_split)!=4:
        #Quit
        print "Something went wrong - filename should only have 4 parts (three periods): "+str(filename_split)
        sys.exit(1)
    else:
parsed_results["date_added"] = filename_split[0]
parsed_results["ticket_number"] = filename_split[1]
parsed_results["report_title"] = filename_split[2]
return parsed_results
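#Illustrative self-check (added for clarity; not in the original script):
#the filename convention described in the header parses into its three leading parts.
assert parseFilename("20150918.1234567.ioc_report.csv") == {
    "date_added": "20150918",
    "ticket_number": "1234567",
    "report_title": "ioc_report",
}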
#### Get the data from the IOCP.py output csv and add it to a stop-gap file (temp_splunk_f) ####
def tempCSV(temp_splunk_csv, test_ioc_csv):
headers = ["date_added", "ticket_number", "report_title", "ioc_type", "ioc", "status", "notes"]
with open(temp_splunk_csv, "wb") as temp_splunk_f:
writer = csv.writer(temp_splunk_f)
#Write the headers to the first row of the CSV file
writer.writerows([headers])
#Parse out the important info from the filename
parsed_filename = parseFilename(test_ioc_csv)
date_added = parsed_filename["date_added"]
ticket_number = parsed_filename["ticket_number"]
report_title = parsed_filename["report_title"]
#### Read IOCP Results CSV ####
with open(test_ioc_csv, 'rb') as iocpf:
try:
reader = csv.reader(iocpf, delimiter="\t")
for row in reader:
#For each row in the IOCP results csv
#Build the new row that's about to be written
row_list = []
row_list.append(date_added)
row_list.append(ticket_number)
row_list.append(report_title)
colnum = 0
for col in row:
#Do something with the column
#iocp.py outputs these values: file_path, page_no, ioc_type, ioc
#print col
if colnum==2:
#This is the IOC type
#print col
row_list.append(col)
if colnum==3:
#This is the IOC
#print col
row_list.append(col)
colnum+=1
#resulting row_list should contain date_added,ticket_number,report_title,ioc_type,ioc
#status and notes headers are for manual addition by the analyst
writer.writerows([row_list])
finally:
iocpf.close()
temp_splunk_f.close()
'''
- Check backup folder for backup files older than 30(?) days and delete them
'''
def purgeBackups(backup_folder):
#Walk through backup directory and delete anything with a dated filename older than 30 days from today
backup_files=os.listdir(backup_folder)
today_date = datetime.today()
#today_date = int(d.strftime("%Y%m%d"))
for f in backup_files:
parsed_filename=parseFilename(f)
date_added=parsed_filename["date_added"]
date_added = datetime.strptime(date_added, "%Y%m%d")
td = today_date-date_added
if td.days > 30:
file_path = backup_folder+"/"+f
os.remove(file_path)
'''
- Checks current Splunk table for IOCs older than 30 days and removes them
'''
def purgeIOCs(master_splunk_file):
#Check that master_splunk_file exists
if os.path.isfile(master_splunk_file):
#Check each IOC to see if it's older than 30 days. If so, remove it.
with open(master_splunk_file, 'rb') as splunk_master:
try:
reader = csv.reader(splunk_master)
splunk_master_list=[]
today_date = datetime.today()
#today_date = int(d.strftime("%Y%m%d"))
rowcount=0
for row in reader:
                    if rowcount==0:
                        #add the header row
                        splunk_master_list.append(row)
                    if rowcount!=0:
                        #reset the column counter for each data row
                        colnum=0
                        for col in row:
if colnum==0:
date_added = datetime.strptime(col, "%Y%m%d")
td = today_date-date_added
if td.days <= 30:
splunk_master_list.append(row)
colnum+=1
rowcount+=1
finally:
splunk_master.close()
#print splunk_master_list
with open(master_splunk_file, 'wb') as splunk_master:
try:
writer = csv.writer(splunk_master)
for row in splunk_master_list:
writer.writerows([row])
finally:
splunk_master.close()
'''
- Checks current Splunk table w/ new IOC results to de-dup
- Appends non-duplicates to Splunk table (with dates for future removal)
'''
def addIOCs(master_splunk_file, temp_splunk_csv):
#Check if master_splunk_file exists
if os.path.isfile(master_splunk_file):
#Open master_splunk_file and read line-by-line
with open(master_splunk_file, 'rb') as splunk_master:
try:
reader = csv.reader(splunk_master)
splunk_master_iocs = []
rowcount = 0
                for row in reader:
                    if rowcount == 0:
                        #Skip the header row; count it so the data rows are still processed
                        rowcount += 1
                        continue
                    colnum = 0
for col in row:
if colnum==4:
#Add each IOC to a list
splunk_master_iocs.append(col)
colnum+=1
rowcount += 1
finally:
#Close master_splunk_file
splunk_master.close()
#print splunk_master_iocs
else:
splunk_master_iocs = []
#Open temp_splunk_csv and read line by line
with open(temp_splunk_csv) as temp_splunk:
try:
reader = csv.reader(temp_splunk)
rowcount = 0
            for row in reader:
                if rowcount == 0 and os.path.isfile(master_splunk_file):
                    #Skip the temp file's header row when the master already has one
                    rowcount += 1
                    continue
                colnum = 0
for col in row:
if colnum==4:
#Check if the IOC is in the master splunk list - if not, add the whole row
#print col
if col not in splunk_master_iocs:
#print col
#print row
fd = open(master_splunk_file, 'ab')
writer = csv.writer(fd)
writer.writerows([row])
fd.close()
colnum+=1
rowcount +=1
finally:
#Close temp_splunk_csv and then delete it
temp_splunk.close()
#os.remove(temp_splunk_csv) #### Put this back in later
'''
- Saves final Splunk table backup
'''
def saveCopies(backup_folder, master_splunk_file):
#Copy the newly created master_splunk_file with this naming convention to the backup folder:
#<date_added>.master.splunk_table.csv
d = datetime.today()
today_date = str(d.strftime("%Y%m%d"))
backup_master_splunk = today_date+".master.splunk_table.csv"
dest_file=backup_folder+"/"+backup_master_splunk
shutil.copy(master_splunk_file, dest_file)
#### Execute all Functions ####
#### Get the data from the IOCP.py output csv (test_ioc_csv) and add it to a stop-gap file (temp_splunk_csv) ####
tempCSV(temp_splunk_csv, test_ioc_csv)
#### Walk through backup directory and delete anything with a dated filename older than 30 days from today ####
purgeBackups(backup_folder)
#### Check each IOC to see if it's older than 30 days. If so, remove it. ####
purgeIOCs(master_splunk_file)
#### De-dup and add new IOCs to master Splunk file ####
addIOCs(master_splunk_file, temp_splunk_csv)
#### Copy the newly created master_splunk_file with this naming convention to the backup folder:
#### <date_added>.master.splunk_table.csv
saveCopies(backup_folder, master_splunk_file)
|
# -*- coding: utf-8 -*-
"""
Created on Sat Jul 28 23:04:58 2018
@author: SilverDoe
"""
k = [0, 1, 121, 12321]
for i in range(int(input('enter the limit'))):
    print(k[i])
for i in range(1,int(input('enter the limit'))): #More than 2 lines will result in 0 score. Do not leave a blank
    print((10**(i)//9)*i)
|
from __future__ import print_function
import os
from importlib import import_module
from pyqtgraph import reload
## Extend pyqtgraph.flowchart by adding several specialized nodes
from pyqtgraph.flowchart import *
from pyqtgraph.flowchart.library import isNodeClass
from acq4.util.debug import printExc
def loadLibrary(reloadLibs=False):
global NODE_LIST, NODE_TREE
libPath = os.path.dirname(os.path.abspath(__file__))
if reloadLibs:
reload.reloadAll(libPath)
for f in os.listdir(libPath):
pathName, ext = os.path.splitext(f)
if ext != '.py' or '__init__' in pathName or '__main__' in pathName:
continue
try:
mod = import_module(".{}".format(pathName), package="acq4.util.flowchart")
# mod = __import__('.' + pathName, globals(), locals())
except:
printExc("Error loading flowchart library %s:" % pathName)
continue
for n in dir(mod):
o = getattr(mod, n)
if isNodeClass(o):
registerNodeType(o, [(pathName,)], override=reloadLibs)
loadLibrary()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import glob, re, json, random, shutil
from PIL import Image
import numpy as np
#Image size
IMAGE_SIZE = 224
#Number of channels
CHANNEL_SIZE = 3
#Function that creates the label dictionary
def make_label_list():
    #Get the directory paths
    dir_path_list = glob.glob('image/*')
    #Prepare the dictionary
    label_dic = {}
    for i, dir_path in enumerate(dir_path_list):
        key = re.search(r'image/(.+)', dir_path)
        key = key.group(1)
        label_dic[key] = i
    #Save the dictionary as JSON
    #(dump the dict directly; the original double-encoded it with json.dumps first)
    with open("./data/label_dic.json", "w") as f:
        json.dump(label_dic, f)
return label_dic
#Function that converts an image to numeric data
def convert_image(img_path):
    try:
        #Get the image name
        image_name = re.search(r'image/(.+)', img_path)
        image_name = image_name.group(1)
        #Grayscale image
        if CHANNEL_SIZE == 1:
            img = Image.open(img_path).convert('L')
        #Color image
        else:
            img = Image.open(img_path).convert('RGB')
        #Resize the image
        img = img.resize((IMAGE_SIZE, IMAGE_SIZE))
        #Convert the image data to a numpy array
        x = np.asarray(img, dtype=np.float32)
        #Normalize
        x /= 255
        #Reshape the numpy array
        x = x.reshape(CHANNEL_SIZE, IMAGE_SIZE, IMAGE_SIZE)
        return x
    except Exception as e:
        shutil.move(img_path, "noise")
        x = []
        #Debug
        print("[Error] {0} <{1}>".format(img_path, e))
        return x
#Function that gets the label data
def get_label_data(img_path, label_dic):
    #Get the image's directory name
    key = re.search(r'image/(.+)/.+', img_path)
    key = key.group(1)
    #Look up the label in the dictionary
    t = label_dic[key]
    #Convert the label to a numpy array
    t = np.asarray(t, dtype=np.int32)
    return t
#Function that builds the dataset
def make_dataset(label_dic):
    #Debug:
    #print the number of images per class
    dir_path_list = glob.glob('image/*')
    for dir_path in dir_path_list:
        img_path_list = glob.glob(dir_path+'/*.jpg')
        dir_name = re.search(r'image/(.+)', dir_path)
        dir_name = dir_name.group(1)
        print("{0}: {1}".format(dir_name, len(img_path_list)))
    print("")
    #Get the list of image paths
    img_path_list = glob.glob('image/*/*.jpg')
    #Shuffle the images
    random.shuffle(img_path_list)
    #Prepare a list for the image data
    image_data = []
    #Prepare a list for the label data
    label_data = []
    for img_path in img_path_list:
        #Convert the image to numeric data
        x = convert_image(img_path)
        #(len() check avoids the ambiguous numpy array comparison "x == []")
        if len(x) == 0:
            continue
        #Get the label data
        t = get_label_data(img_path, label_dic)
        #Append to the lists
        image_data.append(x)
        label_data.append(t)
    #Path for saving the image data
    save_image_path = "./data/image_data.npy"
    #Path for saving the label data
    save_label_path = "./data/label_data.npy"
    #Save the image data to a file
    np.save(save_image_path, image_data)
    #Save the label data to a file
    np.save(save_label_path, label_data)
    #Debug
    print("total: {0}".format(len(img_path_list)))
if __name__ == '__main__':
    #Create the labels
    label_dic = make_label_list()
    #Create the dataset
    make_dataset(label_dic) |
import warnings
import numpy as np
import pytest
from numpy.testing import assert_allclose
from einsteinpy.geodesic import Geodesic, Nulllike, Timelike
@pytest.fixture()
def dummy_timegeod():
"""
Equatorial Capture
"""
return Timelike(
metric="Kerr",
metric_params=(0.9,),
position=[2.15, np.pi / 2, 0.],
momentum=[0., 0., 1.5],
steps=50,
delta=0.5,
omega=0.01, # Close orbit
return_cartesian=True,
suppress_warnings=True,
)
@pytest.fixture()
def dummy_nullgeod():
"""
Equatorial Geodesic
"""
return Nulllike(
metric="Kerr",
metric_params=(0.5,),
position=[4., np.pi / 2, 0.],
momentum=[0., 0., 2.],
steps=50,
delta=0.5,
return_cartesian=False,
suppress_warnings=True,
)
def test_str_repr(dummy_timegeod):
geod = dummy_timegeod
assert str(geod) == repr(geod)
def test_NotImplementedError():
    # An unknown metric name ("Ker") should raise NotImplementedError
    with pytest.raises(NotImplementedError):
        Nulllike(
            metric="Ker",
            metric_params=(0.9,),
            position=[2.5, np.pi / 2, 0.],
            momentum=[0., 0., -8.5],
        )
def test_geodesic_attributes1(dummy_timegeod):
geod = dummy_timegeod
traj = geod.trajectory
assert traj
assert isinstance(traj, tuple)
assert isinstance(traj[0], np.ndarray)
def test_geodesic_attributes2(dummy_timegeod):
geod = dummy_timegeod
traj = geod.trajectory
assert isinstance(traj[1], np.ndarray)
assert traj[0].shape[0] == traj[1].shape[0]
assert traj[1].shape[1] == 8
def test_constant_angular_momentum(dummy_nullgeod):
L = dummy_nullgeod.momentum[-1]
assert_allclose(dummy_nullgeod.trajectory[1][:, -1], L, atol=1e-4, rtol=1e-4)
def test_equatorial_geodesic(dummy_nullgeod):
theta = dummy_nullgeod.position[2]
assert_allclose(dummy_nullgeod.trajectory[1][:, 2], theta, atol=1e-6, rtol=1e-6)
def test_constant_rad():
geod = Timelike(
metric="Kerr",
metric_params=(0.99,),
position=[4., np.pi / 3, 0.],
momentum=[0., 0.767851, 2.],
return_cartesian=False,
steps=50,
delta=1.,
)
r = geod.trajectory[1][:, 1]
assert_allclose(r, 4., atol=1e-2, rtol=1e-2)
def test_kerr0_eq_sch():
metric_params = (0.,)
q0 = [4., np.pi / 2, 0.]
p0 = [0., 0., 0.]
k = Timelike(
metric="Kerr",
metric_params=metric_params,
position=q0,
momentum=p0,
steps=50,
delta=0.5,
return_cartesian=True,
suppress_warnings=True,
)
s = Timelike(
metric="Schwarzschild",
metric_params=metric_params,
position=q0,
momentum=p0,
steps=50,
delta=0.5,
return_cartesian=True,
suppress_warnings=True,
)
assert_allclose(k.trajectory[0], s.trajectory[0], atol=1e-6, rtol=1e-6)
assert_allclose(k.trajectory[1], s.trajectory[1], atol=1e-6, rtol=1e-6)
def test_kerr0_eq_kn00():
metric_params = (0.5, 0.)
q0 = [2.5, np.pi / 2, 0.]
p0 = [0., 0., -8.5]
k = Timelike(
metric="Kerr",
metric_params=metric_params,
position=q0,
momentum=p0,
steps=50,
delta=0.5,
return_cartesian=True,
suppress_warnings=True,
)
kn = Timelike(
metric="KerrNewman",
metric_params=metric_params,
position=q0,
momentum=p0,
steps=50,
delta=0.5,
return_cartesian=True,
suppress_warnings=True,
)
assert_allclose(k.trajectory[0], kn.trajectory[0], atol=1e-6, rtol=1e-6)
assert_allclose(k.trajectory[1], kn.trajectory[1], atol=1e-6, rtol=1e-6)
|
from protocolbuffers.Math_pb2 import Quaternion
from interactions.constraints import Anywhere, SmallAreaConstraint
from routing import FootprintType
from routing.waypoints.waypoint_generator import _WaypointGeneratorBase
from sims4.geometry import build_rectangle_from_two_points_and_radius, PolygonFootprint
from sims4.tuning.tunable import TunableRange, TunableTuple, OptionalTunable, Tunable, TunableAngle
import placement
import routing
import sims4.geometry
import sims4.log
import sims4.math
import sims4.random
logger = sims4.log.Logger('_WaypointGeneratorUnobstructedLine', default_owner='rrodgers')
class _WaypointGeneratorUnobstructedLine(_WaypointGeneratorBase):
FACTORY_TUNABLES = {'line_length': TunableRange(description='\n The radius, in meters, of the generated constraint around the \n target object where the waypoints will be generated.\n ', tunable_type=float, default=3, minimum=0), 'fgl_parameters': TunableTuple(description='\n Arguments that will affect the FGL.\n ', min_water_depth=OptionalTunable(description='\n (float) If provided, each vertex of the line polygon along with its centroid will\n be tested to determine whether the ocean water at the test location is at least this deep.\n 0 indicates that all water placement is valid. To allow land placement, leave untuned.\n ', tunable=TunableRange(description='\n Value of the min water depth allowed.\n ', minimum=0, tunable_type=float, default=0)), max_water_depth=OptionalTunable(description='\n (float) If provided, each vertex of the line polygon along with its centroid will\n be tested to determine whether the ocean water at the test location is at most this deep.\n To disallow water placement, set to 0.\n ', tunable=TunableRange(description='\n Value of the max water depth allowed.\n ', tunable_type=float, minimum=0, maximum=1000.0, default=1000.0)))}
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._footprint = None
def get_start_constraint(self):
return self.get_water_constraint(self.fgl_parameters.min_water_depth, self.fgl_parameters.max_water_depth)
def clean_up(self):
if self._footprint is None:
return
self._target.routing_context.remove_footprint_contour_override(self._footprint.footprint_id)
self._footprint = None
def get_waypoint_constraints_gen(self, routing_agent, waypoint_count):
line_length_offset = sims4.math.Vector3(0, 0, self.line_length)
object_radius = routing_agent.routing_component.object_radius
start = routing_agent.position
initial_orientation = sims4.random.random_orientation()
end = initial_orientation.transform_vector(line_length_offset) + start
polygon = build_rectangle_from_two_points_and_radius(start, end, object_radius)
starting_location_for_sample = placement.create_starting_location(position=start, routing_surface=self._routing_surface)
water_constraint = self.get_water_constraint(self.fgl_parameters.min_water_depth, self.fgl_parameters.max_water_depth)
fgl_context = placement.FindGoodLocationContext(starting_location_for_sample, object_polygons=(polygon,), ignored_object_ids=[routing_agent.id, self._context.sim.sim_id], max_distance=0, min_water_depth=water_constraint.get_min_water_depth(), max_water_depth=water_constraint.get_max_water_depth())
(_, orientation) = placement.find_good_location(fgl_context)
if orientation is None:
return
final_orientation = sims4.math.Quaternion.concatenate(orientation, initial_orientation)
oriented_line_offset = final_orientation.transform_vector(line_length_offset)
waypoint_constraints = []
for waypoint_index in range(0, waypoint_count):
percent_down_line = waypoint_index/(waypoint_count - 1)
            goal_position = oriented_line_offset*percent_down_line + start
            geometry = sims4.geometry.RestrictedPolygon(sims4.geometry.CompoundPolygon(sims4.geometry.Polygon((goal_position,))), ())
constraint = SmallAreaConstraint(geometry=geometry, routing_surface=self._routing_surface, min_water_depth=water_constraint.get_min_water_depth(), max_water_depth=water_constraint.get_max_water_depth())
waypoint_constraints.append(constraint)
end = oriented_line_offset + start
polygon = build_rectangle_from_two_points_and_radius(start, end, object_radius)
self._footprint = PolygonFootprint(polygon, routing_surface=self._routing_surface, cost=routing.get_default_discouragement_cost(), footprint_type=FootprintType.FOOTPRINT_TYPE_OBJECT, enabled=True)
routing_agent.routing_component.pathplan_context.ignore_footprint_contour(self._footprint.footprint_id)
yield from waypoint_constraints
|
__all__ = ("Console", "FileHelper")
|
import os
import traceback
import infosec.utils as utils
from infosec.utils import smoke
@smoke.smoke_check
def check_q1():
try:
result = utils.execute(['python3', 'q1.py', 'q1.pcapng'])
if result.exit_code:
            smoke.error('ERROR: `python3 q1.py q1.pcapng` exited with non-zero code {}'
                        .format(result.exit_code))
lines = [l.strip() for l in result.stdout.splitlines() if l.strip()]
if not len(lines) == 1:
smoke.error(("ERROR: `python3 q1.py q1.pcapng` should return exactly one "
+ "line of ('user', 'password'), (as the .pcapng should have one "
+ "login attempt), but it returned {} lines:")
.format(len(lines)))
print(result.stdout)
else:
smoke.success("q1.py looks good")
except Exception as e:
smoke.error('ERROR: Failed running/analyzing `python3 q1.py q1.pcapng`')
traceback.print_exc()
def smoketest():
os.chdir(os.path.dirname(os.path.abspath(__file__)))
smoke.check_if_nonempty('q1.py')
smoke.check_if_nonempty('q1.txt')
smoke.check_if_nonempty('q1.pcapng')
check_q1()
smoke.check_if_nonempty('q2.py')
smoke.check_if_nonempty('q2.txt')
smoke.check_if_nonempty('q3.py')
smoke.check_if_nonempty('q3.txt')
if __name__ == '__main__':
smoketest()
|
from __future__ import division
import codecs
import json
import re
import nltk
from nltk import sent_tokenize
from nltk import word_tokenize
def avg_wc(sentence):
    #Despite the name, this returns the average word length
    #(characters per word) of the sentence
    letter_size = 0
no_of_words = 0
word = word_tokenize(sentence)
no_of_words = len(word)
for w in word:
letter_size += len(w)
return letter_size/no_of_words
with codecs.open("twitter.txt", encoding='utf-8') as f: #Decoding ASAP as per the Unicode Sandwich !!
text = f.read()
sentences = sent_tokenize(text)
sentences = [re.sub(u'\n',' ',sent) for sent in sentences]
average_word_count = [avg_wc(sent) for sent in sentences]
#appending thorn and the line number to the sentence
new_sentences = [unicode(i)+u'þ"'+sent +u'"þ' for i,sent in enumerate(sentences)]
i = 0
while i<len(average_word_count):
new_sentences[i] = new_sentences[i]+unicode(average_word_count[i])
i+=1
#Creamy Unicode Ends
for each_line in new_sentences:
each_line = each_line.encode('utf-8') #Encoding as late as possible
print each_line
#Problem 2:
#Creating a structure for the xml and looping through it
header = u'<document>\n\t<sentences>'
header = header.encode('utf-8')
print header
#a tuple containing id,sentences and average word count
data_tuple = [[0]*3 for i in range(0,len(sentences))]
for i in range(0,len(sentences)):
data_tuple[i][0] = i
data_tuple[i][1] = sentences[i]
data_tuple[i][2] = (average_word_count[i])
container = u''
for i in range(0,len(data_tuple)):
container += u'\t\t<sentence id ="'+unicode(data_tuple[i][0])+u'">\n\t\t\t<text>'+unicode(data_tuple[i][1])+u'</text>\n\t\t\t<avg>'+unicode(int(data_tuple[i][2]))+u'</avg>\n\t\t</sentence>\n'
container = container.encode('utf-8')
print container
footer = u'\t</sentences>\n</document>'
footer = footer.encode('utf-8')
print footer
#Problem 3
#making use of the inbuilt json.dumps function to create the structure
json_struct = {"documents":{"sentences":[{"text":text,"avg":avg,"id":ids} for ids,text,avg in data_tuple]}}
print json.dumps(json_struct,indent = 4)
|
import unittest
import comb
class MaskKCombForAllKTestCase(unittest.TestCase):
def test_mask_k_comb_for_all_k(self):
self.assertEqual(
comb.mask_k_comb_for_all_k(('a', 'b', 'c')),
{
('*', '*', '*'),
('*', '*', 'c'),
('*', 'b', '*'),
('*', 'b', 'c'),
('a', '*', '*'),
('a', '*', 'c'),
('a', 'b', '*'),
('a', 'b', 'c')
}
)
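# Reference sketch (added for illustration; the real implementation lives in
# the comb module under test). One way to produce every masking of positions
# with '*', for all k at once, is a per-position choice product: each position
# independently keeps its value or becomes '*', giving all 2**n maskings.
from itertools import product

def _reference_mask_k_comb_for_all_k(items):
    # Cartesian product of (value, '*') choices, one pair per position
    return set(product(*((x, '*') for x in items)))

class ReferenceSketchTestCase(unittest.TestCase):
    def test_reference_matches_expected_set(self):
        self.assertEqual(
            _reference_mask_k_comb_for_all_k(('a', 'b', 'c')),
            {
                ('*', '*', '*'),
                ('*', '*', 'c'),
                ('*', 'b', '*'),
                ('*', 'b', 'c'),
                ('a', '*', '*'),
                ('a', '*', 'c'),
                ('a', 'b', '*'),
                ('a', 'b', 'c')
            }
        )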
|
# Created by Thomas Jones on 27/02/16 - [email protected]
# ratinglimiter.py, a plugin for minqlx to limit a server to players within certain ratings.
# This plugin is released to everyone, for any purpose. It comes with no warranty, no guarantee it works, it's released AS IS.
# You can modify everything, except for lines 1-4 and the !tomtec_versions code. They're there to indicate I whacked this together originally. Please make it better :D
import minqlx, requests
class ratinglimiter(minqlx.Plugin):
def __init__(self):
self.add_hook("player_connect", self.handle_player_connected)
self.add_hook("player_loaded", self.handle_player_loaded)
self.add_hook("team_switch_attempt", self.handle_team_switch)
self.add_hook("new_game", self.handle_new_game)
self.add_command("tomtec_versions", self.cmd_showversion)
self.set_cvar_once("qlx_minRating", "0")
self.set_cvar_once("qlx_maxRating", "1600")
self.set_cvar_once("qlx_kickPlayersOutOfRatingBounds", "1")
self.set_cvar_once("qlx_ratingOverridePermission", "4")
self.set_cvar_once("qlx_balanceUrl", "qlstats.net:8080")
self.set_cvar_once("qlx_balanceApi", "elo")
self.prohibitedPlayers = []
self.plugin_version = "1.3"
def handle_new_game(self):
self.prohibitedPlayers = [] # clear all banned players so their glickos can be recalculated.
def handle_player_connected(self, player):
if (player in self.prohibitedPlayers) and (self.get_cvar("qlx_kickPlayersOutOfRatingBounds", bool)):
return "^1You are not permitted to join this server."
def handle_player_loaded(self, player):
if not self.db.has_permission(player, self.get_cvar("qlx_ratingOverridePermission", int)):
self.send_request(player)
def send_request(self, player):
try:
url = "http://{}/{}/{}".format(self.get_cvar("qlx_balanceUrl"), self.get_cvar("qlx_balanceApi"), player.steam_id)
res = requests.get(url)
            if res.status_code != requests.codes.ok:
                raise Exception("Requests status code is not equal to 200 OK.")
            js = res.json()
            gt = self.game.type_short
            if "players" not in js:
                raise Exception("Strange data was received...")
for p in js["players"]:
if int(p["steamid"]) == player.steam_id and gt in p:
self.process_player(player, p[gt]['elo'], p[gt]['games'])
except Exception as e:
self.msg("^1{}: ^7{}".format(Exception, e))
pass
def handle_team_switch(self, player, old_team, new_team):
if player in self.prohibitedPlayers:
player.center_print("^1You are not permitted to join the game.")
return minqlx.RET_STOP_ALL
@minqlx.next_frame
def process_player(self, player, glicko, games_played):
if glicko > self.get_cvar("qlx_maxRating", int): # player's glicko is higher than the server allows
self.prohibitedPlayers.append(player)
playerIs = "over"
elif glicko < self.get_cvar("qlx_minRating", int): # player's glicko is lower than the server allows
self.prohibitedPlayers.append(player)
playerIs = "under"
elif (glicko == self.get_cvar("qlx_minRating", int)) or (glicko == self.get_cvar("qlx_maxRating", int)): # player's glicko is the same as either of the limits
if self.get_cvar("qlx_kickPlayersOutOfRatingBounds", bool):
suffix = "Don't be surprised if you're kicked after some games."
else:
suffix = ""
player.tell("Your glicko ({}) is on the borderline of the server's glicko limits. {}".format(glicko, suffix))
            return
        else:
            #glicko is strictly within the server's limits; nothing to enforce
            return
        if playerIs == "over":
limit = self.get_cvar("qlx_maxRating", int)
else:
limit = self.get_cvar("qlx_minRating", int)
if not self.get_cvar("qlx_kickPlayersOutOfRatingBounds", bool):
player.tell("Sorry, your glicko rating ({}) is {} the glicko limitation on this server ({}).".format(glicko, playerIs, limit))
player.tell("You can spectate, but you cannot join this game until you meet the glicko requirements.")
elif player in self.prohibitedPlayers:
player.kick("Sorry, your glicko rating ({}) is {} the glicko limitation on this server ({}).".format(glicko, playerIs, limit))
def cmd_showversion(self, player, msg, channel):
channel.reply("^4ratinglimiter.py^7 - version {}, created by Thomas Jones on 27/02/2016.".format(self.plugin_version))
|
from elasticsearch import Elasticsearch
from filehash import FileHash
PUNCTUATION = r"""!"#$%&'()*+,-./'’“”—:;<=>–?«»@[\]^_`©‘…{|}~"""
DOCUMENTS_DIR = '/documents'
DOCUMENTS_INDEX = 'library'
ES = Elasticsearch(['192.168.2.145:9200'])
SHA256 = FileHash('sha256')
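# Usage sketch (added for illustration; not part of the original module):
# how these constants are meant to combine — hash a document and index it
# under its digest. 'example.pdf' is a hypothetical file name, and the
# elasticsearch-py 7.x index() signature is assumed.
if __name__ == '__main__':
    import os
    path = os.path.join(DOCUMENTS_DIR, 'example.pdf')
    digest = SHA256.hash_file(path)
    ES.index(index=DOCUMENTS_INDEX, id=digest, body={'path': path})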
|
# -*- coding: utf-8 -*-
################################################################################
## Form generated from reading UI file 'errordialog.ui'
##
## Created by: Qt User Interface Compiler version 6.1.1
##
## WARNING! All changes made in this file will be lost when recompiling UI file!
################################################################################
from PySide6.QtCore import * # type: ignore
from PySide6.QtGui import * # type: ignore
from PySide6.QtWidgets import * # type: ignore
class Ui_ErrorDialog(QDialog):
    # Dialog box used for creating or editing rows
def __init__(self, parent=None):
QDialog.__init__(self, parent=parent)
self.setupUi(self)
return
def setupUi(self, Dialog):
if not Dialog.objectName():
Dialog.setObjectName(u"Dialog")
Dialog.resize(418, 168)
self.gridLayout = QGridLayout(Dialog)
self.gridLayout.setObjectName(u"gridLayout")
self.label = QLabel(Dialog)
self.label.setObjectName(u"label")
font = QFont()
font.setPointSize(13)
self.label.setFont(font)
self.label.setAlignment(Qt.AlignCenter)
self.gridLayout.addWidget(self.label, 0, 0, 1, 1)
self.quitLabel = QLabel(Dialog)
self.quitLabel.setObjectName(u"quitLabel")
self.quitLabel.setAlignment(Qt.AlignCenter)
self.gridLayout.addWidget(self.quitLabel, 1, 0, 1, 1)
self.retranslateUi(Dialog)
QMetaObject.connectSlotsByName(Dialog)
# setupUi
def retranslateUi(self, Dialog):
Dialog.setWindowTitle(QCoreApplication.translate("Dialog", u"Aten\u00e7\u00e3o!", None))
self.label.setText("")
self.quitLabel.setText(QCoreApplication.translate("Dialog", u"Pressione ESC para sair", None))
# retranslateUi
|
import pytest
#import lscf
def test_get_env_settings():
# lscf.main()
assert True
|
"""
This module contains the API service for working with booking.moh.gov.ge's API.
This file is part of the vaccination.py.
(c) 2021 Temuri Takalandze <[email protected]>
For the full copyright and license information, please view the LICENSE
file that was distributed with this source code.
"""
import json
from datetime import date
from typing import Any, Dict, Union, List
import requests
from requests.models import Response
from vaccination.service.api.base import BaseAPIService
class BookingAPIService(BaseAPIService):
"""
Service for working with the booking.moh.gov.ge's API.
"""
url_template = "https://booking.moh.gov.ge/$app/API/api$path"
security_numbers = []
def _make_request(self, method: str, **kwargs) -> Response:
kwargs["headers"] = {"SecurityNumber": self.__get_security_number()}
return super()._make_request(method, **kwargs)
def __get_security_number(self) -> str:
if not self.security_numbers:
self.security_numbers = requests.get(
"https://vaccination.abgeo.dev/api/numbers?count=10"
).json()
return self.security_numbers.pop(0)
def get_available_quantities(self, app: str = "def") -> Dict[str, int]:
"""
Make GET request to the "/Public/GetAvailableQuantities" endpoint.
:param app: Application.
:return: Endpoint response.
"""
_quantities = self._get(
url={"app": app, "path": "/Public/GetAvailableQuantities"}
)
_quantities = json.loads(_quantities)
quantities = {}
for service, quantity in _quantities.items():
quantities[service.lower()] = quantity
return quantities
def get_service_types(self, app: str = "def") -> List[Dict[str, str]]:
"""
Make GET request to the "/CommonData/GetServicesTypes" endpoint.
:param app: Application.
:return: Endpoint response.
"""
return self._get(url={"app": app, "path": "/CommonData/GetServicesTypes"})
def get_regions(
self, service: str, only_free: bool = True, app: str = "def"
) -> List[Dict[str, str]]:
"""
Make GET request to the "/CommonData/GetRegions" endpoint.
:param str service: Service ID.
:param bool only_free: Get only free.
:param app: Application.
:return: Endpoint response.
"""
return self._get(
url={"app": app, "path": "/CommonData/GetRegions"},
data={"serviceId": service, "onlyFree": only_free},
)
def get_municipalities(
self, region: str, service: str, only_free: bool = True, app: str = "def"
) -> List[Dict[str, str]]:
"""
Make GET request to the "/CommonData/GetMunicipalities/{region}" endpoint.
:param str region: Region ID.
:param str service: Service ID.
:param bool only_free: Get only free.
:param app: Application.
:return: Endpoint response.
"""
return self._get(
url={"app": app, "path": f"/CommonData/GetMunicipalities/{region}"},
data={"serviceId": service, "onlyFree": only_free},
)
def get_municipality_branches(
self, service: str, municipality: str, only_free: bool = True, app: str = "def"
) -> List[Dict[str, str]]:
"""
Make GET request to the
"/CommonData/GetMunicipalityBranches/{service}/{municipality}" endpoint.
:param str service: Service ID.
:param str municipality: Municipality ID.
:param bool only_free: Get only free.
:param str app: Application.
:return: Endpoint response.
"""
return self._get(
url={
"app": app,
"path": f"/CommonData/GetMunicipalityBranches/{service}/{municipality}",
},
data={"onlyFree": only_free},
)
def get_slots(
self,
branch: str,
region: str,
service: str,
start_date: date,
end_date: date,
app: str = "def",
) -> List[Dict[str, Union[str, List]]]:
"""
Make POST request to the "/PublicBooking/GetSlots" endpoint.
        :param str branch: Branch ID.
:param str region: Region ID.
:param str service: Service ID.
:param date start_date: Start date.
:param date end_date: End date.
:param str app: Application.
:return: Endpoint response.
"""
return self._post(
url={"app": app, "path": "/PublicBooking/GetSlots"},
json={
"branchID": branch,
"startDate": start_date.strftime("%Y-%m-%d"),
"endDate": end_date.strftime("%Y-%m-%d"),
"regionID": region,
"serviceID": service,
},
)
def search_booking(
self,
personal_number: str,
booking_number: str,
app: str = "def",
    ) -> Dict[str, Any]:
"""
Make GET request to the
"/Booking/SearchBookingByNumber/{personal_number}/{booking_number}" endpoint.
:param str personal_number: Personal Number.
:param str booking_number: Booking Number.
:param app: Application.
:return: Endpoint response.
"""
return self._get(
url={
"app": app,
"path": f"/Booking/SearchBookingByNumber/{booking_number}/{personal_number}",
}
)
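# Usage sketch (added for illustration; not part of the original module).
# It assumes BaseAPIService needs no constructor arguments; adjust to the
# real base class if it does.
if __name__ == "__main__":
    api = BookingAPIService()
    for service_type in api.get_service_types():
        print(service_type)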
|
import csv
import os
import sys
import collections
import math
import cPickle as pickle
import sexpdata
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
import py_common
from inlining_tree import parse_time, geometric_mean
PLUGIN_SUBDIR = os.environ.get("PLUGINS_SUBDIR", "plugins")
def read(algo):
with open("report_plots/data_generation/" % algo, 'rb') as f:
(exec_times, initial_exec_times) = pickle.load(f)
return (exec_times, initial_exec_times)
def read_plugin_times(benchmark, plugin_name):
bench_dir = "../results/%s/%s/" % (benchmark, PLUGIN_SUBDIR)
with open(os.path.join(bench_dir, "plugin_%s.csv" % plugin_name), "rb") as f:
times = []
for line in csv.reader(f):
for i in range(4, len(line)):
times.append(parse_time(line[i]))
if len(times) >= 1:
return geometric_mean(times)
else:
return None
def get_initial_time_from_records(benchmark):
initial_exec_times = []
try:
with open("../pca-data/%s.csv" % benchmark, "rb") as f:
for line in csv.reader(f):
t = parse_time(line[-1] + "s")
if "initial" in line[0]:
initial_exec_times.append(t)
if initial_exec_times:
return geometric_mean(initial_exec_times)
else:
return None
except IOError:
return None
def get_initial_time_from_results(benchmark):
return read_plugin_times(benchmark, plugin_name="nothing")
def get_initial_exec_time(benchmark):
arr = []
time_from_pca = get_initial_time_from_records(benchmark)
if time_from_pca is not None:
arr.append(time_from_pca)
t = get_initial_time_from_results(benchmark)
if t is not None:
arr.append(t)
if arr:
return min(arr)
else:
return None
def main():
initial_exec_time_by_bench = {}
all_records = collections.defaultdict(list)
exps = py_common.EXPERIMENT_TO_PARAMETERS.keys()
for benchmark in exps:
initial_exec_times = []
best_time = None
initial_exec_time_by_bench[benchmark] = get_initial_exec_time(benchmark)
for benchmark in exps:
plugin_names = []
for fname in os.listdir("../results/%s/plugins" % benchmark):
if "random" in fname:
plugin_names.append(os.path.splitext(os.path.basename(fname))[0].replace("plugin_", ""))
initial_exec_time = initial_exec_time_by_bench[benchmark]
for plugin_name in plugin_names:
time = read_plugin_times(benchmark, plugin_name=plugin_name)
ratio = None
if time is not None and initial_exec_time is not None:
speedup = (initial_exec_time - time) / initial_exec_time
else:
speedup = None
if time is not None:
all_records[benchmark].append((time, ratio, speedup, plugin_name))
arr = []
print "&".join(headers)
print ">>>>> TRAINING SET <<<<<"
for bench, items in all_records.iteritems():
if bench not in py_common.INITIAL_EXPERIMENTS:
continue
print_row(bench, items)
print ""
print ">>>>> TEST SET <<<<<"
for bench, items in all_records.iteritems():
if bench in py_common.INITIAL_EXPERIMENTS:
continue
print_row(bench, items)
def print_row(bench, items):
speedups = sorted([a for (_, _, a, _) in items])
min_speedup = np.min(speedups)
median_speedup = np.median(speedups)
max_speedup = np.max(speedups)
min_speedup = "%.3f\\%%" % (min_speedup * 100)
first_quartile = "%.3f\\%%" % (np.percentile(speedups, 25) * 100)
median_speedup = "%.3f\\%%" % (median_speedup * 100)
third_quartile = "%.3f\\%%" % (np.percentile(speedups, 75) * 100)
max_speedup = "%.3f\\%%" % (max_speedup * 100)
seen = []
import random
for _ in range(20):
acc = 0
for _ in range(10):
i = random.randint(0, len(speedups) - 1)
acc = max(acc, speedups[i])
seen.append(acc)
    pass_baseline = "?"
print "%s & %d & %s & %s & %s & %s & %s \\\\" % (bench, len(speedups), min_speedup, first_quartile, median_speedup, third_quartile, max_speedup)
headers = ["Benchmark", "Num samples", "Min", "25th Percentile", "Median", "75th Percentile", "Max"]
if __name__ == "__main__":
main()
|
#!/usr/bin/python3
###################################
# File Name : process_queue.py
###################################
import time
import multiprocessing
def set_data(q):
p = multiprocessing.current_process()
msg = "Hello World"
q.put(msg)
print ("[%s] set queue data : %s" % (p.name, msg))
def get_data(q):
time.sleep(1)
p = multiprocessing.current_process()
print ("[%s] get queue data : %s" % (p.name, q.get()))
def main():
queue = multiprocessing.Queue()
p1 = multiprocessing.Process(name="set_data", target=set_data, args=(queue,))
p1.start()
p2 = multiprocessing.Process(name="get_data", target=get_data, args=(queue,))
p2.start()
p1.join()
p2.join()
if __name__ == "__main__":
main()
|
""" multivariative simplicial weighted interpolation and extrapolation.
This is an implementation of four different interpolation
and extrapolation technics.
F_w - is for average weighted interpolation, also called baricentric.
It is a global scheme.
F_b - is a baricentric weighted simplicial interpolation. It is local and does
not provide derivative continuousness. Neigher it has a extrapolation method
implemented.
F_l - is for linear simplicial weighted interpolation. It is a local scheme, so it
needs simplicial complex, given as a list of simplexes, to operate. Note that
while interpolation function itself is continuous, its derivatives may not be
that way.
F_s - simplicial weighted interpolation. It is local, and being used with
appropriate weighting function and basis functions, it can provide derivative
continuousness up to any choosen level.
"""
__authors__ = [
'"Alexandr Kalenuk" <[email protected]>'
]
import operator
import math
###################### data functions section
def make_vector(n):
    ''' Makes an n-dimensional vector '''
return [0.0]*n
def copy_vector(V):
''' Copies input vector '''
return [Vi for Vi in V]
def make_matrix(n):
''' Makes square n-dimensional matrix '''
return [[0.0 for j in xrange(0,n)] for i in xrange(0,n)]
def copy_matrix(M):
''' Copies rectangular matrix '''
return [[Mij for Mij in Mi] for Mi in M]
def make_n_vectors(k,n):
    ''' Makes a k-sized list of n-dimensional vectors '''
return [[0.0 for j in xrange(0,n)] for i in xrange(0,k)]
###################### algebra section
def v_add(a,b):
''' Vector sum'''
return map(lambda s,d: s+d, a,b)
def v_sub(a,b):
''' Vector sub '''
return map(lambda s,d: s-d, a,b)
def v_dot(a,b):
''' Vector dot product '''
return sum(map(lambda s,d: s*d, a,b))
def v_len(a):
''' Vector length'''
return math.sqrt(sum(map(lambda s: s**2, a)))
def v_smul(s,a):
''' Multiplication by a scalar '''
return [s*ai for ai in a]
def v__E(a):
''' Levi-Civita symbol '''
n=0
t=[]
for ti in a:
t.append(ti)
for i in xrange(0,len(a)):
for j in xrange(0,len(a)-i-1):
if t[j]==t[j+1]:
return 0
elif t[j]>t[j+1]:
n+=1
t.insert(j+1,t.pop(j))
if n%2==0:return 1
return -1
def v_cross(A):
''' n-dimensional cross product on (n-1) vectors in list A '''
for a in A:
if len(a)!=len(A[0]):
print "Vector size mismatch in 'v_cross'"
DIMM=len(A[0])
N=len(A)
    if N!=DIMM-1:
        #fail loudly instead of the original deliberate out-of-bounds write
        raise ValueError("Vector number mismatch in 'v_cross'")
v_res=[]
for i in xrange(0,DIMM):
v_res.append(0.0)
for jk in xrange(0,DIMM**N):
v_ijk=[i]
for j in xrange(0,N):
v_ijk.append((jk/(DIMM**(N-j-1)))%DIMM)
t_res=v__E(v_ijk)
if t_res!=0:
for k in xrange(0,N):
t_res*=A[k][v_ijk[k+1]]
v_res[i]+=t_res
return v_res
def v_mixed(A):
''' n-dimensional triple product of n vectors in list A '''
for i in xrange(1,len(A)):
if len(A[i])!=len(A[i-1]):
print "Vector size mismatch in 'v_cross'"
DIMM=len(A[0])
N=len(A)
    if N!=DIMM:print "Vector number mismatch in 'v_mixed'"
v_res=0.0
for jk in xrange(0,DIMM**N):
v_ijk=[]
for j in xrange(0,N):
v_ijk.append((jk/(DIMM**(N-j-1)))%DIMM)
t_res=v__E(v_ijk)
if t_res!=0:
for k in xrange(0,N):
t_res*=A[k][v_ijk[k]]
v_res+=t_res
return v_res
###################### linear equations section
def Solve(A, B):
''' Linear system solving for AX=B. Returns vector X '''
N=len(B)
X=[0]*N
def a(i, j, n):
if n==N: return A[i][j]
return a(i,j,n+1)*a(n,n,n+1)-a(i,n,n+1)*a(n,j,n+1)
def b(i, n):
if n==N: return B[i]
return a(n,n,n+1)*b(i,n+1)-a(i,n,n+1)*b(n,n+1)
def x(i):
d=b(i,i+1)
for j in xrange(i): d-=a(i,j,i+1)*X[j]
return d/a(i,i,i+1)
for k in xrange(N):
X[k]=x(k)
return X
def Gauss(A, B):
''' Gauss method implementation for linear system solving.
Args:
A: matrix,
B: vector
for equation AX=B
Returns:
X for AX=B '''
N=len(B)
X=[0]*N
gA=[]
for i in xrange(0,len(A)):
ga=[]
for j in xrange(0,len(A[i])):
ga.append(float(A[i][j]))
gA.append(ga)
gB=[]
for i in xrange(0,len(B)):
gB.append(float(B[i]))
r=0.0
for k in xrange(0, N): X[k]=0.0
for k in xrange(0, N-1):
for j in xrange(0, k+1):
if gA[j][j]==0.0: gA[j][j]=0.000000001
r=gA[k+1][j]/gA[j][j]
gA[k+1][j]=0.0
for bJ in xrange(j+1,N):
gA[k+1][bJ]=gA[k+1][bJ]-gA[j][bJ]*r
gB[k+1]=gB[k+1]-gB[j]*r
if gA[N-1][N-1]==0: gA[N-1][N-1]=0.00000001
X[N-1]=gB[N-1]/gA[N-1][N-1]
for i in xrange(N-2,-1,-1):
s=0.0
for j in xrange(i,N):
s=s+gA[i][j]*X[j]
if gA[i][j]==0: gA[i][j]=0.0000001
X[i]=(gB[i]-s)/gA[i][i]
return X
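def _demo_solvers():
    ''' Sanity sketch (added for illustration; not in the original module):
        both solvers should agree on the small system
        2x + y = 3, x + 3y = 5, whose solution is x=0.8, y=1.4 '''
    A=[[2.0, 1.0], [1.0, 3.0]]
    B=[3.0, 5.0]
    return Solve(A, B), Gauss(A, B)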
###################### projection
def v_proj(a,S):
''' Projection on a simplex plane.
Args:
a: Projecting point.
S: Simplex given by a list of points.
Returns:
Point, which is an 'a' projection on 'S' simplex plane.
'''
for b in S:
if len(b)!=len(a): print "Vector sizes mismatch in v_proj"
if len(S)==1:
return S[0]
elif len(S)==2:
a0=v_sub(a,S[0])
v01=v_sub(S[1],S[0])
Ei=0.0
E=0.0
for i in xrange(0,len(a)):
Ei+=v01[i]*v01[i]
E+=a0[i]*v01[i]
return v_add(S[0],v_smul(float(E)/Ei,v01))
elif len(S)>2:
N=len(S)-1
a0=v_sub(a,S[0])
v0i=[]
for i in xrange(0,N):
v0i.append(v_sub(S[i+1],S[0]))
A=make_matrix(N)
B=make_vector(N)
for k in xrange(0,len(a)):
for i in xrange(0,N):
for j in xrange(0,N):
A[i][j]+=v0i[j][k]*v0i[i][k]
B[i]+=a0[k]*v0i[i][k]
I=Gauss(A,B)
to_ret=copy_vector(S[0])
for i in xrange(0,N):
to_ret=v_add(to_ret,v_smul(I[i],v0i[i]))
return to_ret
def v_proj_or(a,S, p0):
''' Projection on a simplex inner space. If a projection on a simplex plane
does not lie in its inner space, function returns p0
Args:
a: Point to project.
S: Simplex given by a list of its points.
Returns:
Point of projection or p0 if projection does not lie in a
simplex inner space
'''
ret=make_vector(len(a))
ret[0]=p0
for b in S:
if len(b)!=len(a): print "Vector sizes mismatch in v_proj"
if len(S)==1:
return S[0]
elif len(S)==2:
a0=v_sub(a,S[0])
v01=v_sub(S[1],S[0])
Ei=0.0
E=0.0
for i in xrange(0,len(a)):
Ei+=v01[i]*v01[i]
E+=a0[i]*v01[i]
k=float(E)/Ei
if k<0 or k>1.0:
return ret
return v_add(S[0],v_smul(k,v01))
elif len(S)>2:
N=len(S)-1
a0=v_sub(a,S[0])
v0i=[]
for i in xrange(0,N):
v0i.append(v_sub(S[i+1],S[0]))
A=make_matrix(N)
B=make_vector(N)
for k in xrange(0,len(a)):
for i in xrange(0,N):
for j in xrange(0,N):
A[i][j]+=v0i[j][k]*v0i[i][k]
B[i]+=a0[k]*v0i[i][k]
I=Gauss(A,B)
sum_I=0
for i in I:
if i<0 or i>1: return ret
sum_I+=i
if sum_I<0 or sum_I>1: return ret
to_ret=copy_vector(S[0])
for i in xrange(0,N):
to_ret=v_add(to_ret,v_smul(I[i],v0i[i]))
return to_ret
###################### common functions section
def v_k(vx, s_k):
''' Weight function for 'vx' vector
Args:
vx: vector to weight.
s_k: scalar weight function.
Returns:
Weight of the vector
'''
return reduce(operator.mul, [s_k(x) for x in vx])
def coords_in_simplex(sx,dot,pnt, xyz,Sx, crd=[]):
''' Determines if a point is in simplex
Args:
dot: point coordinates in an a basis set by simplex 1-edges.
sx: index for basic simplex in an array of simplexes 'Sx'.
pnt: index of an origin point in simplex.
xyz: list of points,
Sx: list of point indexes, representing simplicial complex.
crd: the return value for calculated coordinates.
Returns:
'True' if point is in a simplex, 'False' otherwise.
'''
DIMM=len(dot)
A=make_matrix(DIMM)
B=make_vector(DIMM)
crd=make_vector(DIMM)
cnt=0
p_pnt=Sx[sx][pnt]-1
for i in xrange(0,DIMM+1):
p_i=Sx[sx][i]-1
if p_i!=p_pnt:
for j in xrange(0,DIMM):
A[j][cnt]=xyz[p_i][j]-xyz[p_pnt][j]
cnt+=1
if cnt!=DIMM: print "WTF error: not enough points in simplex"
for j in xrange(0,DIMM):
B[j]=dot[j]-xyz[p_pnt][j]
crd=Gauss(A,B)
res=True
summ=0.0
for j in xrange(0,DIMM):
if not 1>=crd[j]>=0: res=False
summ+=crd[j]
if 1>=summ>=0 and res:
return True
else:
return False
def get_nearest_simplex(dot,xyz,Sx,sx, best_pack):
''' Finds a simplex which is the nearest to a 'dot' point.
Args:
        sx: A candidate simplex.
xyz: List of all points forming simplicial complex.
Sx: List of point indexes, representing simplicial complex.
best_pack: Structure for passing found data recoursively.
Returns:
List, first element of which represents nearest simplex index.
'''
new_pack=[best_pack[0],copy_vector(best_pack[1]),copy_vector(best_pack[2])]
new_S=[]
for i in sx:
new_S.append(xyz[i-1])
new_prj=v_proj_or(dot,new_S, 1.e10)
new_l=v_len(v_sub(new_prj,dot))
if new_l<best_pack[0]:
best_pack[0]=new_l
best_pack[1]=copy_vector(new_prj)
best_pack[2]=copy_vector(sx)
if len(sx)>1:
for i in xrange(0,len(sx)):
c_sx=copy_vector(sx)
c_sx[i:i+1]=[]
best_pack=get_nearest_simplex(dot,xyz,Sx,c_sx, best_pack)
return best_pack
def get_constant_functions(xyz,f,Sx):
''' Determines a list of constant basis functions
Args:
xyz: Point set.
f: Corresponding array of function values.
Sx: List of simplexes
Returns:
List of basis functions
'''
    def fi(i):
        #bind i now so each returned lambda keeps its own index
        return lambda dot: f[i]
    return [fi(i) for i in xrange(len(xyz))]
def get_linear_functions(xyz,f,Sx):
''' Determines a list of linear basis functions
Args:
xyz: Point set.
f: Corresponding array of function values.
Sx: List of simplexes
Returns:
List of basis functions
'''
if len(xyz)==0:
return []
dimm=len(xyz[0])
simplex_linears=make_n_vectors(len(Sx),dimm+1)
point_linears=make_n_vectors(len(xyz),dimm+1)
for i in xrange(0,len(Sx)):
A=make_matrix(dimm+1)
B=make_vector(dimm+1)
for j in xrange(dimm+1):
pnt=Sx[i][j]-1
for k in xrange(dimm):
A[j][k]=xyz[pnt][k]
A[j][dimm]=1.0
B[j]=f[pnt]
simplex_linears[i]=Gauss(A,B)
for i in xrange(len(xyz)):
sx_N=0
for j in xrange(0,len(Sx)):
for k in xrange(0,dimm+1):
if Sx[j][k]==i+1:
sx_N+=1
for l in xrange(0,dimm+1):
point_linears[i][l]+=simplex_linears[j][l]
break
if sx_N==0: print "error: point is not in simplex"
point_linears[i]=map(lambda a:a/sx_N, point_linears[i])
def fi(i):
return lambda dot: sum([point_linears[i][j]*dot[j] for j in xrange(dimm)])+point_linears[i][dimm]
return [fi(i) for i in xrange(len(xyz))]
###################### interpolation and extrapolation section
def F_w(dot, xyz,Sx,base_f,s_k):
''' Average weighted interpolation
Args:
dot: Argument for interpolation function
given by a list of variables
xyz: Data points.
Sx: List of simplexes, which is excessive
for this particular algorithm yet let for consistancy.
base_f: Corresponding to 'xyz' list of basic functions.
s_k: Scalar weight function.
Returns:
Value of interpolation function.
'''
Up=0.0
Dn=0.0
for i in xrange(0,len(xyz)):
k=s_k(v_len(v_sub(xyz[i],dot)))
Up=Up+base_f[i](dot)*k
Dn=Dn+k
return Up/Dn
def F_l(dot, xyz,Sx,base_f,s_k):
''' Simplicial linear interpolation.
Args:
dot: Argument for interpolation function
given by a list of variables
xyz: Data points.
        Sx: List of simplexes, representing the simplicial complex
base_f: Corresponding to 'xyz' list of basic functions.
s_k: Scalar weight function.
Returns:
Value of interpolation function.
'''
DIMM=len(dot)
Up=0.0
Dn=0.0
for sx in xrange(0,len(Sx)):
crd=make_n_vectors(DIMM+1,DIMM)
if coords_in_simplex(sx,dot,0,xyz,Sx, crd[0]):
for i in xrange(1,DIMM+1):
coords_in_simplex(sx,dot,i,xyz,Sx, crd[i])
Up=0.0
Dn=0.0
for i in xrange(0,DIMM+1):
Up+=base_f[Sx[sx][i]-1](dot)*v_k(crd[i], s_k)
Dn+=v_k(crd[i], s_k)
return Up/Dn
return F_lex(dot,xyz,Sx,base_f,s_k)
def F_lex(dot, xyz,Sx,base_f,s_k):
''' Simplicial linear extrapolation
Args:
dot: Argument for interpolation function
given by a list of variables
xyz: Data points.
        Sx: List of simplexes, representing the simplicial complex
base_f: Corresponding to 'xyz' list of basic functions.
s_k: Scalar weight function.
Returns:
Value of extrapolation function.
'''
best_pack=[100000000,[],[]] # I probably should make this better
for sx in Sx:
for i in xrange(0,len(sx)):
c_sx=copy_vector(sx)
c_sx[i:i+1]=[]
best_pack=get_nearest_simplex(dot,xyz,Sx,c_sx, best_pack)
dot2=copy_vector(best_pack[1])
dot2[0]+=0.2
dot2[1]+=0.2
DIMM=len(dot)
Up=0.0
Dn=0.0
for sx in xrange(0,len(Sx)):
n=0
for i1 in best_pack[2]:
for i2 in Sx[sx]:
if i1==i2:
n+=1
if n==len(best_pack[2]):
crd=make_n_vectors(DIMM+1,DIMM)
for i in xrange(0,DIMM+1):
coords_in_simplex(sx,dot2,i,xyz,Sx, crd[i])
Up=0.0
Dn=0.0
for i in xrange(0,DIMM+1):
Up+=base_f[Sx[sx][i]-1](dot)*v_k(crd[i], s_k)
Dn+=v_k(crd[i], s_k)
return Up/Dn
return 0
def F_s(dot, xyz,Sx,base_f,s_k):
''' Simplicial weighted interpolation.
Args:
dot: Argument for interpolation function
given by a list of variables
xyz: Data points.
        Sx: List of simplexes, representing the simplicial complex
base_f: Corresponding to 'xyz' list of basic functions.
s_k: Scalar weight function.
Returns:
Value of interpolation function.
'''
DIMM=len(dot)
for sx in xrange(0,len(Sx)):
crd=make_vector(DIMM)
if coords_in_simplex(sx,dot,0,xyz,Sx, crd):
pnt_set=[]
for pnt in Sx[sx]:
pnt_set.append(pnt-1)
return get_inS(dot,dot,pnt_set, xyz,Sx,base_f,s_k)
return F_sex(dot,xyz,Sx,base_f,s_k)
def F_sex(dot, xyz,Sx,base_f,s_k):
''' Simplex weighted extrapolation
Args:
dot: Argument for interpolation function
given by a list of variables
xyz: Data points.
        Sx: List of simplexes, representing the simplicial complex
base_f: Corresponding to 'xyz' list of basic functions.
s_k: Scalar weight function.
Returns:
Value of extrapolation function.
'''
best_pack=[10000000,[],[]]
for sx in Sx:
for i in xrange(0,len(sx)):
c_sx=copy_vector(sx)
c_sx[i:i+1]=[]
best_pack=get_nearest_simplex(dot,xyz,Sx,c_sx, best_pack)
pnt_set=[]
for i in best_pack[2]:
pnt_set.append(i-1)
return get_inS(dot,best_pack[1],pnt_set, xyz,Sx,base_f,s_k)
def get_inS(dot,prj,pnt_set, xyz,Sx,base_f,s_k):
''' Gets a simplex interpolated value in a subsimplex
Args:
dot: Argument of interpolation function.
prj: Current level projection of a 'dot'.
pnt_set: Point set representing current level simplex.
xyz: Data point set.
Sx: Simplicial complex.
base_f: Corresponding to 'xyz' list of basic functions.
s_k: Scalar weight function.
Returns:
        Interpolation value for the 'dot' projection on a subsimplex
'''
PSL=len(pnt_set)
if PSL==1:
return base_f[pnt_set[0]](dot)
elif PSL>1:
Up=0.0
Dn=0.0
for i in xrange(0,PSL):
new_pnt_set=[]
new_S=[]
for j in xrange(0,PSL):
if j!=i:
new_pnt_set.append(pnt_set[j])
new_S.append(xyz[pnt_set[j]])
new_prj=v_proj(prj,new_S)
cur_k=s_k(v_len(v_sub(new_prj,prj)))
ud=get_inS(dot,new_prj,new_pnt_set, xyz,Sx,base_f,s_k)
Up+=ud*cur_k
Dn+=cur_k
return Up/Dn
elif PSL<0:
print "WTF error in get_inS"
return -1
def F_b(dot, xyz,Sx,base_f,s_k):
    ''' Barycentric simplicial interpolation.
Args:
dot: Argument for interpolation function
given by a list of variables
xyz: Data points.
        Sx: List of simplexes, representing the simplicial complex
base_f: Corresponding to 'xyz' list of basic functions.
s_k: Scalar weight function.
Returns:
Value of interpolation function.
'''
DIMM=len(dot)
for sx in xrange(0,len(Sx)):
crd=make_vector(DIMM)
if coords_in_simplex(sx,dot,0,xyz,Sx, crd):
up=0.0
down=0.0
for i in xrange(0,len(Sx[sx])):
newS=[]
for j in xrange(0,len(Sx[sx])):
if i!=j:
newS.append(xyz[Sx[sx][j]-1])
newP=v_proj(dot,newS)
cur_k=s_k(v_len(v_sub(dot,newP)))
up=up+base_f[Sx[sx][i]-1](dot)*cur_k
down=down+cur_k
return up/down
return 0
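def _usage_sketch():
    ''' Minimal non-GUI sketch (added for illustration; not in the original):
        evaluate the simplicial weighted scheme F_s at one point of the
        2-variable demo data used below, with constant basis functions and
        an inverse-distance weight. '''
    xy = [[10, 10], [90, 10], [90, 90], [10, 90]]
    f_xy = [120, 60, 80, 90]
    s2 = [[1, 2, 3], [3, 4, 1]]
    def k(x):
        if x != 0: return 1/float(x)
        return 1.0e5
    fi = get_constant_functions(xy, f_xy, s2)
    return F_s([50, 30], xy, s2, fi, k)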
if __name__ == '__main__':
''' testing and demonstration part '''
# data for 1-variable function / curve
t = [[1], [2], [3], [4], [5], [6], [7]] # parameter set
x = [55, 180, 220, 45, 55, 180, 220] # coordinates
y = [50, 45, 190, 220, 50, 45, 190]
s1 = [[1, 2], [2, 3], [3, 4], [4, 5], [5, 6], [6, 7]] # 1-simplex complex
# data for 2-variable function / surface graphic
xy = [[10, 10], [90, 10], [90, 90], [10, 90]] # point set
f_xy = [120, 60, 80, 90] # corresponding data
s2 = [[1, 2, 3], [3, 4, 1]] # 2-simplex complex
# data for 3-variable function / 3-manifold
xyz=[[10, 10, 10],[90 , 10, 10 ],[90 ,90 , 10],[10, 90, 10],[50, 50, 90]]
f_xyz=[20,90,60,150,180]
s3=[[1,2,3,5],[3,4,1,5]]
def k(x): # weight function
if x!=0:
return 1/float(x)
else:
return 1.0e5 # to avoid zero division
def colors(n):
if n == -1: return "#000000"
elif n % 5 == 0: return "#006600"
elif n % 5 == 1: return "#000077"
elif n % 5 == 2: return "#770000"
elif n % 5 == 3: return "#003377"
elif n % 5 == 4: return "#440077"
import time # for productivity measurement
def test_1d():
canvas1 = Canvas(root, height = 256, width = 256, background = "white")
start_time = time.time()
        ox = None
        oy = None
for i in xrange(len(s1)): # for all simplexes
the_fill = colors(i)
            if i>0 and i<len(s1)-1: # boundary simplexes are skipped; they only grant smoothness
for j in xrange(21):
ti1 = t[s1[i][0]-1][0]
ti2 = t[s1[i][1]-1][0]
ti = ti1 + j*(ti2 - ti1)/20.0
F_x=F([ti], t, s1, fxi, k)
F_y=F([ti], t, s1, fyi, k)
                    if ox is not None and oy is not None:
canvas1.create_line(F_x, F_y, ox, oy, fill=the_fill, width=2)
ox = F_x
oy = F_y
canvas1.create_line(x[i]-2, y[i]-2, x[i]+2, y[i]+2, fill="#cc0000", width=2)
canvas1.create_line(x[i]-2, y[i]+2, x[i]+2, y[i]-2, fill="#cc0000", width=2)
        finish_time=time.time() # getting and printing calculation time
the_time = finish_time - start_time
print 'curve calculated and drawn in: ', the_time, ' seconds'
canvas1.pack({"side": "left"})
def test_2d():
canvas2 = Canvas(root, height = 256, width = 256, background = "white")
start_time = time.time()
for i in xrange(100): # quasi-isometric plot
for j in xrange(100):
p = [i, j]
F_xy = F(p, xy, s2, fi, k)
xb = 128 + i - j
yb = 140 + (i + j) / 2
canvas2.create_line(xb, yb, xb, yb-F_xy, fill="#880000")
current_simplex = -1 # determining in which simplex p lies
for s in xrange(len(s2)):
if coords_in_simplex(s, p, 0, xy, s2):
current_simplex = s
the_fill = colors(current_simplex)
canvas2.create_line(xb, yb-F_xy, xb, yb-F_xy-1, fill=the_fill)
        finish_time=time.time() # getting and printing calculation time
the_time = finish_time - start_time
print '100x100 surface calculated and drawn in: ', the_time, ' seconds'
canvas2.pack({"side": "left"})
def test_3d():
def red(x, y):
x=round(x/16)*16
if x>255: x=255
if x<0: x=0
return ("#%02X" % x) + ("%02X" % y) + ("%02X" % y)
canvas3 = Canvas(root, height = 256, width = 256, background = "white")
start_time = time.time()
for i in xrange(100):
for j in xrange(100):
p = [j, i, 100]
F_xyz = F(p, xyz, s3, fi, k)
xb = 128 + i - j
yb = 29 + (i + j) / 2
canvas3.create_line(xb, yb, xb, yb+1, fill=red(F_xyz, 50))
p = [100-i, 100, 100-j]
F_xyz = F(p, xyz, s3, fi, k)
xb = 128 + i
yb = 128 + j - i / 2
canvas3.create_line(xb, yb, xb, yb-1, fill=red(F_xyz, 30))
p = [100, 100-i, 100-j]
F_xyz = F(p, xyz, s3, fi, k)
xb = 128 - i
yb = 128 + j - i / 2
canvas3.create_line(xb, yb, xb, yb-1, fill=red(F_xyz, 0))
        finish_time=time.time() # getting and printing calculation time
the_time = finish_time - start_time
print '100x100x3 manifold calculated and drawn in: ', the_time, ' seconds'
canvas3.pack({"side": "left"})
# setup and test
from Tkinter import * # initializing graphics
root = Tk()
F = F_s # interpolation scheme
fxi = get_linear_functions(t, x, s1) # basis functions
fyi = get_linear_functions(t, y, s1) # basis functions
test_1d() # curve test
F = F_s # interpolation scheme
fi = get_linear_functions(xy, f_xy, s2) # basis functions
test_2d() # surface test
F = F_s # interpolation scheme
fi = get_linear_functions(xyz, f_xyz, s3) # basis functions
test_3d() # 3-manifold test
root.mainloop()
|
# -*- coding: utf-8 -*-
from openprocurement.api.validation import (
validate_json_data,
validate_data,
_validate_accreditation_level,
_validate_accreditation_level_mode,
)
from openprocurement.api.constants import RELEASE_SIMPLE_DEFENSE_FROM
from openprocurement.api.utils import (
update_logging_context,
error_handler,
upload_objects_documents,
raise_operation_error,
)
from openprocurement.planning.api.models import Plan, Milestone
from openprocurement.planning.api.constants import PROCEDURES
from itertools import chain
from openprocurement.api.utils import get_now
from openprocurement.api.constants import PLAN_ADDRESS_KIND_REQUIRED_FROM
from copy import deepcopy
def validate_plan_data(request, **kwargs):
update_logging_context(request, {"plan_id": "__new__"})
data = validate_json_data(request)
model = request.plan_from_data(data, create=False)
_validate_plan_accreditation_level(request, model)
data = validate_data(request, model, data=data)
_validate_plan_availability(request)
_validate_plan_accreditation_level_mode(request)
_validate_tender_procurement_method_type(request)
return data
def _validate_plan_accreditation_level(request, model):
_validate_accreditation_level(request, model.create_accreditations, "plan", "creation")
def _validate_plan_accreditation_level_mode(request):
data = request.validated["data"]
mode = data.get("mode", None)
_validate_accreditation_level_mode(request, mode, "plan", "creation")
def _validate_plan_availability(request):
data = request.validated["data"]
procurement_method_type = data.get("tender", {}).get("procurementMethodType", "")
now = get_now()
if (
(now >= RELEASE_SIMPLE_DEFENSE_FROM and procurement_method_type == "aboveThresholdUA.defense")
or (now < RELEASE_SIMPLE_DEFENSE_FROM and procurement_method_type == "simple.defense")
):
raise_operation_error(
request,
"procedure with procurementMethodType = {} is not available".format(procurement_method_type),
)
def _validate_tender_procurement_method_type(request):
_procedures = deepcopy(PROCEDURES)
if get_now() >= PLAN_ADDRESS_KIND_REQUIRED_FROM:
_procedures[""] = ("centralizedProcurement", )
procurement_method_types = list(chain(*_procedures.values()))
procurement_method_types_without_above_threshold_ua_defense = list(
[x for x in procurement_method_types if x not in ('aboveThresholdUA.defense', 'simple.defense')]
)
kind_allows_procurement_method_type_mapping = {
"defense": procurement_method_types,
"general": procurement_method_types_without_above_threshold_ua_defense,
"special": procurement_method_types_without_above_threshold_ua_defense,
"central": procurement_method_types_without_above_threshold_ua_defense,
"authority": procurement_method_types_without_above_threshold_ua_defense,
"social": procurement_method_types_without_above_threshold_ua_defense,
"other": ["belowThreshold", "reporting", "priceQuotation"],
}
data = request.validated["data"]
kind = data.get("procuringEntity", {}).get("kind", "")
tender_procurement_method_type = data.get("tender", {}).get("procurementMethodType", "")
allowed_procurement_method_types = kind_allows_procurement_method_type_mapping.get(kind)
if allowed_procurement_method_types and get_now() >= PLAN_ADDRESS_KIND_REQUIRED_FROM:
if tender_procurement_method_type not in allowed_procurement_method_types:
request.errors.add(
"body", "kind",
"procuringEntity with {kind} kind cannot publish this type of procedure. "
"Procurement method types allowed for this kind: {methods}.".format(
kind=kind, methods=", ".join(allowed_procurement_method_types)
)
)
request.errors.status = 403
def validate_patch_plan_data(request, **kwargs):
return validate_data(request, Plan, True)
def validate_plan_has_not_tender(request, **kwargs):
plan = request.validated["plan"]
if plan.tender_id:
request.errors.add("body", "tender_id", "This plan has already got a tender")
request.errors.status = 422
raise error_handler(request)
def validate_plan_with_tender(request, **kwargs):
plan = request.validated["plan"]
if plan.tender_id:
json_data = request.validated["json_data"]
names = []
if "procuringEntity" in json_data:
names.append("procuringEntity")
if "budget" in json_data and "breakdown" in json_data["budget"]:
names.append("budget.breakdown")
for name in names:
request.errors.add("body", name, "Changing this field is not allowed after tender creation")
if request.errors:
request.errors.status = 422
raise error_handler(request)
def validate_plan_not_terminated(request, **kwargs):
plan = request.validated["plan"]
if plan.status in ("cancelled", "complete"):
request.errors.add("body", "status", "Can't update plan in '{}' status".format(plan.status))
request.errors.status = 422
raise error_handler(request)
def validate_plan_status_update(request, **kwargs):
status = request.validated["json_data"].get("status")
if status == "draft" and request.validated["plan"].status != status:
request.errors.add("body", "status", "Plan status can not be changed back to 'draft'")
request.errors.status = 422
raise error_handler(request)
def validate_plan_procurementMethodType_update(request, **kwargs):
new_pmt = request.validated["json_data"].get("tender", {}).get("procurementMethodType", "")
current_pmt = request.validated["plan"].tender.procurementMethodType
now = get_now()
if (
current_pmt != new_pmt
and (
now < RELEASE_SIMPLE_DEFENSE_FROM and new_pmt == "simple.defense"
or now > RELEASE_SIMPLE_DEFENSE_FROM and new_pmt == "aboveThresholdUA.defense"
)
):
request.errors.add(
"body",
"tender",
"Plan tender.procurementMethodType can not be changed from '{}' to '{}'".format(
current_pmt, new_pmt
)
)
request.errors.status = 422
raise error_handler(request)
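# A pure-predicate sketch of the date/type switch above, handy for unit
# testing; the helper name is hypothetical and not part of the original module:
def _pmt_change_forbidden(now, release_from, new_pmt):
    return (
        now < release_from and new_pmt == "simple.defense"
        or now > release_from and new_pmt == "aboveThresholdUA.defense"
    )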
def validate_milestone_data(request, **kwargs):
update_logging_context(request, {"milestone_id": "__new__"})
model = type(request.plan).milestones.model_class
milestone = validate_data(request, model)
upload_objects_documents(
request, request.validated["milestone"],
route_kwargs={"milestone_id": request.validated["milestone"].id}
)
return milestone
def validate_patch_milestone_data(request, **kwargs):
model = type(request.context)
return validate_data(request, model, partial=True)
def validate_milestone_author(request, **kwargs):
milestone = request.validated["milestone"]
plan = request.validated["plan"]
author = milestone.author
plan_identifier = plan.procuringEntity.identifier
milestone_identifier = author.identifier
if (plan_identifier.scheme, plan_identifier.id) != (milestone_identifier.scheme, milestone_identifier.id):
request.errors.add(
"body",
"author",
"Should match plan.procuringEntity"
)
request.errors.status = 422
raise error_handler(request)
if any(
(m.author.identifier.scheme, m.author.identifier.id) == (author.identifier.scheme, author.identifier.id)
for m in plan.milestones
if m.status in Milestone.ACTIVE_STATUSES
):
request.errors.add(
"body",
"author",
"An active milestone already exists for this author"
)
request.errors.status = 422
raise error_handler(request)
def validate_milestone_status_scheduled(request, **kwargs):
milestone = request.validated["milestone"]
if milestone.status != Milestone.STATUS_SCHEDULED:
request.errors.add(
"body",
"status",
"Cannot create milestone with status: {}".format(milestone["status"])
)
request.errors.status = 422
raise error_handler(request)
|
import torch.nn as nn
from util.function import GaussianSampleLayer
import torch
from torch.autograd import Variable
class D(nn.Module):
def __init__(self):
super(D, self).__init__()
self.main = nn.Sequential(
nn.Conv2d(1,16,(7,1),padding = (3,0),stride = (3,1),bias =False),
#nn.BatchNorm2d(16),
nn.LeakyReLU(0.02),\
nn.Conv2d(16,32,(7,1),padding = (3,0),stride = (3,1),bias = False),
nn.BatchNorm2d(32),
nn.LeakyReLU(0.02),\
nn.Conv2d(32,64,(115,1),padding = (57,0),stride = (3,1),bias =False),
nn.BatchNorm2d(64),
nn.LeakyReLU(0.02),\
)
self.fc = nn.Linear(1216,1,bias = True)
def forward(self, x):
#print('=====================D')
#print(x.shape)
        h = x.view(-1,513)  # original x = (256 * 512)
#print('h: ',h.shape)
output = self.main(x)
#print(output.shape)
output=output.view(-1, 1216)#19*64
        x = self.fc(output)  # equals the probability score
#print('=====================D_Finish x = ',x.shape)
return x, h
class Encoder(nn.Module):
def __init__(self):
super(Encoder, self).__init__()
self.main = nn.Sequential(
nn.Conv2d(1,16,(7,1),padding = (3,0),stride = (3,1),bias = False),
nn.BatchNorm2d(16),
nn.LeakyReLU(0.02),\
nn.Conv2d(16,32,(7,1),padding = (3,0),stride = (3,1),bias = False),
nn.BatchNorm2d(32),
nn.LeakyReLU(0.02),\
nn.Conv2d(32,64,(7,1),padding = (3,0),stride = (3,1),bias = False),
nn.BatchNorm2d(64),
nn.LeakyReLU(0.02),\
nn.Conv2d(64,128,(7,1),padding = (3,0),stride = (3,1),bias = False),
nn.BatchNorm2d(128),
nn.LeakyReLU(0.02),\
nn.Conv2d(128,256,(7,1),padding = (3,0),stride = (3,1),bias = False),
nn.BatchNorm2d(256),
nn.LeakyReLU(0.02),\
)
self.fc_mu = nn.Linear(768, 128,bias = True)
self.fc_lv = nn.Linear(768, 128,bias = True)
def forward(self, x):
#print('call==================================================')
#print(x.shape)
#print(type(x))
output = self.main(x)
#print(output.shape)
#print('?????????????????????????????')
output=output.view(-1, 768)#3*256
z_mu = self.fc_mu(output)
z_lv = self.fc_lv(output)
return z_mu, z_lv
class G(nn.Module):
def __init__(self):
super(G, self).__init__()
self.Embedding = nn.Linear(10,128,bias = False)
self.fc1 = nn.Linear(128, 171, bias = True)
self.fc2 = nn.Linear(128, 171, bias = True)
self.LR = nn.LeakyReLU(0.02)
self.fc = nn.Sequential(
nn.Linear(171,19*1*81,bias = True),
nn.BatchNorm1d(19*1*81),\
nn.LeakyReLU(0.02)
)
self.main = nn.Sequential(
nn.ConvTranspose2d(81,32,(9,1),padding = (3,0),stride = (3,1),bias = False),
nn.BatchNorm2d(32),
nn.LeakyReLU(0.2, inplace=True),\
nn.ConvTranspose2d(32,16,(7,1),padding = (2,0),stride = (3,1),bias = False),
nn.BatchNorm2d(16),
nn.LeakyReLU(0.2, inplace=True),\
nn.ConvTranspose2d(16,8,(7,1),padding = (2,0),stride = (3,1),bias = False),
nn.BatchNorm2d(8),
nn.LeakyReLU(0.2, inplace=True),\
nn.ConvTranspose2d(8,1,(1025,1),padding = (512,0),stride = (1,1),bias = False),
)
self.Tanh = nn.Tanh()
def forward(self, z,y):
person = torch.zeros(y.shape[0],10)
for i in range(y.shape[0]):
for j in range(10):
if(j==y[i]):
person[i][j] = 1
break
#print('---------------: ')
#print(person.shape)
#print(person)
who = Variable(person.cuda(),requires_grad=False)
output = self.Embedding(who)
x = 0
_z = self.fc1(z)
x += _z
_y = self.fc2(output)
x += _y
x = self.LR(x)
z = self.fc(x)
z = z.view(-1,81,19,1)
x = self.main(z)
logit = x
x = self.Tanh(x)
return x,logit
def weights_init(m):
classname = m.__class__.__name__
print(m)
if classname.find('Conv') != -1:
nn.init.xavier_normal_(m.weight.data)
#m.weight.data.normal_(0.0, 0.02)
elif classname.find('BatchNorm') != -1:
#nn.init.xavier_normal_(m.weight.data)
#m.weight.data.normal_(1.0, 0.02)
m.bias.data.fill_(0)
        torch.nn.init.uniform_(m.weight.data, -1, 1)  # in-place variant; plain uniform() is deprecated
elif classname.find('Linear') != -1:
nn.init.xavier_normal_(m.weight.data)
#m.weight.data.normal_(0.0, 0.02)
#m.bias.data.fill_(0)
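if __name__ == '__main__':
    # Minimal smoke-test sketch: D expects input of shape (N, 1, 513, 1); the
    # three strided convolutions reduce 513 -> 171 -> 57 -> 19 along the height
    # axis, and 19 * 64 channels = 1216 features feed the final linear layer.
    # (G.forward is not exercised here because it moves tensors to CUDA.)
    net_d = D()
    net_d.apply(weights_init)
    score, flat = net_d(torch.randn(2, 1, 513, 1))
    print(score.shape, flat.shape)  # torch.Size([2, 1]) torch.Size([2, 513])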
|
/usr/local/lib/python3.6/hmac.py |
__version__ = '1.2.29' |
import os
import asyncio
import discord
# To add a command, simply create a function 'async def on_COMMAND(bot, message, arguments)'
# 'bot' is the original bot object
# 'message' is the original message object
# 'arguments' is an array containing all the command arguments
PATH_SONGS = "resources/songs"
JSON_MEMORY_KEY_CHANNEL = "channel"
MESSAGE_PLAY = "Mes moustaches frémissent ! En avant pour {}, volume {} !"
MESSAGE_STOP = "Vous souhaitez un peu de silence, miaou ?"
MESSAGE_INVALID_SONG = "J'ai une grosse bibliothèque, mais je ne trouve pas votre chanson, miaître..."
MESSAGE_NO_SONG = "Vous devez prrréciser une musique, mon chaton."
MESSAGE_NO_CHANNEL = "Vous devez rejoindre un chat-nal vocal d'abord !"
MESSAGE_MEMORY_CHANNEL_SUCCESSFUL = "Ce canal sera miaoutilisé pour mes futures envolées artistiques !"
MESSAGE_MEMORY_CHANNEL_FAILURE = "Ce chat-nal est déjà mon lieu de travail !"
async def on_channel(bot, message, arguments):
channel = message.channel
if channel != None:
if await bot.getmem(JSON_MEMORY_KEY_CHANNEL) != channel.id:
await bot.setmem(JSON_MEMORY_KEY_CHANNEL, channel.id)
await bot.speak(MESSAGE_MEMORY_CHANNEL_SUCCESSFUL);
else:
await bot.speak(MESSAGE_MEMORY_CHANNEL_FAILURE);
async def on_play(bot, message, arguments):
channel = message.author.voice.voice_channel
if channel != None:
if len(arguments) > 0:
song_title = arguments[0]
voice = await get_voice(bot, channel)
if voice != None and not is_playing(bot):
song_path = bot.get_path(PATH_SONGS + "/" + song_title)
song_volume = int(arguments[1]) if len(arguments) > 1 else 2
song_volume = song_volume if song_volume <= 100 else 100
if os.path.isfile(song_path):
bot.player = voice.create_ffmpeg_player(song_path, options="-af volume=" + str(song_volume / 100))
bot.player.start()
await bot.speak(MESSAGE_PLAY.format(song_title, song_volume));
else:
await bot.speak(MESSAGE_INVALID_SONG);
else:
await bot.speak(MESSAGE_NO_SONG);
else:
await bot.speak(MESSAGE_NO_CHANNEL);
async def on_stop(bot, message, arguments):
if is_playing(bot):
bot.player.stop()
await bot.speak(MESSAGE_STOP);
def is_playing(bot):
return hasattr(bot, "player") and bot.player != None and bot.player.is_playing()
async def get_voice(bot, channel):
voice = None
if channel != None:
for voice_client in bot.client.voice_clients:
if voice_client.channel == channel:
voice = voice_client
if voice == None:
voice = await bot.client.join_voice_channel(channel)
return voice;
|
# Generated by Django 2.0.6 on 2019-02-20 17:25
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('cwApp', '0002_auto_20190220_1724'),
]
operations = [
migrations.AlterField(
model_name='dog',
name='gender',
field=models.CharField(default='', max_length=200),
),
]
|
import dash
from dash.dependencies import Input, Output
import plotly.express as px
from modules.util import Util
from modules.layout import Layout
from modules.callbacks import register_callbacks
import dash_bootstrap_components as dbc
util = Util()
layout = Layout()
default_elements = util.process_df(util.read_sample_data())
app = dash.Dash(__name__, external_stylesheets=[dbc.themes.LUMEN])
server = app.server
app.layout = layout.main_layout(default_elements)
register_callbacks(app)
if __name__ == '__main__':
app.run_server() |
# https://www.binarytides.com/python-socket-server-code-example/
# https://github.com/home-assistant/home-assistant/blob/master/homeassistant/components/pilight.py
# https://github.com/DavidLP/pilight/blob/master/pilight/pilight.py
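# NOTE: the snippets below are scratchpad excerpts from the linked sources;
# names like host, port, whitelist, hass and EVENT are assumed to be defined
# by the surrounding Home Assistant integration and are not defined here.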
pilight_client = pilight.Client(host=host, port=port)
pilight_client.start()
pilight_client.stop()
pilight_client.send_code(data={"protocol": [ "kaku_switch" ], "id": 1,"unit": 0,"off": 1})
def handle_received_code(data):
"""Run when RF codes are received."""
# Unravel dict of dicts to make event_data cut in automation rule
# possible
data = dict({'protocol': data['protocol'], 'uuid': data['uuid']}, **data['message'])
# No whitelist defined, put data on event bus
if not whitelist:
hass.bus.fire(EVENT, data)
# Check if data matches the defined whitelist
elif all(str(data[key]) in whitelist[key] for key in whitelist):
hass.bus.fire(EVENT, data)
pilight_client.set_callback(handle_received_code)
###########################
# https://github.com/home-assistant/home-assistant/blob/master/homeassistant/components/satel_integra.py
# https://github.com/c-soft/satel_integra
@asyncio.coroutine
def async_setup(hass, config):
controller = AsyncSatel(host, port, zones, hass.loop, partition)
result = yield from controller.connect()
controller.close()
@callback
def alarm_status_update_callback(status):
_LOGGER.debug("Alarm status callback, status: %s", status)
hass_alarm_status = STATE_ALARM_DISARMED
_LOGGER.debug("Sending hass_alarm_status: %s...", hass_alarm_status)
async_dispatcher_send(hass, SIGNAL_PANEL_MESSAGE, hass_alarm_status)
@callback
def zones_update_callback(status):
"""Update zone objects as per notification from the alarm."""
_LOGGER.debug("Zones callback , status: %s", status)
async_dispatcher_send(hass, SIGNAL_ZONES_UPDATED, status[ZONES])
# Create a task instead of adding a tracking job, since this task will
# run until the connection to satel_integra is closed.
hass.loop.create_task( controller.keep_alive() )
hass.loop.create_task( controller.monitor_status( alarm_status_update_callback, zones_update_callback ) )
return True
@asyncio.coroutine
def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
device = SatelIntegraBinarySensor(zone_num, zone_name, zone_type)
class SatelIntegraBinarySensor(BinarySensorDevice):
"""Representation of an Satel Integra binary sensor."""
@asyncio.coroutine
def async_added_to_hass(self):
"""Register callbacks."""
async_dispatcher_connect(self.hass, SIGNAL_ZONES_UPDATED, self._zones_updated)
@property
def should_poll(self):
"""No polling needed."""
return False
@callback
def _zones_updated(self, zones):
"""Update the zone's state, if needed."""
if self._zone_number in zones and self._state != zones[self._zone_number]:
self._state = zones[self._zone_number]
self.async_schedule_update_ha_state()
|
# -*- coding: utf-8 -*-
from pydummy.core.dummydata import DummyData
class DummyDomain(DummyData):
def __init__(self):
self.data = [
'sweetmail.com',
'fastsnail.net',
'paperlessmail.org',
'email.info',
'academiamail.edu',
'secretmail.gov',
'notgmail.uk',
'hayoo.us',
'depost.de',
'skandimail.dk'
]
|
import os, json, logging
logger = logging.getLogger()
logger.setLevel(logging.getLevelName(os.getenv('lambda_logging_level', 'INFO')))
def lambda_handler(event, context):
logger.info(event)
|
"""Utilities."""
from collections import defaultdict
def by_key(results, keys, defaults=None):
"""Group rows by specified key."""
if not isinstance(keys, tuple):
keys = (keys,)
output = defaultdict(list)
if defaults:
for key in defaults:
output[key] = []
for row in results:
key = list(row[k] for k in keys)
if len(key) == 1:
key = key[0]
else:
key = tuple(key)
output[key].append(dict(row))
return output
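# Usage sketch (hypothetical data): rows grouped by a single key.
#   rows = [{'a': 1, 'v': 'x'}, {'a': 1, 'v': 'y'}, {'a': 2, 'v': 'z'}]
#   by_key(rows, 'a')
#   -> {1: [{'a': 1, 'v': 'x'}, {'a': 1, 'v': 'y'}], 2: [{'a': 2, 'v': 'z'}]}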
def compound_where(keys, fields):
"""Create a filter on compound keys."""
args = {}
ors = []
for i, key in enumerate(keys):
ands = []
for field, value in zip(fields, key):
bind_name = '{}_{}'.format(field.replace('.', '_'), i)
ands.append('{} = :{}'.format(field, bind_name))
args[bind_name] = value
ors.append(ands)
return ' or '.join([' and '.join(a) for a in ors]), args
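# Usage sketch (hypothetical values):
#   compound_where([(1, 'a'), (2, 'b')], ['t.id', 't.code'])
#   -> ("t.id = :t_id_0 and t.code = :t_code_0 or "
#       "t.id = :t_id_1 and t.code = :t_code_1",
#       {'t_id_0': 1, 't_code_0': 'a', 't_id_1': 2, 't_code_1': 'b'})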
|
class Interpreter:
def __init__(self, instructions):
self.instructions = instructions
def run(self):
pass |
import pathlib  # standard library
import openpyxl  # third-party library: pip install openpyxl
import csv  # standard library
lwb = openpyxl.Workbook()  # sales summary workbook
lsh = lwb.active  # sales summary worksheet
list_row = 1
path = pathlib.Path(r"..\data\sales")  # relative path; raw string keeps backslashes literal
for pass_obj in path.iterdir():
if pass_obj.match("*.xlsx"):
wb = openpyxl.load_workbook(pass_obj)
for sh in wb:
for dt_row in range(9,19):
                if sh.cell(dt_row, 2).value is not None:
                    # more explicit alternative:
                    #lsh.cell(row=list_row, column=1).value = \
                    #    sh.cell(row=2, column=7).value  # slip number
                    lsh.cell(list_row, 1).value = sh.cell(2, 7).value  # slip number
                    lsh.cell(list_row, 2).value = sh.cell(3, 7).value  # date
                    lsh.cell(list_row, 3).value = sh.cell(4, 3).value  # customer code
                    lsh.cell(list_row, 4).value = sh.cell(7, 8).value  # sales rep code
                    lsh.cell(list_row, 5).value = sh.cell(dt_row, 1).value  # line no.
                    lsh.cell(list_row, 6).value = sh.cell(dt_row, 2).value  # product code
                    lsh.cell(list_row, 7).value = sh.cell(dt_row, 3).value  # product name
                    lsh.cell(list_row, 8).value = sh.cell(dt_row, 4).value  # quantity
                    lsh.cell(list_row, 9).value = sh.cell(dt_row, 5).value  # unit price
                    lsh.cell(list_row, 10).value = sh.cell(dt_row, 4).value * \
                        sh.cell(dt_row, 5).value  # amount (quantity * unit price)
                    lsh.cell(list_row, 11).value = sh.cell(dt_row, 7).value  # remarks
list_row += 1
#lwb.save(r"..\data\sales\salesList.xlsx")
with open(r"..\data\sales\salesList.csv", "w", encoding="utf_8_sig") as fp:
writer = csv.writer(fp, lineterminator="\n")
for row in lsh.rows:
writer.writerow([col.value for col in row])
|
from django import forms
class EditVoteForm(forms.Form):
name = forms.CharField(max_length=64)
machine_name = forms.SlugField(max_length=64)
description = forms.CharField()
class EditVoteFilterForm(forms.Form):
filter_id = forms.IntegerField()
class AdminSelectForm(forms.Form):
username = forms.CharField()
class EditVoteOptionsForm(forms.Form):
auto_open_options = forms.BooleanField(required=False)
min_votes = forms.IntegerField()
max_votes = forms.IntegerField()
class GetVoteOptionForm(forms.Form):
option_id = forms.IntegerField()
class PasswordForm(forms.Form):
password = forms.CharField()
class EditVoteOptionForm(forms.Form):
option_id = forms.IntegerField()
name = forms.CharField(required=False, max_length=64)
picture_url = forms.URLField(required=False)
description = forms.CharField(required=False)
personal_link = forms.URLField(required=False)
link_name = forms.CharField(required=False, max_length=16)
class EditScheduleForm(forms.Form):
open_time = forms.DateTimeField(required=False)
close_time = forms.DateTimeField(required=False)
public_time = forms.DateTimeField(required=False)
|
#!/usr/bin/env python3
import cv2
class alternativeStrategy(object):
'''
A template strategy
'''
def __init__(self):
pass
def execute(self):
print("Please implement an execution method." )
# Define each image process strategy
class diffStrategy(alternativeStrategy):
'''
Compare difference of images
'''
def __init__(self):
pass
def execute(self, imageContainer):
# get the original image list from imageContainer
images = imageContainer.get("Original")
[image1, image2] = [images[0], images[-1]]
# Calculate difference
self.diffImage = cv2.absdiff (image1, image2)
# save the processed image
imageContainer.insert({"Process": self.diffImage})
class blurStrategy(alternativeStrategy):
'''
Apply blur to the image
'''
def __init__(self):
self.blurSetting=(3,3)
def execute(self, imageContainer):
image = imageContainer.pop("Process")
# Blur the image
self.blurImage = cv2.blur(image, self.blurSetting)
# save the processed image
imageContainer.insert({"Process": self.blurImage})
class thresStrategy(alternativeStrategy):
'''
Apply threshold to the image
'''
def __init__(self):
self.thresValue = 32
self.thresMaxVal = 255
def execute(self, imageContainer):
image = imageContainer.pop("Process")
# Apply threshold (enhanced)
ret, self.thresImage = cv2.threshold(image, self.thresValue, self.thresMaxVal, cv2.THRESH_BINARY)
# save the processed image
imageContainer.insert({"Process": self.thresImage})
class findContoursStrategy(alternativeStrategy):
'''
Find contour and set alarm if needed
'''
def __init__(self, alarm):
self.alarm = alarm
def execute(self, imageContainer):
image = imageContainer.pop("Process")
# convert to gray scale
gray_image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
# Find contours
max_contours=0
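        # NOTE: two-value unpacking below assumes OpenCV 2.x or 4.x;
        # OpenCV 3.x returns (image, contours, hierarchy) instead.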
contours, hierarchy = cv2.findContours(gray_image, cv2.RETR_LIST, cv2.CHAIN_APPROX_TC89_KCOS)
for contour in contours:
x,y,w,h = cv2.boundingRect(contour)
# Filter out the small boundaries
if w*h > 225:
cv2.rectangle(image, (x,y), (x+w,y+h), (0,255,0), 2)
# Enable alarm
self.alarm.set()
# Record the max contour
if w*h > max_contours:
max_contours = w*h
# save the processed image
imageContainer.insert({"Process": image})
class strategyConstructor():
'''
    Constructor of these image processing strategies
'''
def __init__(self, alarm):
self.strategyList = []
# Actually these strategies are the basic operation of motion detection
self.strategyList.append ( diffStrategy() )
self.strategyList.append ( blurStrategy() )
self.strategyList.append ( thresStrategy() )
self.strategyList.append ( findContoursStrategy(alarm) )
def listStrategy(self):
return self.strategyList
if __name__ == "__main__":
    import threading
    from camera import camera
    cam = camera()
    # Construct the strategies object; strategyConstructor requires an alarm
    # object exposing set(), which threading.Event provides.
    alarm = threading.Event()
    strategyConstruction = strategyConstructor(alarm)
# Using container to store images
from container import dataContainer
imgContainer = dataContainer()
# Store initial n images to image container
for i in range(5):
        # Capture an image
ret, image = cam.read()
imgContainer.insert ({"Original": image.copy() })
cv2.imshow ("Image original", image )
cv2.waitKey(50)
for strategy in strategyConstruction.listStrategy():
strategy.execute(imgContainer)
cv2.imshow ("Image process", imgContainer.pop("Process") )
cv2.waitKey(2000)
cv2.destroyAllWindows()
|
from __future__ import print_function
import edward as ed
import tensorflow as tf
import numpy as np
from edward.models import Multinomial
from scipy.special import gammaln
sess = tf.Session()
ed.set_seed(98765)
def multinomial_logpmf(x, n, p):
"""
Arguments
----------
x: np.array
vector of length K, where x[i] is the number of outcomes
in the ith bucket
n: int
number of outcomes equal to sum x[i]
p: np.array
vector of probabilities summing to 1
"""
return gammaln(n + 1.0) - \
np.sum(gammaln(x + 1.0)) + \
np.sum(x * np.log(p))
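# Quick sanity check (illustrative): one toss of a fair coin landing heads:
#   multinomial_logpmf(np.array([1, 0]), 1, np.array([0.5, 0.5]))
#   == log(0.5) ~= -0.693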
def multinomial_logpmf_vec(x, n, p):
n_minibatch = x.shape[0]
return np.array([multinomial_logpmf(x[i, :], n, p)
for i in range(n_minibatch)])
def _test_log_prob_zi(n_minibatch, num_factors, K):
multinomial = Multinomial(num_factors, K)
multinomial.pi = tf.constant(1.0/K, shape=[num_factors, K])
with sess.as_default():
pi = multinomial.pi.eval()
z = np.zeros((n_minibatch, K*num_factors))
for i in range(num_factors):
z[:, (i*K):((i+1)*K)] = np.random.multinomial(1, pi[i, :], size=n_minibatch)
z_tf = tf.constant(z, dtype=tf.float32)
for i in range(num_factors):
            # NOTE: since TensorFlow has no special functions, the values here
            # are only an approximation
assert np.allclose(
multinomial.log_prob_zi(i, z_tf).eval(),
multinomial_logpmf_vec(z[:, (i*K):((i+1)*K)], 1, pi[i, :]),
atol=1e-4)
def test_log_prob_zi_1d_1v_2k():
_test_log_prob_zi(1, 1, 2)
def test_log_prob_zi_1d_1v_3k():
_test_log_prob_zi(1, 1, 3)
def test_log_prob_zi_2d_1v_2k():
_test_log_prob_zi(2, 1, 2)
def test_log_prob_zi_1d_2v_2k():
_test_log_prob_zi(1, 2, 2)
def test_log_prob_zi_2d_2v_2k():
_test_log_prob_zi(2, 2, 2)
|
# -*- coding: utf-8 -*-
"""
Created on Fri Mar 5 23:44:40 2021
@author: Muzaffer
"""
from pymongo import MongoClient
client = MongoClient()
ConnectDB = client.AirQuality # Connect to database
ConnectCol = ConnectDB.AirQuality # Connect to collection
PM10 = ConnectCol.find({},{"_id":0,"PM10":1})
def FindConstants(Cp):
    # EPA PM10 breakpoints; closed intervals so boundary values
    # such as 54 or 55 no longer fall through the checks
    if 0 <= Cp <= 54:
        IHi = 50
        ILo = 0
        BPHi = 54
        BPLo = 0
        return IHi, ILo, BPHi, BPLo
    elif 55 <= Cp <= 154:
        IHi = 100
        ILo = 51
        BPHi = 154
        BPLo = 55
        return IHi, ILo, BPHi, BPLo
    elif 155 <= Cp <= 254:
        IHi = 150
        ILo = 101
        BPHi = 254
        BPLo = 155
        return IHi, ILo, BPHi, BPLo
    elif 255 <= Cp <= 354:
        IHi = 200
        ILo = 151
        BPHi = 354
        BPLo = 255
        return IHi, ILo, BPHi, BPLo
    elif 355 <= Cp <= 424:
        IHi = 300
        ILo = 201
        BPHi = 424
        BPLo = 355
        return IHi, ILo, BPHi, BPLo
    elif 425 <= Cp <= 504:
        IHi = 400
        ILo = 301
        BPHi = 504
        BPLo = 425
        return IHi, ILo, BPHi, BPLo
    elif 505 <= Cp <= 604:
        IHi = 500
        ILo = 401
        BPHi = 604
        BPLo = 505
        return IHi, ILo, BPHi, BPLo
    else:
        print("PM10 value out of AQI range (0-604)!")
def IndexMessage(AQI):
if AQI>=0 and AQI<=50:
return "Good"
elif AQI>=51 and AQI<=100:
return "Moderate"
elif AQI>=101 and AQI<=150:
return "Unhealthy for Sensitive Groups"
elif AQI>=151 and AQI<=200:
return "Unhealthy"
elif AQI>=201 and AQI<=300:
return "Very unhealthy"
elif AQI>=301 and AQI<=500:
return "Hazardous"
else:
return "We have an error!"
def AQI(IHi, ILo, BPHi, BPLo, Cp): # Calculate Air Quality Index
Ip = round(((IHi-ILo)/(BPHi-BPLo))*(Cp-BPLo)+ILo)
return Ip
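# Worked example: Cp = 120 falls in the 55-154 band, so
# Ip = round(((100-51)/(154-55))*(120-55)+51) = round(83.17) = 83 -> "Moderate".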
report = open("AQIReport.txt","w")
for pm in PM10:
Cp = pm['PM10']
IHi, ILo, BPHi, BPLo = FindConstants(Cp)
Ip = AQI(IHi, ILo, BPHi, BPLo, Cp)
message = IndexMessage(Ip)
print(f"PM10: {Cp}, AQI: {Ip}, Message: {message}")
report.write(f"PM10: {Cp}, AQI: {Ip}, Message: {message}\n")
report.close()
|
import json
import requests
from django.dispatch.dispatcher import receiver
from django.db.models.signals import post_save, post_delete
from med_social.utils import get_current_tenant
from notifications.models import Notification
from post_office import mail
from vendors.rfp_models import RFP, Bid, Message
@receiver(post_save, sender=Notification, dispatch_uid='notify_email_or_text')
def notify_email_or_text(sender, **kwargs):
notification = kwargs['instance']
if not kwargs['created'] or kwargs['raw']:
return
community = get_current_tenant()
if notification.recipient.username == 'kevin':
SLACK_WEBHOOK = 'https://hooks.slack.com/services/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'
if notification.target and hasattr(notification.target, 'get_absolute_url'):
link = notification.target.get_absolute_url()
elif notification.action_object and hasattr(notification.action_object, 'get_absolute_url'):
link = notification.action_object.get_absolute_url()
else:
link = notification.actor.get_absolute_url()
requests.post(SLACK_WEBHOOK, {'payload': json.dumps({'text': u'{} {}'.format(unicode(notification), community.get_full_url() + link)})})
if notification.action_object.__class__ in (RFP, Bid, Message):
user = notification.recipient
mail.send(user.email, template='New RFP Notification', context={'notification': notification, 'community': community})
|
import numpy as np
from fluiddyn.clusters.legi import Calcul2 as Cluster
from critical_Ra_sidewall import Ra_c_SW as Ra_c_SW_tests
prandtl = 0.71
dim = 2
dt_max = 0.05
end_time = 30
nb_procs = 10
nx = 8
order = 10
stretch_factor = 0.0
z_periodicity = False
cluster = Cluster()
cluster.commands_setting_env = [
"PROJET_DIR=/fsnet/project/meige/2020/20CONVECTION",
"source /etc/profile",
"source $PROJET_DIR/miniconda3/etc/profile.d/conda.sh",
"conda activate env-snek",
"export NEK_SOURCE_ROOT=$HOME/Dev/snek5000/lib/Nek5000",
"export PATH=$PATH:$NEK_SOURCE_ROOT/bin",
"export FLUIDSIM_PATH=$PROJET_DIR/numerical/",
]
for aspect_ratio, Ra_c_test in Ra_c_SW_tests.items():
ny = int(nx * aspect_ratio)
if nx * aspect_ratio - ny:
continue
Ra_side_nums = np.logspace(np.log10(Ra_c_test), np.log10(1.04 * Ra_c_test), 4)
Ra_vert_nums = np.logspace(
np.log10(2.04 * Ra_c_test), np.log10(3.04 * Ra_c_test), 4
)
for Ra_side_num in Ra_side_nums:
for Ra_vert_num in Ra_vert_nums:
command = (
f"run_simul_check_from_python.py -Pr {prandtl} -nx {nx} "
f"--order {order} --dt-max {dt_max} --end-time {end_time} -np {nb_procs} "
f"-a_y {aspect_ratio} --stretch-factor {stretch_factor} "
f"--Ra-side {Ra_side_num} --Ra-vert {Ra_vert_num}"
)
if z_periodicity:
command += " --z-periodicity"
print(command)
name_run = f"MC_asp{aspect_ratio:.3f}_Ra{Ra_side_num:.3e}_{Ra_vert_num:.3e}_Pr{prandtl:.2f}_msh{nx*order}x{round(nx*aspect_ratio)*order}"
cluster.submit_script(
command,
name_run=name_run,
nb_cores_per_node=nb_procs,
omp_num_threads=1,
ask=False,
)
|
# Copyright 2019 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from abc import ABCMeta, abstractmethod
from pathlib import Path
from typing import Iterable, List, Optional, Type, cast
from pants.base.specs import SingleAddress
from pants.core.goals.fmt import (
Fmt,
FmtOptions,
FmtResult,
LanguageFmtResults,
LanguageFmtTargets,
fmt,
)
from pants.core.util_rules.filter_empty_sources import TargetsWithSources, TargetsWithSourcesRequest
from pants.engine.addresses import Address
from pants.engine.fs import EMPTY_DIGEST, Digest, FileContent, MergeDigests, Workspace
from pants.engine.target import Sources, Target, TargetsWithOrigins, TargetWithOrigin
from pants.engine.unions import UnionMembership
from pants.testutil.engine.util import MockConsole, MockGet, create_goal_subsystem, run_rule
from pants.testutil.test_base import TestBase
class FortranSources(Sources):
pass
class FortranTarget(Target):
alias = "fortran"
core_fields = (FortranSources,)
class SmalltalkSources(Sources):
pass
class SmalltalkTarget(Target):
alias = "smalltalk"
core_fields = (SmalltalkSources,)
class InvalidSources(Sources):
pass
class MockLanguageTargets(LanguageFmtTargets, metaclass=ABCMeta):
@staticmethod
@abstractmethod
def stdout(_: Iterable[Address]) -> str:
pass
def language_fmt_results(self, result_digest: Digest) -> LanguageFmtResults:
addresses = [
target_with_origin.target.address for target_with_origin in self.targets_with_origins
]
# NB: For these tests, we only care that
# LanguageFmtResults.input != LanguageFmtResults.output so that we write to the build root.
return LanguageFmtResults(
(
FmtResult(
input=EMPTY_DIGEST,
output=EMPTY_DIGEST,
stdout=self.stdout(addresses),
stderr="",
),
),
input=EMPTY_DIGEST,
output=result_digest,
)
class FortranTargets(MockLanguageTargets):
required_fields = (FortranSources,)
@staticmethod
def stdout(addresses: Iterable[Address]) -> str:
return f"Fortran targets: {', '.join(str(address) for address in addresses)}"
class SmalltalkTargets(MockLanguageTargets):
required_fields = (SmalltalkSources,)
@staticmethod
def stdout(addresses: Iterable[Address]) -> str:
return f"Smalltalk targets: {', '.join(str(address) for address in addresses)}"
class InvalidTargets(MockLanguageTargets):
required_fields = (InvalidSources,)
@staticmethod
def stdout(addresses: Iterable[Address]) -> str:
return f"Invalid targets: {', '.join(str(address) for address in addresses)}"
class FmtTest(TestBase):
def setUp(self) -> None:
super().setUp()
self.fortran_file = FileContent("formatted.f98", b"READ INPUT TAPE 5\n")
self.smalltalk_file = FileContent("formatted.st", b"y := self size + super size.')\n")
self.fortran_digest = self.make_snapshot(
{self.fortran_file.path: self.fortran_file.content.decode()}
).digest
self.merged_digest = self.make_snapshot(
{fc.path: fc.content.decode() for fc in (self.fortran_file, self.smalltalk_file)}
).digest
@staticmethod
def make_target_with_origin(
address: Optional[Address] = None, *, target_cls: Type[Target] = FortranTarget
) -> TargetWithOrigin:
if address is None:
address = Address.parse(":tests")
return TargetWithOrigin(
target_cls({}, address=address),
origin=SingleAddress(directory=address.spec_path, name=address.target_name),
)
def run_fmt_rule(
self,
*,
language_target_collection_types: List[Type[LanguageFmtTargets]],
targets: List[TargetWithOrigin],
result_digest: Digest,
per_target_caching: bool,
include_sources: bool = True,
) -> str:
console = MockConsole(use_colors=False)
union_membership = UnionMembership({LanguageFmtTargets: language_target_collection_types})
result: Fmt = run_rule(
fmt,
rule_args=[
console,
TargetsWithOrigins(targets),
create_goal_subsystem(FmtOptions, per_target_caching=per_target_caching),
Workspace(self.scheduler),
union_membership,
],
mock_gets=[
MockGet(
product_type=LanguageFmtResults,
subject_type=LanguageFmtTargets,
mock=lambda language_targets_collection: language_targets_collection.language_fmt_results(
result_digest
),
),
MockGet(
product_type=TargetsWithSources,
subject_type=TargetsWithSourcesRequest,
mock=lambda tgts: TargetsWithSources(tgts if include_sources else ()),
),
MockGet(
product_type=Digest, subject_type=MergeDigests, mock=lambda _: result_digest,
),
],
union_membership=union_membership,
)
assert result.exit_code == 0
return cast(str, console.stdout.getvalue())
def assert_workspace_modified(
self, *, fortran_formatted: bool, smalltalk_formatted: bool
) -> None:
fortran_file = Path(self.build_root, self.fortran_file.path)
smalltalk_file = Path(self.build_root, self.smalltalk_file.path)
if fortran_formatted:
assert fortran_file.is_file()
assert fortran_file.read_text() == self.fortran_file.content.decode()
if smalltalk_formatted:
assert smalltalk_file.is_file()
assert smalltalk_file.read_text() == self.smalltalk_file.content.decode()
def test_empty_target_noops(self) -> None:
def assert_noops(*, per_target_caching: bool) -> None:
stdout = self.run_fmt_rule(
language_target_collection_types=[FortranTargets],
targets=[self.make_target_with_origin()],
result_digest=self.fortran_digest,
per_target_caching=per_target_caching,
include_sources=False,
)
assert stdout.strip() == ""
self.assert_workspace_modified(fortran_formatted=False, smalltalk_formatted=False)
assert_noops(per_target_caching=False)
assert_noops(per_target_caching=True)
def test_invalid_target_noops(self) -> None:
def assert_noops(*, per_target_caching: bool) -> None:
stdout = self.run_fmt_rule(
language_target_collection_types=[InvalidTargets],
targets=[self.make_target_with_origin()],
result_digest=self.fortran_digest,
per_target_caching=per_target_caching,
)
assert stdout.strip() == ""
self.assert_workspace_modified(fortran_formatted=False, smalltalk_formatted=False)
assert_noops(per_target_caching=False)
assert_noops(per_target_caching=True)
def test_single_language_with_single_target(self) -> None:
address = Address.parse(":tests")
target_with_origin = self.make_target_with_origin(address)
def assert_expected(*, per_target_caching: bool) -> None:
stdout = self.run_fmt_rule(
language_target_collection_types=[FortranTargets],
targets=[target_with_origin],
result_digest=self.fortran_digest,
per_target_caching=per_target_caching,
)
assert stdout.strip() == FortranTargets.stdout([address])
self.assert_workspace_modified(fortran_formatted=True, smalltalk_formatted=False)
assert_expected(per_target_caching=False)
assert_expected(per_target_caching=True)
def test_single_language_with_multiple_targets(self) -> None:
addresses = [Address.parse(":t1"), Address.parse(":t2")]
def get_stdout(*, per_target_caching: bool) -> str:
stdout = self.run_fmt_rule(
language_target_collection_types=[FortranTargets],
targets=[self.make_target_with_origin(addr) for addr in addresses],
result_digest=self.fortran_digest,
per_target_caching=per_target_caching,
)
self.assert_workspace_modified(fortran_formatted=True, smalltalk_formatted=False)
return stdout
assert get_stdout(per_target_caching=False).strip() == FortranTargets.stdout(addresses)
assert get_stdout(per_target_caching=True).splitlines() == [
FortranTargets.stdout([address]) for address in addresses
]
def test_multiple_languages_with_single_targets(self) -> None:
fortran_address = Address.parse(":fortran")
smalltalk_address = Address.parse(":smalltalk")
def assert_expected(*, per_target_caching: bool) -> None:
stdout = self.run_fmt_rule(
language_target_collection_types=[FortranTargets, SmalltalkTargets],
targets=[
self.make_target_with_origin(fortran_address, target_cls=FortranTarget),
self.make_target_with_origin(smalltalk_address, target_cls=SmalltalkTarget),
],
result_digest=self.merged_digest,
per_target_caching=per_target_caching,
)
assert stdout.splitlines() == [
FortranTargets.stdout([fortran_address]),
SmalltalkTargets.stdout([smalltalk_address]),
]
self.assert_workspace_modified(fortran_formatted=True, smalltalk_formatted=True)
assert_expected(per_target_caching=False)
assert_expected(per_target_caching=True)
def test_multiple_languages_with_multiple_targets(self) -> None:
fortran_addresses = [Address.parse(":py1"), Address.parse(":py2")]
smalltalk_addresses = [Address.parse(":py1"), Address.parse(":py2")]
fortran_targets = [
self.make_target_with_origin(addr, target_cls=FortranTarget)
for addr in fortran_addresses
]
smalltalk_targets = [
self.make_target_with_origin(addr, target_cls=SmalltalkTarget)
for addr in smalltalk_addresses
]
def get_stdout(*, per_target_caching: bool) -> str:
stdout = self.run_fmt_rule(
language_target_collection_types=[FortranTargets, SmalltalkTargets],
targets=[*fortran_targets, *smalltalk_targets],
result_digest=self.merged_digest,
per_target_caching=per_target_caching,
)
self.assert_workspace_modified(fortran_formatted=True, smalltalk_formatted=True)
return stdout
assert get_stdout(per_target_caching=False).splitlines() == [
FortranTargets.stdout(fortran_addresses),
SmalltalkTargets.stdout(smalltalk_addresses),
]
assert get_stdout(per_target_caching=True).splitlines() == [
*(FortranTargets.stdout([address]) for address in fortran_addresses),
*(SmalltalkTargets.stdout([address]) for address in smalltalk_addresses),
]
|
# -*- coding: utf-8 -*-
################################################################################
## Form generated from reading UI file 'ui_dialogmMtSDl.ui'
##
## Created by: Qt User Interface Compiler version 5.15.2
##
## WARNING! All changes made in this file will be lost when recompiling UI file!
################################################################################
from PySide2.QtCore import *
from PySide2.QtGui import *
from PySide2.QtWidgets import *
import files_rc
class Ui_Ui_dialog(object):
def setupUi(self, Ui_dialog):
if not Ui_dialog.objectName():
Ui_dialog.setObjectName(u"Ui_dialog")
Ui_dialog.resize(376, 269)
Ui_dialog.setBaseSize(QSize(332, 30))
self.centralwidget = QWidget(Ui_dialog)
self.centralwidget.setObjectName(u"centralwidget")
self.horizontalLayout = QHBoxLayout(self.centralwidget)
self.horizontalLayout.setObjectName(u"horizontalLayout")
self.main_box_frame = QFrame(self.centralwidget)
self.main_box_frame.setObjectName(u"main_box_frame")
self.main_box_frame.setMaximumSize(QSize(350, 250))
self.main_box_frame.setStyleSheet(u"QFrame#main_box_frame {\n"
"\n"
" background-color: rgb(39, 44, 54);\n"
" border: 1px solid white;\n"
"\n"
"}")
self.main_box_frame.setFrameShape(QFrame.StyledPanel)
self.main_box_frame.setFrameShadow(QFrame.Raised)
self.verticalLayout = QVBoxLayout(self.main_box_frame)
self.verticalLayout.setSpacing(0)
self.verticalLayout.setObjectName(u"verticalLayout")
self.verticalLayout.setContentsMargins(0, 2, 0, 9)
self.top_frame = QFrame(self.main_box_frame)
self.top_frame.setObjectName(u"top_frame")
self.top_frame.setMinimumSize(QSize(345, 30))
self.top_frame.setMaximumSize(QSize(332, 30))
self.top_frame.setStyleSheet(u"QFrame#top_frame{\n"
"background-color: rgb(35, 35, 35);\n"
"}")
self.top_frame.setFrameShape(QFrame.NoFrame)
self.top_frame.setFrameShadow(QFrame.Raised)
self.horizontalLayout_2 = QHBoxLayout(self.top_frame)
self.horizontalLayout_2.setSpacing(0)
self.horizontalLayout_2.setObjectName(u"horizontalLayout_2")
self.horizontalLayout_2.setContentsMargins(0, 0, 0, 0)
self.frame_2 = QFrame(self.top_frame)
self.frame_2.setObjectName(u"frame_2")
self.frame_2.setFrameShape(QFrame.StyledPanel)
self.frame_2.setFrameShadow(QFrame.Raised)
self.verticalLayout_3 = QVBoxLayout(self.frame_2)
self.verticalLayout_3.setSpacing(0)
self.verticalLayout_3.setObjectName(u"verticalLayout_3")
self.verticalLayout_3.setContentsMargins(5, 3, 0, 0)
self.label = QLabel(self.frame_2)
self.label.setObjectName(u"label")
font = QFont()
font.setPointSize(17)
self.label.setFont(font)
self.label.setStyleSheet(u"color: rgba(255, 255, 255, 200);")
self.verticalLayout_3.addWidget(self.label)
self.horizontalLayout_2.addWidget(self.frame_2, 0, Qt.AlignLeft)
self.frame = QFrame(self.top_frame)
self.frame.setObjectName(u"frame")
sizePolicy = QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.frame.sizePolicy().hasHeightForWidth())
self.frame.setSizePolicy(sizePolicy)
self.frame.setFrameShape(QFrame.StyledPanel)
self.frame.setFrameShadow(QFrame.Raised)
self.horizontalLayout_3 = QHBoxLayout(self.frame)
self.horizontalLayout_3.setSpacing(0)
self.horizontalLayout_3.setObjectName(u"horizontalLayout_3")
self.horizontalLayout_3.setContentsMargins(5, 0, 0, 0)
self.title_label = QLabel(self.frame)
self.title_label.setObjectName(u"title_label")
font1 = QFont()
font1.setPointSize(10)
self.title_label.setFont(font1)
self.title_label.setStyleSheet(u"color: rgba(255, 255, 255, 200);")
self.horizontalLayout_3.addWidget(self.title_label)
self.horizontalLayout_2.addWidget(self.frame, 0, Qt.AlignVCenter)
self.frame_3 = QFrame(self.top_frame)
self.frame_3.setObjectName(u"frame_3")
self.frame_3.setMinimumSize(QSize(0, 0))
self.frame_3.setStyleSheet(u"")
self.frame_3.setFrameShape(QFrame.StyledPanel)
self.frame_3.setFrameShadow(QFrame.Raised)
self.verticalLayout_4 = QVBoxLayout(self.frame_3)
self.verticalLayout_4.setSpacing(0)
self.verticalLayout_4.setObjectName(u"verticalLayout_4")
self.verticalLayout_4.setContentsMargins(0, 3, 3, 0)
self.close_btn = QPushButton(self.frame_3)
self.close_btn.setObjectName(u"close_btn")
self.close_btn.setMinimumSize(QSize(20, 20))
self.close_btn.setMaximumSize(QSize(20, 20))
self.close_btn.setStyleSheet(u"QPushButton { \n"
" border: none;\n"
" background-color: transparent;\n"
"}\n"
"QPushButton:hover {\n"
" background-color: rgb(52, 59, 72);\n"
"}\n"
"QPushButton:pressed { \n"
" background-color: rgb(85, 170, 255);\n"
"}")
icon = QIcon()
icon.addFile(u":/btns/icons/btns/cil-x.png", QSize(), QIcon.Normal, QIcon.Off)
self.close_btn.setIcon(icon)
self.verticalLayout_4.addWidget(self.close_btn)
self.horizontalLayout_2.addWidget(self.frame_3, 0, Qt.AlignLeft|Qt.AlignTop)
self.verticalLayout.addWidget(self.top_frame)
self.content_frame = QFrame(self.main_box_frame)
self.content_frame.setObjectName(u"content_frame")
sizePolicy1 = QSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
sizePolicy1.setHorizontalStretch(0)
sizePolicy1.setVerticalStretch(0)
sizePolicy1.setHeightForWidth(self.content_frame.sizePolicy().hasHeightForWidth())
self.content_frame.setSizePolicy(sizePolicy1)
self.content_frame.setFrameShape(QFrame.StyledPanel)
self.content_frame.setFrameShadow(QFrame.Raised)
self.horizontalLayout_5 = QHBoxLayout(self.content_frame)
self.horizontalLayout_5.setObjectName(u"horizontalLayout_5")
self.icon_frame = QFrame(self.content_frame)
self.icon_frame.setObjectName(u"icon_frame")
self.icon_frame.setFrameShape(QFrame.StyledPanel)
self.icon_frame.setFrameShadow(QFrame.Raised)
self.horizontalLayout_6 = QHBoxLayout(self.icon_frame)
self.horizontalLayout_6.setObjectName(u"horizontalLayout_6")
self.icon_label = QLabel(self.icon_frame)
self.icon_label.setObjectName(u"icon_label")
font2 = QFont()
font2.setFamily(u"dripicons-v2")
font2.setPointSize(50)
font2.setBold(True)
font2.setWeight(75)
self.icon_label.setFont(font2)
self.icon_label.setStyleSheet(u"color: rgba(255, 255, 255, 200);")
self.horizontalLayout_6.addWidget(self.icon_label)
self.horizontalLayout_5.addWidget(self.icon_frame, 0, Qt.AlignLeft|Qt.AlignVCenter)
self.main_content = QFrame(self.content_frame)
self.main_content.setObjectName(u"main_content")
sizePolicy.setHeightForWidth(self.main_content.sizePolicy().hasHeightForWidth())
self.main_content.setSizePolicy(sizePolicy)
self.main_content.setFrameShape(QFrame.StyledPanel)
self.main_content.setFrameShadow(QFrame.Raised)
self.horizontalLayout_7 = QHBoxLayout(self.main_content)
self.horizontalLayout_7.setObjectName(u"horizontalLayout_7")
self.content_label = QLabel(self.main_content)
self.content_label.setObjectName(u"content_label")
sizePolicy2 = QSizePolicy(QSizePolicy.Ignored, QSizePolicy.Preferred)
sizePolicy2.setHorizontalStretch(0)
sizePolicy2.setVerticalStretch(0)
sizePolicy2.setHeightForWidth(self.content_label.sizePolicy().hasHeightForWidth())
self.content_label.setSizePolicy(sizePolicy2)
self.content_label.setFont(font1)
self.content_label.setStyleSheet(u"color: rgba(255, 255, 255, 200);")
self.content_label.setWordWrap(True)
self.horizontalLayout_7.addWidget(self.content_label, 0, Qt.AlignHCenter|Qt.AlignVCenter)
self.horizontalLayout_5.addWidget(self.main_content)
self.verticalLayout.addWidget(self.content_frame)
self.btn_frame = QFrame(self.main_box_frame)
self.btn_frame.setObjectName(u"btn_frame")
self.btn_frame.setMaximumSize(QSize(16777215, 50))
self.btn_frame.setFrameShape(QFrame.StyledPanel)
self.btn_frame.setFrameShadow(QFrame.Raised)
self.verticalLayout_2 = QVBoxLayout(self.btn_frame)
self.verticalLayout_2.setSpacing(0)
self.verticalLayout_2.setObjectName(u"verticalLayout_2")
self.verticalLayout_2.setContentsMargins(0, 0, 0, 0)
self.ok_btn = QPushButton(self.btn_frame)
self.ok_btn.setObjectName(u"ok_btn")
self.ok_btn.setMinimumSize(QSize(80, 40))
self.ok_btn.setMaximumSize(QSize(150, 40))
font3 = QFont()
font3.setPointSize(10)
font3.setBold(True)
font3.setWeight(75)
self.ok_btn.setFont(font3)
self.ok_btn.setStyleSheet(u"QPushButton#ok_btn{\n"
" background-color: rgba(2, 65, 118, 255);\n"
" color: rgba(255, 255, 255, 200);\n"
" border-radius: 5px;\n"
"}\n"
"QPushButton#ok_btn:pressed{\n"
" padding-left: 5px;\n"
" padding-top: 5px;\n"
" background-color: rgba(2, 65, 118, 100);\n"
" background-position:calc(100% - 10px)center;\n"
"}\n"
"\n"
"QPushButton#ok_btn:hover{\n"
" background-color: rgba(2, 65, 118, 200);\n"
"}")
self.verticalLayout_2.addWidget(self.ok_btn, 0, Qt.AlignRight|Qt.AlignVCenter)
self.verticalLayout.addWidget(self.btn_frame, 0, Qt.AlignHCenter|Qt.AlignTop)
self.horizontalLayout.addWidget(self.main_box_frame)
Ui_dialog.setCentralWidget(self.centralwidget)
self.retranslateUi(Ui_dialog)
self.ok_btn.clicked.connect(Ui_dialog.close)
self.close_btn.clicked.connect(Ui_dialog.close)
QMetaObject.connectSlotsByName(Ui_dialog)
# setupUi
def retranslateUi(self, Ui_dialog):
Ui_dialog.setWindowTitle(QCoreApplication.translate("Ui_dialog", u"Warning", None))
self.label.setText(QCoreApplication.translate("Ui_dialog", u"\ue063", None))
self.title_label.setText(QCoreApplication.translate("Ui_dialog", u"This is the title of the label", None))
self.close_btn.setText("")
self.icon_label.setText(QCoreApplication.translate("Ui_dialog", u"\ue063", None))
self.content_label.setText(QCoreApplication.translate("Ui_dialog", u"This is a warning", None))
self.ok_btn.setText(QCoreApplication.translate("Ui_dialog", u"Ok", None))
# retranslateUi
|
import pytest
from unittest.mock import create_autospec, Mock, call
from photorec.repository.photo import RepoPhoto
from photorec.services.storage import ServiceStorageS3
from photorec.validators.nickname import ValidatorNickname, NicknameError
from photorec.validators.photo import ValidatorPhoto, PhotoNotFoundError
from photorec.use_cases.photos.delete_photo import DeletePhotoCommand
@pytest.fixture
def repo_photo():
return create_autospec(RepoPhoto, instance=True)
@pytest.fixture()
def service_storage():
return create_autospec(ServiceStorageS3, instance=True)
def test_given_no_nickname_when_delete_photo_then_error_nickname_not_defined(
repo_photo, service_storage):
validator = create_autospec(ValidatorNickname)
validator.validate.side_effect = NicknameError()
command = DeletePhotoCommand(
repo__photo=repo_photo,
service__storage=service_storage,
validator__nickname=validator,
validator__photo=Mock()
)
with pytest.raises(NicknameError):
command.execute(nickname=Mock(), photo=Mock())
def test_given_nickname_uuid_when_delete_not_existing_photo_then_raise_and_no_changes(
repo_photo, service_storage):
repo_photo.get.return_value = None
photo = 'd48f920c-3994-4ac7-9400-17055854f645.jpeg'
nickname = 'nickname'
command = DeletePhotoCommand(
repo__photo=repo_photo,
service__storage=service_storage,
validator__nickname=Mock(),
validator__photo=ValidatorPhoto()
)
with pytest.raises(PhotoNotFoundError):
command.execute(nickname=nickname, photo=photo)
repo_photo.get.assert_called_once()
repo_photo.delete.assert_not_called()
service_storage.delete.assert_not_called()
def test_given_photo_when_delete_existing_photo_then_delete(repo_photo, service_storage):
thumbnail = 'thumbnail/d48f920c-3994-4ac7-9400-17055854f645.jpeg'
photo = 'd48f920c-3994-4ac7-9400-17055854f645.jpeg'
nickname = 'nickname'
record = {
'nickname': nickname,
'photo': photo,
'thumbnail': thumbnail,
'tag': 'tag',
'likes': 10
}
repo_photo.get.return_value = record
command = DeletePhotoCommand(
repo__photo=repo_photo,
service__storage=service_storage,
validator__nickname=Mock(),
validator__photo=Mock()
)
command.execute(nickname=nickname, photo=photo)
request = {'photo': photo}
repo_photo.get.assert_called_once()
repo_photo.delete.assert_called_once_with(key=request)
service_storage.delete.assert_has_calls([
call(key=thumbnail),
call(key=photo)
])
|
# Copyright (c) 2018 StackHPC Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Avoid shadowing of system copy module by copy action plugin.
import abc
from copy import deepcopy
import itertools
from ansible.errors import AnsibleActionFail
from ansible.module_utils._text import to_text
from ansible.plugins.action import ActionBase
class ActionModule(ActionBase):
def run(self, tmp=None, task_vars=None):
"""
Produce a dict of Tenks state.
Actions include:
* Generating indices for physical networks for each hypervisor.
* Scheduling specifications of nodes by type onto hypervisors.
The following task arguments are accepted:
:specs: A list of node specifications to be instantiated. Required.
:node_types: A dict mapping node type names to a dict of properties
of that type.
:node_name_prefix: A string with which to prefix all sequential
node names.
:vol_name_prefix: A string with which to prefix all sequential
volume names.
:state: A dict of existing Tenks state (as produced by a previous
run of this module), to be taken into account in this run.
Optional.
:prune_only: A boolean which, if set, will instruct the plugin to
only remove any nodes with state='absent' from
`state`.
:returns: A dict of Tenks state for each hypervisor, keyed by the
hostname of the hypervisor to which the state refers.
"""
result = super(ActionModule, self).run(tmp, task_vars)
# Initialise our return dict.
result['result'] = {}
del tmp # tmp no longer has any effect
self.args = self._task.args
self.localhost_vars = task_vars['hostvars']['localhost']
self.hypervisor_vars = {
hv: hv_hostvars
for hv, hv_hostvars in task_vars['hostvars'].items()
if hv in task_vars['groups']['hypervisors']
}
self._validate_args()
if self.args['prune_only']:
self._prune_absent_nodes()
else:
# Modify the state as necessary.
self._set_physnet_idxs()
self._process_specs()
# Return the modified state.
result['result'] = self.args['state']
return result
def _prune_absent_nodes(self):
"""
Remove any nodes with state='absent' from the state dict.
"""
for hyp in self.args['state'].values():
hyp['nodes'] = [n for n in hyp['nodes']
if n.get('state') != 'absent']
def _set_physnet_idxs(self):
"""
Set the index of each physnet for each host.
Use the specified physnet mappings and any existing physnet indices to
ensure the generated indices are consistent.
"""
state = self.args['state']
for hostname, hostvars in self.hypervisor_vars.items():
# The desired mappings given in the Tenks configuration. These do
# not include IDXs which are an implementation detail of Tenks.
specified_mappings = hostvars['physnet_mappings']
try:
# The physnet indices currently in the state file.
old_idxs = state[hostname]['physnet_indices']
except KeyError:
# The hypervisor is new since the last run.
state[hostname] = {}
old_idxs = {}
new_idxs = {}
next_idx = 0
used_idxs = list(old_idxs.values())
for name, dev in specified_mappings.items():
try:
# We need to re-use the IDXs of any existing physnets.
idx = old_idxs[name]
except KeyError:
# New physnet requires a new IDX.
while next_idx in used_idxs:
next_idx += 1
used_idxs.append(next_idx)
idx = next_idx
new_idxs[name] = idx
state[hostname]['physnet_indices'] = new_idxs
def _process_specs(self):
"""
Ensure the correct nodes are present in `state`.
Remove unnecessary nodes by marking as 'absent' and schedule new nodes
to hypervisors such that the nodes in `state` match what's specified in
`specs`.
"""
# Iterate through existing nodes, marking for deletion where necessary.
for hyp in self.args['state'].values():
# Absent nodes cannot fulfil a spec.
for node in [n for n in hyp.get('nodes', [])
if n.get('state') != 'absent']:
if ((self.localhost_vars['cmd'] == 'teardown' or
not self._tick_off_node(self.args['specs'], node))):
# We need to delete this node, since it exists but does not
# fulfil any spec.
node['state'] = 'absent'
if self.localhost_vars['cmd'] != 'teardown':
# Ensure all hosts exist in state.
for hostname in self.hypervisor_vars:
self.args['state'].setdefault(hostname, {})
self.args['state'][hostname].setdefault('nodes', [])
# Now create all the required new nodes.
scheduler = RoundRobinScheduler(self.hypervisor_vars,
self.args['state'])
namer = Namer(self.args['state'])
self._create_nodes(scheduler, namer)
def _tick_off_node(self, specs, node):
"""
Tick off an existing node as fulfilling a node specification.
If `node` is required in `specs`, decrement that spec's count and
return True. Otherwise, return False.
"""
# Attributes that a spec and a node have to have in common for the node
# to count as an 'instance' of the spec.
MATCHING_ATTRS = {'type', 'ironic_config'}
for spec in specs:
if (all(spec.get(attr) == node.get(attr)
for attr in MATCHING_ATTRS)
and spec['count'] > 0):
spec['count'] -= 1
return True
return False
def _create_nodes(self, scheduler, namer):
"""
Create new nodes to fulfil the specs.
"""
# Anything left in specs needs to be created.
for spec in self.args['specs']:
for _ in range(spec['count']):
node = self._gen_node(spec['type'], spec.get('ironic_config'))
hostname, ipmi_port = scheduler.choose_host(node)
node_name_prefix = spec.get('node_name_prefix',
self.args['node_name_prefix'])
node['name'] = namer.get_name(node_name_prefix)
# Sequentially number the volume names.
vol_name_prefix = spec.get('vol_name_prefix',
self.args['vol_name_prefix'])
for vol_idx, vol in enumerate(node['volumes']):
vol['name'] = ("%s%s%d"
% (node['name'], vol_name_prefix, vol_idx))
node['ipmi_port'] = ipmi_port
self.args['state'][hostname]['nodes'].append(node)
def _gen_node(self, type_name, ironic_config=None):
"""
Generate a node description.
A name will not be assigned at this point because we don't know which
hypervisor the node will be scheduled to.
"""
node_type = self.args['node_types'][type_name]
node = deepcopy(node_type)
# All nodes need an Ironic driver.
node.setdefault(
'ironic_driver',
self.localhost_vars['default_ironic_driver']
)
# Set the type name, for future reference.
node['type'] = type_name
# Ironic config is not mandatory.
if ironic_config:
node['ironic_config'] = ironic_config
return node
def _validate_args(self):
if self.args is None:
self.args = {}
REQUIRED_ARGS = {'specs', 'node_types'}
# Var names and their defaults.
OPTIONAL_ARGS = [
('node_name_prefix', 'tk'),
# state is optional, since if this is the first run there won't be
# any yet.
('state', {}),
('vol_name_prefix', 'vol'),
('prune_only', False),
]
for arg in OPTIONAL_ARGS:
if arg[0] not in self.args:
self.args[arg[0]] = arg[1]
# No arguments are required in prune_only mode.
if not self.args['prune_only']:
for arg in REQUIRED_ARGS:
if arg not in self.args:
e = "The parameter '%s' must be specified." % arg
raise AnsibleActionFail(to_text(e))
if not self.hypervisor_vars:
e = ("There are no hosts in the 'hypervisors' group to which "
"we can schedule.")
raise AnsibleActionFail(to_text(e))
for spec in self.args['specs']:
if 'type' not in spec or 'count' not in spec:
e = ("All specs must contain a `type` and a `count`. "
"Offending spec: %s" % spec)
raise AnsibleActionFail(to_text(e))
class Namer(object):
"""
Helper object for naming nodes with a prefix and index.
"""
def __init__(self, state):
self.existing_names = {node['name']
for hv_state in state.values()
for node in hv_state['nodes']
if node.get('state') != 'absent'}
# Map from node name prefix to the next index to try.
self.next_idxs = {}
def get_name(self, node_name_prefix):
"""Return the next available name for the given prefix."""
idx = self.next_idxs.setdefault(node_name_prefix, 0)
while True:
candidate = "%s%d" % (node_name_prefix, idx)
if candidate not in self.existing_names:
self.next_idxs[node_name_prefix] = idx + 1
return candidate
idx += 1
class Host(object):
"""
Class representing a hypervisor host.
"""
def __init__(self, hostname, hostvars, state):
self.hostname = hostname
self.physnet_mappings = hostvars['physnet_mappings']
# Keep track of unused IPMI ports in the available range.
free_ipmi_ports = set(
range(hostvars['ipmi_port_range_start'],
hostvars['ipmi_port_range_end'] + 1))
for node in state['nodes']:
if node.get('state') != 'absent' and node['ipmi_port']:
free_ipmi_ports.remove(node['ipmi_port'])
self.free_ipmi_ports = sorted(free_ipmi_ports)
def reserve(self):
"""
Return the next available IPMI port for a node on this host.
The port is also removed from the available ports.
:returns: The next available IPMI port.
"""
return self.free_ipmi_ports.pop(0)
def host_passes(self, node):
"""
Perform checks to ascertain whether this host can support this node.
"""
# Is there a free IPMI port?
if not self.free_ipmi_ports:
return False
# Check that the host is connected to all physical networks that the
# node requires.
return all(pn in self.physnet_mappings.keys()
for pn in node['physical_networks'])
class Scheduler(object, metaclass=abc.ABCMeta):
"""
Abstract class representing a 'method' of scheduling nodes to hosts.
"""
def __init__(self, hostvars, state):
# Dict mapping a hypervisor hostname to a Host object for the
# hypervisor.
self.hosts = {hostname: Host(hostname, host_hv, state[hostname])
for hostname, host_hv in hostvars.items()}
@abc.abstractmethod
def choose_host(self, node):
"""Abstract method to choose a host to which we can schedule `node`.
Returns a tuple of the hostname of the chosen host and the IPMI port
for use by this node on the host.
"""
raise NotImplementedError()
class RoundRobinScheduler(Scheduler):
"""
Schedule nodes in a round-robin fashion to hosts.
"""
def __init__(self, hostvars, state):
super(RoundRobinScheduler, self).__init__(hostvars, state)
self._host_cycle = itertools.cycle(self.hosts.keys())
def choose_host(self, node):
count = 0
while True:
# Ensure we don't get into an infinite loop if no hosts are
# available.
if count >= len(self.hosts):
e = ("No hypervisors are left that can support the node %s."
% node)
raise AnsibleActionFail(to_text(e))
count += 1
hostname = next(self._host_cycle)
host = self.hosts[hostname]
if host.host_passes(node):
ipmi_port = host.reserve()
return hostname, ipmi_port
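# --- Illustrative sketch (not part of the plugin); the hostvars/state shapes
# below are assumptions inferred from Host.__init__, for demonstration only.
if __name__ == '__main__':
    hostvars = {'hv0': {'physnet_mappings': {'physnet1': 'eth0'},
                        'ipmi_port_range_start': 6230,
                        'ipmi_port_range_end': 6239}}
    state = {'hv0': {'nodes': [{'name': 'tk0', 'ipmi_port': 6230}]}}
    scheduler = RoundRobinScheduler(hostvars, state)
    node = {'physical_networks': ['physnet1']}
    print(scheduler.choose_host(node))   # -> ('hv0', 6231): 6230 is taken
    print(Namer(state).get_name('tk'))   # -> 'tk1': 'tk0' already exists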
|
"""Check string suffix.
Set boolean _b to _true if string _s ends with string _suffix, _false otherwise.
Source: programming-idioms.org
"""
# Implementation author: nickname
# Created on 2016-02-18T16:58:02.566137Z
# Last modified on 2016-02-18T16:58:02.566137Z
# Version 1
b = s.endswith(suffix)
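# Example (assumed inputs, for illustration): with s = "hello.py" and
# suffix = ".py", b evaluates to True.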
|
import cv2
import os
import numpy as np
test_28 = [
'01d32d72-bacd-11e8-b2b8-ac1f6b6435d0',
'04e82088-bad4-11e8-b2b8-ac1f6b6435d0',
'1bcde1d2-bac7-11e8-b2b7-ac1f6b6435d0',
'1ebb230a-bad1-11e8-b2b8-ac1f6b6435d0',
'3539e7f8-bad4-11e8-b2b8-ac1f6b6435d0',
'3fe1a9f8-bad8-11e8-b2b9-ac1f6b6435d0',
'4104fc8e-bad5-11e8-b2b9-ac1f6b6435d0',
'4422be80-bac9-11e8-b2b8-ac1f6b6435d0',
'590414c8-bad0-11e8-b2b8-ac1f6b6435d0',
'70d9b858-bad7-11e8-b2b9-ac1f6b6435d0',
'745448b6-bac5-11e8-b2b7-ac1f6b6435d0',
'76f171d6-bad2-11e8-b2b8-ac1f6b6435d0',
'795d77fe-bacc-11e8-b2b8-ac1f6b6435d0',
'86c164e6-bac7-11e8-b2b7-ac1f6b6435d0',
'9103658c-bac5-11e8-b2b7-ac1f6b6435d0',
'92a03f8c-baca-11e8-b2b8-ac1f6b6435d0',
'97989b76-bad2-11e8-b2b8-ac1f6b6435d0',
'a14399ee-bad4-11e8-b2b8-ac1f6b6435d0',
'a1f0873a-bacf-11e8-b2b8-ac1f6b6435d0',
'af2177a0-bacc-11e8-b2b8-ac1f6b6435d0',
'b15e17be-bac5-11e8-b2b7-ac1f6b6435d0',
'ca35a3d0-bad2-11e8-b2b8-ac1f6b6435d0',
'd8e64b1c-baca-11e8-b2b8-ac1f6b6435d0',
'defda53c-bace-11e8-b2b8-ac1f6b6435d0',
'dfd49804-bad7-11e8-b2b9-ac1f6b6435d0',
'fb19471e-bad6-11e8-b2b9-ac1f6b6435d0',
'fd3fdc76-bad4-11e8-b2b8-ac1f6b6435d0'
]
train_28 = [
'05d32f36-bba3-11e8-b2b9-ac1f6b6435d0',
'082a828a-bbbb-11e8-b2ba-ac1f6b6435d0',
'0afda11a-bba0-11e8-b2b9-ac1f6b6435d0',
'18df69fc-bbb5-11e8-b2ba-ac1f6b6435d0',
'2b3ce424-bba8-11e8-b2ba-ac1f6b6435d0',
'43f6bd88-bbc5-11e8-b2bc-ac1f6b6435d0',
'70b97ed2-bbac-11e8-b2ba-ac1f6b6435d0',
'802998d4-bbbb-11e8-b2ba-ac1f6b6435d0',
'b1131086-bb9f-11e8-b2b9-ac1f6b6435d0',
'c9806c74-bbca-11e8-b2bc-ac1f6b6435d0',
'e403806e-bbbf-11e8-b2bb-ac1f6b6435d0'
]
test_16 = [
'04fe60d6-bad5-11e8-b2b8-ac1f6b6435d0',
'06e54e5e-bac7-11e8-b2b7-ac1f6b6435d0',
'0d238c04-bad6-11e8-b2b9-ac1f6b6435d0',
'10748996-baca-11e8-b2b8-ac1f6b6435d0',
'11693760-bada-11e8-b2b9-ac1f6b6435d0',
'30679a8c-bace-11e8-b2b8-ac1f6b6435d0',
'3b2d1274-bacb-11e8-b2b8-ac1f6b6435d0',
'443b81cc-bac9-11e8-b2b8-ac1f6b6435d0',
'46b3ac54-bad8-11e8-b2b9-ac1f6b6435d0',
'56e5eac6-bac7-11e8-b2b7-ac1f6b6435d0',
'5a50c16a-baca-11e8-b2b8-ac1f6b6435d0',
'6742fb2e-bac8-11e8-b2b8-ac1f6b6435d0',
'7fcba676-bad9-11e8-b2b9-ac1f6b6435d0',
'80cd02a6-bacd-11e8-b2b8-ac1f6b6435d0',
'89975d50-bad7-11e8-b2b9-ac1f6b6435d0',
'b5764aca-bace-11e8-b2b8-ac1f6b6435d0',
'c43aea58-bacd-11e8-b2b8-ac1f6b6435d0',
'c583acc0-bacc-11e8-b2b8-ac1f6b6435d0',
'c7109768-bad8-11e8-b2b9-ac1f6b6435d0',
'c7f2fd0c-bad2-11e8-b2b8-ac1f6b6435d0',
'd0812898-bad4-11e8-b2b8-ac1f6b6435d0',
'd342255e-bada-11e8-b2b9-ac1f6b6435d0',
'db3fcdd8-bacb-11e8-b2b8-ac1f6b6435d0',
'de0ed5c2-bad0-11e8-b2b8-ac1f6b6435d0',
'e29dd5f6-bac7-11e8-b2b7-ac1f6b6435d0',
'ee922f9e-bacf-11e8-b2b8-ac1f6b6435d0',
'fdbd4f3a-bac5-11e8-b2b7-ac1f6b6435d0'
]
train_16 = [
'381e477c-bb9a-11e8-b2b9-ac1f6b6435d0',
'4f4433ea-bbad-11e8-b2ba-ac1f6b6435d0',
'58cb3d80-bb9b-11e8-b2b9-ac1f6b6435d0',
'5b18c856-bbc2-11e8-b2bb-ac1f6b6435d0',
'61a51908-bbc8-11e8-b2bc-ac1f6b6435d0',
'68a3f5f4-bba4-11e8-b2b9-ac1f6b6435d0',
'6c21c47e-bbb6-11e8-b2ba-ac1f6b6435d0',
'7666cca6-bbaa-11e8-b2ba-ac1f6b6435d0',
'7ee3439a-bbc9-11e8-b2bc-ac1f6b6435d0',
'80e422c4-bbc7-11e8-b2bc-ac1f6b6435d0',
'85da80ce-bbb9-11e8-b2ba-ac1f6b6435d0',
'8a384340-bbaa-11e8-b2ba-ac1f6b6435d0',
'9515e652-bbb7-11e8-b2ba-ac1f6b6435d0',
'a5d6de0a-bb9a-11e8-b2b9-ac1f6b6435d0',
'a74e60b8-bbc8-11e8-b2bc-ac1f6b6435d0',
'b07d8ed6-bba3-11e8-b2b9-ac1f6b6435d0',
'b0b13c66-bbaa-11e8-b2ba-ac1f6b6435d0',
'c361f992-bbad-11e8-b2ba-ac1f6b6435d0',
'ca4b50fa-bbc1-11e8-b2bb-ac1f6b6435d0',
'f38ad554-bba7-11e8-b2ba-ac1f6b6435d0',
'ff500aee-bba2-11e8-b2b9-ac1f6b6435d0'
]
DEBUG_DIR = 'debug/' + 'test_16'
if not os.path.exists(DEBUG_DIR):
os.makedirs(DEBUG_DIR)
def open_rgby(img_id):  # read the four stain channels of one RGBY image
    colors = ['blue','green','red','yellow']
    #colors = ['red']
    # colors = ['green']
    # colors = ['blue']
    flags = cv2.IMREAD_GRAYSCALE
    img = [cv2.imread(os.path.join('../input/test', img_id+'_'+color+'.png'), flags).astype(np.float32)
           for color in colors]
    return np.stack(img, axis=-1)
for img_id in test_16:
    img = open_rgby(img_id)
    cv2.imwrite(DEBUG_DIR + '/' + img_id + ".jpg", img)
    cv2.imwrite(DEBUG_DIR + '/' + img_id + '_blue' + ".jpg", img[:,:,0])
    cv2.imwrite(DEBUG_DIR + '/' + img_id + '_green' + ".jpg", img[:,:,1])
    cv2.imwrite(DEBUG_DIR + '/' + img_id + '_red' + ".jpg", img[:,:,2])
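# --- Illustrative sketch (not part of the original script) ---
# The stacked array above is (H, W, 4) in blue/green/red/yellow order, so
# channels 0..2 already match OpenCV's BGR layout. One assumed way to fold
# the yellow stain into a plain BGR composite for visual inspection:
def to_bgr(img):
    bgr = img[:, :, :3].copy()
    bgr[:, :, 1] += img[:, :, 3] / 2  # yellow contributes to green
    bgr[:, :, 2] += img[:, :, 3] / 2  # ...and to red
    return np.clip(bgr, 0, 255).astype(np.uint8)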
|
# *****************************************************************************
#
# Copyright (c) 2019, the Perspective Authors.
#
# This file is part of the Perspective library, distributed under the terms of
# the Apache License 2.0. The full license can be found in the LICENSE file.
#
from .core import * # noqa: F401, F403
from .core._version import __version__ # noqa: F401
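# The table and node bindings below are optional compiled/extension pieces;
# the ImportError guards (presumably) keep the pure-Python core importable
# when those components have not been built.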
try:
from .table import * # noqa: F401, F403
except ImportError:
pass
try:
from .node import * # noqa: F401, F403
except ImportError:
pass
|
from manimlib.imports import *
import mpmath
mpmath.mp.dps = 7
def zeta(z):
max_norm = FRAME_X_RADIUS
    try:
        return complex(mpmath.zeta(z))
    except Exception:
        # avoid a bare except; mpmath raises at the pole z = 1
        return complex(max_norm, 0)
def d_zeta(z):
epsilon = 0.01
return (zeta(z + epsilon) - zeta(z))/epsilon
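# Note: d_zeta approximates zeta'(z) with a one-sided (forward) difference of
# step 0.01; a crude estimate, but adequate for visualization purposes.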
class ZetaTransformationScene(ComplexTransformationScene):
CONFIG = {
"anchor_density" : 35,
"min_added_anchors" : 10,
"max_added_anchors" : 300,
"num_anchors_to_add_per_line" : 75,
"post_transformation_stroke_width" : 2,
"default_apply_complex_function_kwargs" : {
"run_time" : 5,
},
"x_min" : 1,
"x_max" : int(FRAME_X_RADIUS+2),
"y_min": 1,
"y_max": int(FRAME_Y_RADIUS + 2),
"extra_lines_x_min" : -2,
"extra_lines_x_max" : 4,
"extra_lines_y_min" : -2,
"extra_lines_y_max" : 2,
}
def prepare_for_transformation(self, mob):
for line in mob.family_members_with_points():
            # Find the point on the line closest to 1 on C
if not isinstance(line, Line):
line.insert_n_curves(self.min_added_anchors)
continue
p1 = line.get_start()+LEFT
p2 = line.get_end()+LEFT
t = (-np.dot(p1, p2-p1))/(get_norm(p2-p1)**2)
closest_to_one = interpolate(
line.get_start(), line.get_end(), t
)
#See how big this line will become
diameter = abs(zeta(complex(*closest_to_one[:2])))
target_num_curves = np.clip(
int(self.anchor_density*np.pi*diameter),
self.min_added_anchors,
self.max_added_anchors,
)
num_curves = line.get_num_curves()
if num_curves < target_num_curves:
line.insert_n_curves(target_num_curves-num_curves)
line.make_smooth()
def add_extra_plane_lines_for_zeta(self, animate = False, **kwargs):
dense_grid = self.get_dense_grid(**kwargs)
if animate:
self.play(ShowCreation(dense_grid))
self.plane.add(dense_grid)
self.add(self.plane)
def get_dense_grid(self, step_size = 1./16):
epsilon = 0.1
x_range = np.arange(
max(self.x_min, self.extra_lines_x_min),
min(self.x_max, self.extra_lines_x_max),
step_size
)
y_range = np.arange(
max(self.y_min, self.extra_lines_y_min),
min(self.y_max, self.extra_lines_y_max),
step_size
)
vert_lines = VGroup(*[
Line(
self.y_min*UP,
self.y_max*UP,
).shift(x*RIGHT)
for x in x_range
if abs(x-1) > epsilon
])
vert_lines.set_color_by_gradient(
self.vert_start_color, self.vert_end_color
)
horiz_lines = VGroup(*[
Line(
self.x_min*RIGHT,
self.x_max*RIGHT,
).shift(y*UP)
for y in y_range
if abs(y) > epsilon
])
horiz_lines.set_color_by_gradient(
self.horiz_start_color, self.horiz_end_color
)
dense_grid = VGroup(horiz_lines, vert_lines)
dense_grid.set_stroke(width = 1)
return dense_grid
def add_reflected_plane(self, animate = False):
reflected_plane = self.get_reflected_plane()
if animate:
self.play(ShowCreation(reflected_plane, run_time = 5))
self.plane.add(reflected_plane)
self.add(self.plane)
def get_reflected_plane(self):
reflected_plane = self.plane.copy()
reflected_plane.rotate(np.pi, UP, about_point = RIGHT)
for mob in reflected_plane.family_members_with_points():
mob.set_color(
Color(rgb = 1-0.5*color_to_rgb(mob.get_color()))
)
self.prepare_for_transformation(reflected_plane)
reflected_plane.submobjects = list(reversed(
reflected_plane.family_members_with_points()
))
return reflected_plane
def apply_zeta_function(self, **kwargs):
transform_kwargs = dict(self.default_apply_complex_function_kwargs)
transform_kwargs.update(kwargs)
        self.apply_complex_function(zeta, **transform_kwargs)
class TestZetaOnHalfPlane(ZetaTransformationScene):
CONFIG = {
"anchor_density" : 15,
}
def construct(self):
self.add_transformable_plane()
self.add_extra_plane_lines_for_zeta()
self.prepare_for_transformation(self.plane)
print(sum([
mob.get_num_points()
for mob in self.plane.family_members_with_points()
]))
print(len(self.plane.family_members_with_points()))
self.apply_zeta_function()
self.wait()
class TestZetaOnFullPlane(ZetaTransformationScene):
def construct(self):
self.add_transformable_plane(animate = True)
self.add_extra_plane_lines_for_zeta(animate = True)
self.add_reflected_plane(animate = True)
self.apply_zeta_function()
class TestZetaOnLine(ZetaTransformationScene):
def construct(self):
line = Line(UP+20*LEFT, UP+20*RIGHT)
self.add_transformable_plane()
self.plane.submobjects = [line]
self.apply_zeta_function()
self.wait(2)
self.play(ShowCreation(line, run_time = 10))
self.wait(3) |
#!/usr/bin/env python
# Copyright 2018 Jetperch LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Simple programmatic GUI creation for parameters.
Much inspiration for this module was drawn from the excellent
`guidata package <https://code.google.com/p/guidata/>`_.
"""
from PySide2 import QtWidgets, QtGui, QtCore
import os
import numpy as np
from .ui_util import clear_layout
import logging
log = logging.getLogger(__name__)
class Parameter(object):
"""The base class for a parameter value with validation and GUI bindings.
:param str name: The name for this item.
:param value: The default value. None (default).
:param tooltip: The text for the tooltip. None (default) is equivalent
to the empty string ''.
Once an instance is created, it can be added to a parent widget using the
:meth:`populate` method. The parent widget should contain either no
layout or a form layout.
The remaining methods are normally used by the derived classes to
implement their specialized behaviors and should not be called by an
application.
"""
def __init__(self, name: str, value=None, tooltip: str=None):
self.name = str(name)
self._value = None
self.tooltip = tooltip
self.callback = None
"""The callable that is called on any value changes. The
callable will be passed self as the sole argument."""
self.validator = None
"""A custom validation callable that takes a potential value
        as the sole argument and raises an exception for an invalid value.
The callable returns the new value.
This callable should be used by the application for any custom
validation beyond the basic type validation. A validator must not
have side effects!"""
if value is not None:
try:
self.value = value
except Exception:
log.exception('Invalid initial value for "%s" = "%s"',
self.name, value)
self._value = value
self.label = None
self.widget = None
self.layout = None
def populate_subclass(self, parent):
"""Populate the parent widget with additional widgets for the
derived subclass.
:param parent: The parent widget to populate.
Use :meth:`addWidget` to add additional GUI widgets.
The :meth:`on_changed` method is guaranteed to be called following
this call (if the current value is valid) to ensure that the GUI
        widgets are updated with the current value. Derived implementations
do not need to call this trivial base implementation.
"""
pass
def populate(self, parent):
"""Populate a parent widget with this parameter.
:param parent: The parent widget to populate.
:return: self.
Do not override this method. Override :meth:`populate_subclass` which
is called by this method appropriately.
Each parameter consists of two default widgets:
self.label and self.widget.
The self.label is a QLabel that contains the self.name. The
self.widget contains a self.layout, and the exact contents are
configured by the derived class. Both widgets are added to the
parent by calling the parent's layout.addRow().
"""
layout = parent.layout()
if layout is None:
layout = QtWidgets.QFormLayout(parent)
self.label = QtWidgets.QLabel(self.name, parent)
if self.tooltip is not None:
self.label.setToolTip(self.tooltip)
self.widget = QtWidgets.QWidget(parent)
self.layout = QtWidgets.QHBoxLayout(self.widget)
self.layout.setContentsMargins(0, 0, 0, 0)
layout.addRow(self.label, self.widget)
self.populate_subclass(parent)
try:
self.validate(self._value)
self.on_valid()
self.on_changed()
except Exception:
self.on_invalid()
return self
def unpopulate(self, parent):
"""Unpopulated a parent widget to remove this parameter.
:param parent: The parent widget to unpopulate.
:return: self.
"""
if self.label is not None:
layout = parent.layout()
layout.removeRow(self.label)
self.label = None
self.widget = None
self.layout = None
def add_widget(self, widget, stretch=0):
"""Add a new widget to the layout.
:param widget: The widget to add which will have its parent set to
self.widget.
:param stretch: The stretch value. 0 (Default) uses a balanced layout.
"""
widget.setParent(self.widget)
self.layout.addWidget(widget, stretch)
def validate(self, x):
"""Validate an assignment to value for this parameter.
:param x: The potential parameter value.
:return: The actual parameter value.
:raise ValueError: If x is not an allowed value.
This method should be considered protected and should only be called
through the value.setter. Derived classes should safely presume that
self._value will be assigned with the result. Derived implementations
do not need to call this trivial base implementation.
"""
return x
@property
def value(self):
"""Get the current value for this parameter."""
return self._value
@value.setter
def value(self, x):
"""Set the current value for this parameter.
        :param x: The new value for this parameter.
:raises ValueError: If x is not valid.
Derived classes should not override this method. Instead
override :meth:`validate` and :meth:`on_changed`.
"""
try:
value = self.validate(x)
if callable(self.validator):
value = self.validator(value)
self.on_valid()
except Exception:
self.on_invalid()
raise
if value != self._value:
self._value = value
self.on_changed()
if self._value is not None and callable(self.callback):
self.callback(self) # notify observer
def on_valid(self):
"""Notify the GUI elements that the current value is valid.
Derived implementations do not need to call this trivial base
implementation.
"""
pass
def on_invalid(self):
"""Notify the GUI elements that the current value is invalid.
Derived implementations do not need to call this trivial base
implementation.
"""
pass
def update(self, value=None):
"""Set the value property using a method.
        :param value: The new value for this instance. If None (Default),
            then do not send change notifications; just mark as invalid.
:raises ValueError: If x is not valid.
Calling self.update(value) is very similar to "self.value = value",
except that a value of None is handled differently. This method
should normally be called by derived classes and not application code.
"""
if value is None:
self._value = None
self.on_invalid()
else:
self.value = value
def on_changed(self):
"""Notify the GUI of a value change.
Derived classes should override to update the GUI elements.
Every GUI element must be updated as appropriate during calls to
on_changed. To prevent a circular loop, GUI elements should only
change if needed so that they do not unnecessarily signal a change
when no change occurred.
"""
pass
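# --- Illustrative usage sketch (not part of the module) ---
# Assignment flows through validate(), then the optional user-supplied
# validator, then on_changed() and the callback. A hypothetical example
# (parent_widget and the names below are assumptions):
#
#     def multiple_of_five(x):
#         if x % 5:
#             raise ValueError('must be a multiple of 5')
#         return x
#
#     p = Int('threshold', value=5, vrange=(0, 100))
#     p.validator = multiple_of_five
#     p.callback = lambda param: print(param.name, param.value)
#     p.populate(parent_widget)  # any QWidget with no layout or a form layout
#     p.value = 25               # validates, updates widgets, fires callback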
class Bool(Parameter):
"""An boolean valued item.
:param str name: The name for this item.
:param bool value: The starting value for this item.
:param tooltip: The text for the tooltip.
"""
def __init__(self, name: str, value=False, tooltip: str=''):
self.checkbox = None
Parameter.__init__(self, name, value, tooltip)
def populate_subclass(self, parent):
self.checkbox = QtWidgets.QCheckBox()
self.checkbox.setChecked(self.value)
self.checkbox.clicked.connect(self.on_clicked)
self.add_widget(self.checkbox)
def validate(self, x):
return bool(x)
def on_clicked(self):
self.value = self.checkbox.isChecked()
def on_changed(self):
if self.checkbox and self.checkbox.isChecked() != self.value:
self.checkbox.setChecked(self.value)
class Int(Parameter):
"""An integer valued item.
:param str name: The name for this item.
:param int value: The starting value for this item.
:param (int, int) vrange: The (min, max) inclusive range for values.
None (default) allows for unbounded integers.
:param tooltip: The text for the tooltip.
"""
def __init__(self, name: str, value=None, vrange=None, tooltip: str=''):
if vrange is not None:
assert(len(vrange) == 2)
vrange = (int(vrange[0]), int(vrange[1]))
if value is None:
if vrange is not None:
value = vrange[0]
else:
value = 0
self.vrange = vrange
self.textedit = None
self.spinedit = None
self.slider = None
Parameter.__init__(self, name, value, tooltip)
def populate_subclass(self, parent):
if self.vrange is None:
self.textedit = QtWidgets.QLineEdit(self.widget)
self.textedit.setText(str(self.value))
self.textedit.textChanged.connect(self._on_text_changed)
self.add_widget(self.textedit)
else:
self.spinedit = QtWidgets.QSpinBox(self.widget)
self.spinedit.setRange(*self.vrange)
self.spinedit.setValue(self.value)
self.spinedit.valueChanged.connect(self.update)
self.add_widget(self.spinedit)
self.slider = QtWidgets.QSlider(QtCore.Qt.Horizontal, self.widget)
self.slider.setTracking(True)
self.slider.setRange(*self.vrange)
self.slider.setSingleStep(1)
            page_step = (self.vrange[1] - self.vrange[0]) // 10  # Qt expects an int
self.slider.setPageStep(page_step)
self.slider.setValue(self.value)
self.slider.valueChanged.connect(self._on_slider_changed)
self.add_widget(self.slider)
def on_valid(self):
if self.textedit is not None:
self.textedit.setStyleSheet("")
def on_invalid(self):
if self.textedit is not None:
self.textedit.setStyleSheet("QLineEdit{background:red;}")
def validate(self, x):
x = int(x)
if self.vrange is not None:
if x < self.vrange[0] or x > self.vrange[1]:
raise ValueError('Out of range')
return x
def _on_slider_changed(self, value):
v = int(value)
if v != self._value:
self.value = v
def _on_text_changed(self, value):
try:
self.value = str(value)
except ValueError:
pass
def on_changed(self):
v = self.value
if self.textedit:
text = str(v)
if str(self.textedit.text()) != text:
self.textedit.setText(text)
if self.spinedit is not None:
if self.spinedit.value() != v:
self.spinedit.setValue(v)
if self.slider is not None:
if self.slider.value() != v:
self.slider.setValue(v)
class Float(Parameter):
"""An floating point valued item.
:param str name: The name for this item.
:param float value: The starting value for this item.
:param (float, float) vrange: The (min, max) inclusive range for values.
None (default) allows for unbounded floating point values.
:param tooltip: The text for the tooltip.
"""
def __init__(self, name:str , value=None, vrange=None, tooltip: str=''):
if vrange is not None:
assert(len(vrange) == 2)
vrange = (float(vrange[0]), float(vrange[1]))
if value is None:
if vrange is not None:
value = vrange[0]
else:
value = 0.
self.textedit = None
self.slider = None
self.vrange = vrange
Parameter.__init__(self, name, value, tooltip)
def populate_subclass(self, parent):
self.textedit = QtWidgets.QLineEdit(self.widget)
self.textedit.setText(str(self.value))
self.textedit.textChanged.connect(self._on_text_changed)
self.add_widget(self.textedit)
if self.vrange is not None:
self.slider = QtWidgets.QSlider(QtCore.Qt.Horizontal, self.widget)
self.slider.setTracking(True)
self.slider.setRange(0, 250)
self.slider.setSingleStep(1)
            page_step = 25  # the slider spans the fixed 0..250 scale set above
self.slider.setPageStep(page_step)
self.slider.setValue(self._float2slider(self.value))
self.slider.valueChanged.connect(self._on_slider_changed)
self.add_widget(self.slider, 1)
def _float2slider(self, x):
v = (x - self.vrange[0]) * 250 / (self.vrange[1] - self.vrange[0])
v = np.round(v)
return int(v)
def _slider2float(self, x):
return x * (self.vrange[1] - self.vrange[0]) / 250. + self.vrange[0]
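    # Example mapping (assuming vrange=(0.0, 10.0)): _float2slider(5.0) -> 125
    # and _slider2float(125) -> 5.0; the slider spans a fixed 0..250 scale.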
def validate(self, x):
try:
x = float(x)
if self.vrange is not None:
if x < self.vrange[0] or x > self.vrange[1]:
raise ValueError('Out of range')
if self.textedit is not None:
self.textedit.setStyleSheet("")
except Exception:
if self.textedit is not None:
self.textedit.setStyleSheet("QLineEdit{background:red;}")
raise
return x
def _on_slider_changed(self, value):
v = self._slider2float(value)
if self._float2slider(self._value) != value:
self.value = v
def _on_text_changed(self, value):
try:
self.value = str(value)
except ValueError:
self.update(None)
def on_changed(self):
v = self.value
if self.textedit:
text = str(self.textedit.text())
if v != float(text):
self.textedit.setText(str(v))
if self.slider is not None:
v_pos = self._float2slider(v)
if self.slider.value() != v_pos:
self.slider.setValue(v_pos)
class Enum(Parameter):
"""An enumerated valued item.
:param str name: The name for this item.
:param str value: The starting value for this item.
:param list(str) values: The list of possible values.
:param tooltip: The text for the tooltip.
    :param closed: When True, the value must be one of the given values.
        When False, arbitrary string values are allowed.
"""
def __init__(self, name: str, value=None, values=None, tooltip: str='', closed=True):
if value is None and values:
value = values[0]
self._values = values
self._closed = closed
self.comboBox = None
Parameter.__init__(self, name, value, tooltip)
def populate_subclass(self, parent):
self.comboBox = QtWidgets.QComboBox(self.widget)
self._update_values()
if not self._closed:
self.comboBox.setEditable(True)
self.add_widget(self.comboBox)
def _update_values(self):
if self.comboBox is None:
return
try:
self.comboBox.currentIndexChanged.disconnect()
except Exception:
pass
try:
self.comboBox.editTextChanged.disconnect()
except Exception:
pass
self.comboBox.clear()
if self._values:
for v in self._values:
self.comboBox.addItem(str(v))
self.comboBox.setEnabled(True)
elif self._closed:
self.comboBox.setEnabled(False)
else:
self.comboBox.setEnabled(True)
if self._value is not None:
idx = self.comboBox.findText(self._value)
if idx >= 0:
self.comboBox.setCurrentIndex(idx)
elif not self._closed:
self.comboBox.setEditText(self._value)
self.comboBox.currentIndexChanged.connect(self.update)
self.comboBox.editTextChanged.connect(self._on_text_changed)
@property
def values(self):
return self._values
@values.setter
def values(self, values):
self._values = values
self._update_values()
if self._values and self._closed:
if self._value not in self._values:
self.value = self._values[0]
def validate(self, x):
if x is None:
pass
elif isinstance(x, int): # presume index
if x < 0 or x >= len(self._values):
raise ValueError(x)
x = self._values[x]
elif isinstance(x, str): #allowed
if self._closed: # require to be in values
self._values.index(x)
else:
raise ValueError(x)
return x
def on_changed(self):
v = self.value
if self.comboBox is not None and v is not None:
if v != str(self.comboBox.currentText()):
try:
idx = self._values.index(v)
self.comboBox.setCurrentIndex(idx)
except (TypeError, AttributeError, ValueError):
if not self._closed:
self.comboBox.setEditText(v)
def _on_text_changed(self, txt):
txt = str(txt)
self.update(txt)
def on_valid(self):
if self.comboBox is not None:
self.comboBox.setStyleSheet("")
def on_invalid(self):
if self.comboBox is not None:
self.comboBox.setStyleSheet("QComboBox{background:red;}")
class String(Parameter):
"""An arbitrary string value.
:param str name: The name for this item.
:param str value: The starting value for this item. None (default) is
equivalent to ''.
:param tooltip: The text for the tooltip.
"""
def __init__(self, name: str, value=None, tooltip: str=''):
self.lineEdit = None
Parameter.__init__(self, name, value, tooltip)
def populate_subclass(self, parent):
self.lineEdit = QtWidgets.QLineEdit(self.widget)
if self.value is not None:
self.lineEdit.setText(self.value)
self.lineEdit.textChanged.connect(self._on_text_changed)
self.add_widget(self.lineEdit)
def on_valid(self):
if self.lineEdit is not None:
self.lineEdit.setStyleSheet("")
def on_invalid(self):
if self.lineEdit is not None:
self.lineEdit.setStyleSheet("QLineEdit{background:red;}")
def _on_text_changed(self, value):
try:
self.value = str(value)
except ValueError:
pass
def on_changed(self):
v = self.value
        if v is None:
            log.debug('String.on_changed called with value None')
            return
if self.lineEdit is not None:
if v != str(self.lineEdit.text()):
self.lineEdit.setText(v)
class StringSelect(Enum):
"""An arbitrary string value with choices.
:param str name: The name for this item.
:param str value: The starting value for this item. None (default) is
equivalent to ''.
:param list(str) values: The list of possible values.
:param tooltip: The text for the tooltip.
"""
def __init__(self, name: str, value=None, values=None, tooltip: str=''):
Enum.__init__(self, name, value, values, tooltip, closed=False)
class RichTextView(Parameter):
"""A rich text view (no GUI edit).
:param str name: The name for this item.
:param value: The default value. None (default).
:param tooltip: The text for the tooltip. None (default) is equivalent
to the empty string ''.
"""
def __init__(self, name: str, value=None, tooltip: str=None):
self.view = None
Parameter.__init__(self, name, value, tooltip)
def populate_subclass(self, parent):
self.view = QtWidgets.QLabel()
self.add_widget(self.view)
def on_changed(self):
if self.view is not None:
if str(self.view.text()) != str(self._value):
self.view.setText(self.value)
PTYPE_OPEN = ['r', 'read', 'o', 'open']
PTYPE_SAVE = ['w', 'write', 's', 'save']
PTYPE_DIR = ['d', 'dir', 'directory']
PTYPE_MAP = {
'open': (PTYPE_OPEN, ":/joulescope/resources/play.png"),
'save': (PTYPE_SAVE, ":/joulescope/resources/record.png"),
'dir': (PTYPE_DIR, ":/joulescope/resources/pause.png"),
}
def ptype_lookup(ptype):
if ptype is None:
return 'open'
ptype = str(ptype).lower()
for key, (values, _) in PTYPE_MAP.items():
if ptype in values:
return key
raise ValueError(ptype)
class Path(String):
"""A path.
:param str name: The name for this item.
:param str value: The starting value for this item. None (default) is
equivalent to ''.
:param str ptype: The path type which is one of the values in
:data:`PTYPE_OPEN`, :data:`PTYPE_SAVE` or :data:`PTYPE_DIR`.
:param tooltip: The text for the tooltip.
"""
def __init__(self, name: str, value=None, ptype=None, tooltip: str=''):
self.ptype = ptype_lookup(ptype)
String.__init__(self, name, value=value, tooltip=tooltip)
self.path_button = None
def populate_subclass(self, parent):
String.populate_subclass(self, parent)
self.path_button = QtWidgets.QPushButton(self.widget)
self.path_button.clicked.connect(self._on_path_change)
icon1 = QtGui.QIcon()
icon_path = PTYPE_MAP[self.ptype][1]
icon1.addPixmap(QtGui.QPixmap(icon_path), QtGui.QIcon.Normal, QtGui.QIcon.Off)
self.path_button.setIcon(icon1)
self.path_button.setFlat(True)
self.path_button.setObjectName("pathButton")
self.path_button.setStyleSheet('QPushButton:flat { border: none; }')
self.path_button.setFocusPolicy(QtCore.Qt.TabFocus)
self.add_widget(self.path_button)
return self
def _on_path_change(self, event):
v = str(self.lineEdit.text())
if self.ptype == 'open':
path, _ = QtWidgets.QFileDialog.getOpenFileName(self.widget, 'Select file to open', v)
elif self.ptype == 'save':
path, _ = QtWidgets.QFileDialog.getSaveFileName(self.widget, 'Select file to save', v)
else: # self.ptype == 'dir':
path = QtWidgets.QFileDialog.getExistingDirectory(self.widget, 'Select directory', v)
if len(path):
self.lineEdit.setText(path)
self.value = path
else:
self.update(None)
def validate(self, x):
if x is None:
return x
x = str(x)
if self.ptype == 'open':
if not os.path.isfile(x):
raise ValueError('File not found "%s"' % x)
elif self.ptype == 'save':
parent = os.path.dirname(os.path.abspath(x))
if not os.path.isdir(parent):
raise ValueError('Parent directory not found: %s' % parent)
else: # self.ptype == 'dir':
if not os.path.isdir(x):
raise ValueError('Directory not found: %s' % x)
return x
class Directory(Path):
"""A directory.
:param str name: The name for this item.
:param str value: The starting value for this item. None (default) is
equivalent to ''.
:param tooltip: The text for the tooltip.
"""
def __init__(self, name: str, value=None, tooltip: str=None):
Path.__init__(self, name, value, 'dir', tooltip)
class FileOpen(Path):
"""An existing file.
:param str name: The name for this item.
:param str value: The starting value for this item. None (default) is
equivalent to ''.
:param tooltip: The text for the tooltip.
"""
def __init__(self, name: str, value=None, tooltip: str=None):
Path.__init__(self, name, value, 'open', tooltip)
class FileSave(Path):
"""A file.
:param str name: The name for this item.
:param str value: The starting value for this item. None (default) is
equivalent to ''.
:param tooltip: The text for the tooltip.
"""
def __init__(self, name: str, value=None, tooltip: str=None):
Path.__init__(self, name, value, 'save', tooltip)
class QClickLabel(QtWidgets.QLabel):
clicked = QtCore.Signal()
def __init__(self, parent=None):
QtWidgets.QLabel.__init__(self, parent)
def mousePressEvent(self, ev):
self.clicked.emit()
class Color(Parameter):
"""An color selection item.
:param str name: The name for this item.
:param bool value: The starting value for this item.
:param tooltip: The text for the tooltip.
"""
def __init__(self, name: str, value=False, tooltip: str=''):
self.label = None
self._parent = None
Parameter.__init__(self, name, value, tooltip)
self.picture = None
def populate_subclass(self, parent):
self._parent = parent
self.label = QClickLabel()
self.label.clicked.connect(self.onClick)
self.picture = QtGui.QPicture()
self.draw()
self.add_widget(self.label)
def to_qt_color(self, x=None):
if x is None:
x = self._value
if isinstance(x, QtGui.QColor):
pass # No action necessary
elif isinstance(x, str):
x = QtGui.QColor(x)
else:
x = QtGui.QColor(*x)
return x
def draw(self):
painter = QtGui.QPainter()
painter.begin(self.picture)
color = self.to_qt_color()
        log.debug('%s : %s', self._value, color.getRgb())
painter.fillRect(QtCore.QRect(0, 0, 60, 20), QtGui.QBrush(color))
painter.end()
self.label.setPicture(self.picture)
def validate(self, x):
if x is None:
return None
return self.to_qt_color(x).getRgb()[:3]
def onClick(self):
dialog = QtWidgets.QColorDialog(self.to_qt_color(), self._parent)
if dialog.exec_():
self.value = dialog.selectedColor()
def on_changed(self):
if self.label is not None:
self.draw()
def demo():
# logging.basicConfig(level=logging.INFO)
import sys
path = os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(__file__)), '..', '..'))
print(path)
sys.path.insert(0, path)
from joulescope_ui import joulescope_rc
app = QtWidgets.QApplication([])
window = QtWidgets.QMainWindow()
widget = QtWidgets.QWidget(window)
window.setCentralWidget(widget)
window.setGeometry(QtWidgets.QStyle.alignedRect(QtCore.Qt.LeftToRight, QtCore.Qt.AlignCenter, window.size(),
QtWidgets.QApplication.desktop().availableGeometry()))
status = RichTextView('Status', tooltip='Display the source parameter and value on any change.')
status.populate(widget)
removeV = Float('REMOVED', 10.0)
enumV = Enum('enumV', 'goober', None)
strV = StringSelect('strV', 'two', None)
params = [
Bool('bool1', True,
tooltip='<span><h3>A boolean checkbox value</h3>'
'<p>'
'This is a very long tooltip that will continue across '
'multiple lines. Remember that adjacent python strings '
'are automatically joined by the lexer, and no trailing '
'backslash is needed inside parenthesis. '
'We need to ensure that QT correctly wraps this text '
'inside the tooltip. '
'Ensure that this text is very long so that it will '
'need more than a single QT line on the screen.'
'</p></span>'),
Int('int1', 10),
Int('int2', 123, (100, 200)),
Int('int3', None, (100, 200)),
Int('int4', 'hello'),
Float('float1', 10.0),
Float('float2', 123., (100, 200)),
Float('float3', None, (100, 200)),
removeV,
Enum('enum1', 'world', ['hello', 'there', 'world']),
Enum('enum2', None, ['hello', 'there', 'world']),
Enum('enumE', None, None),
enumV,
String('str1', u'\u221A(-1) 2\u00B3 \u03A3 \u03C0... and it was delicious! --Anonymous'),
StringSelect('str2', 'world', ['hello', 'there', 'world']),
StringSelect('str3', u'\u24D7\u24D4\u24DB\u24DB\u24DE', ['hello', 'there', 'world']),
StringSelect('strS', u'\u24D7\u24D4\u24DB\u24DB\u24DE', None),
strV,
Directory('dir1', os.path.expanduser('~')),
FileOpen('open1', os.path.join(os.path.expanduser('~'), 'guiparams.txt')),
FileSave('save1', os.path.join(os.path.expanduser('~'), 'guiparams.txt')),
Color('color1', (0, 0, 255), 'A nice blue'),
Color('color2', '#400080', 'A nice purple'),
]
def callback(item):
try:
value = item.value
except Exception as ex:
value = str(ex)
status.value = '%s %s' % (item.name, value)
pV = {}
for p in params:
pV[p.name] = p
p.callback = callback
p.populate(widget)
removeV.unpopulate(widget)
assert(pV['enum1'].value == 'world')
assert(pV['enum2'].value == 'hello')
assert(enumV.value == 'goober')
enumV.values = ['hello', 'there', 'world']
assert(enumV.value == 'hello')
enumV.values = ['one', 'two', 'three', 'four']
assert(enumV.value == 'one')
assert(strV.value == 'two')
strV.values = ['hello', 'there', 'world']
assert(strV.value == 'two')
strV.values = ['one', 'two', 'three', 'four']
assert(strV.value == 'two')
window.show()
app.exec_()
return 0
if __name__ == '__main__':
demo()
|
# Copyright 2021 cstsunfu. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import hjson
import os
from typing import Dict, Union, Callable, List, Any
from dlk.utils.parser import BaseConfigParser
from dlk.utils.config import ConfigTool
from dlk.data.datamodules import datamodule_register, datamodule_config_register
from dlk.managers import manager_register, manager_config_register
from dlk.core.imodels import imodel_register, imodel_config_register
import pickle as pkl
from dlk.utils.io import open
import json
from dlk.utils.logger import Logger
logger = Logger.get_logger()
class Train(object):
"""Trainer
Config Example:
>>> {
>>> "_focus": {
>>> },
>>> "_link": {},
>>> "_search": {},
>>> "config": {
>>> "save_dir": "*@*", # must be provided
>>> "data_path": "*@*", # must be provided
>>> },
>>> "task": {
>>> "_name": task_name
>>> ...
>>> }
>>> }
"""
def __init__(self, config: Union[str, Dict], ckpt: str = ""):
super(Train, self).__init__()
if not isinstance(config, dict):
with open(config, 'r') as f:
config = hjson.load(f, object_pairs_hook=dict)
self.ckpt = ckpt
self.focus = config.pop('_focus', {})
self.configs = BaseConfigParser(config).parser_with_check()
if self.ckpt:
assert len(
self.configs
) == 1, f"Reuse the checkpoint(ckpt is not none), you must provide the (only one) config which generate the checkpoint."
self.config_names = []
for possible_config in self.configs:
config_name = []
for source, to in self.focus.items():
config_point = possible_config
trace = source.split('.')
for t in trace:
config_point = config_point[t]
config_name.append(to + str(config_point))
if config_name:
self.config_names.append('_'.join(config_name))
else:
self.config_names.append(possible_config['root']['_name'])
if len(self.config_names) != len(set(self.config_names)):
for config, name in zip(self.configs, self.config_names):
logger.info(
f"{name}:\n{json.dumps(config, indent=4, ensure_ascii=False)}"
)
raise NameError('The config_names is not unique.')
def run(self):
"""run for all configs
Returns:
None
"""
logger.info(
f"You have {len(self.config_names)} training config(s), they all will be run."
)
for i, (config, name) in enumerate(zip(self.configs,
self.config_names)):
logger.info(f"Runing the {i}th {name}...")
self.run_oneturn(config, name)
def dump_config(self, config: Dict, name: str):
"""dump the config and change the log file path to config['config']['save_dir']+name
Args:
config: {"config": {"save_dir": '..'}}
name: config name
Returns:
None
"""
log_path = os.path.join(config.get('config').get('save_dir'), name)
with open(os.path.join(config.get('config').get('save_dir'), name, "config.json"), 'w') as f:
json.dump(
{
"root": config,
"_focus": self.focus
},
f,
ensure_ascii=False,
indent=4
)
Logger.init_file_logger("log.txt", log_path)
def run_oneturn(self, config, name):
"""run this config
Args:
config: {"root": '...'}
name: config name
Returns:
None
"""
config = config['root']
# save configure
self.dump_config(config, name)
# get data
data = self.get_data(config)
# set datamodule
datamodule = self.get_datamodule(config, data)
# set training manager
manager = self.get_manager(config, name)
# init imodel and inject the origin test and valid data
imodel = self.get_imodel(config, data)
# start training
manager.fit(model=imodel, datamodule=datamodule)
manager.test(model=imodel, datamodule=datamodule)
def get_data(self, config):
"""get the data decided by config
Args:
config: {"config": {"data_path": '..'}}
Returns:
loaded data
"""
with open(config['config']['data_path'], 'rb') as f:
self.data = pkl.load(f).get('data', {})
return self.data
def get_datamodule(self, config, data):
"""get the datamodule decided by config, and fit the data to datamodule
Args:
config: {"task": {"datamodule": '..'}}
data: {"train": '..', 'valid': '..', ..}
Returns:
datamodule
"""
DataModule, DataModuleConfig = ConfigTool.get_leaf_module(
datamodule_register, datamodule_config_register, 'datamodule',
config['task']['datamodule'])
datamodule = DataModule(DataModuleConfig, data)
return datamodule
def get_manager(self, config, name):
"""get the tranin/predict manager decided by config
Args:
config: {"task": {"manager": '..'}, "config": {"save_dir"}}
name: the predict progress name
Returns:
manager
"""
Manager, ManagerConfig = ConfigTool.get_leaf_module(
manager_register, manager_config_register, 'manager',
config.get('task').get('manager'))
manager = Manager(ManagerConfig,
rt_config={
"save_dir": config.get('config').get("save_dir"),
"name": name
})
return manager
def get_imodel(self, config, data):
"""get the imodel decided by config, and inject the origin test and valid data
Args:
config: {"task": {"imodel": '..'}}
data: {"train": '..', 'valid': '..', ..}
Returns:
imodel
"""
IModel, IModelConfig = ConfigTool.get_leaf_module(
imodel_register, imodel_config_register, 'imodel',
config.get('task').get('imodel'))
imodel = IModel(IModelConfig)
if self.ckpt:
logger.info(f"reuse the checkpoint at {self.ckpt}")
imodel.load_from_checkpoint(self.ckpt)
if 'valid' in data:
imodel._origin_valid_data = data['valid']
if 'test' in data:
imodel._origin_test_data = data['test']
return imodel
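# --- Illustrative usage sketch (not part of the module); the config path
# below is an assumption for demonstration only.
#
#     trainer = Train('./examples/ner/train.hjson')
#     trainer.run()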
|
from os.path import dirname, join, abspath
from setuptools import setup, find_packages
try: # for pip >= 10
from pip._internal.req import parse_requirements
except ImportError: # for pip <= 9.0.3
from pip.req import parse_requirements
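# Note: parse_requirements is a private pip API whose return type has changed
# across versions; that is why both `ir.req` and `ir.requirement` are tried
# when building the requirements list below.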
with open(join(dirname(__file__), 'clinica/VERSION'), 'rb') as f:
version = f.read().decode('ascii').strip()
this_directory = abspath(dirname(__file__))
with open(join(this_directory, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
install_reqs = parse_requirements('requirements.txt', session='hack')
try:
requirements = [str(ir.req) for ir in install_reqs]
except Exception:
requirements = [str(ir.requirement) for ir in install_reqs]
setup(
name='clinica',
version=version,
url='http://www.clinica.run',
description='Software platform for clinical neuroimaging studies',
long_description=long_description,
long_description_content_type='text/markdown',
author='ARAMIS Lab',
maintainer='Clinica developers',
maintainer_email='[email protected]',
license='MIT license',
packages=find_packages(exclude=('tests', 'tests.*')),
include_package_data=True,
zip_safe=False,
entry_points={
'console_scripts': ['clinica = clinica.cmdline:execute']
},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Operating System :: OS Independent',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'Programming Language :: Python',
],
install_requires=requirements,
python_requires='>=3.6'
)
|
import logging
import math
import urllib.parse
from djagger.decorators import schema
from django.conf import settings
from django.core.paginator import Paginator
from django.http import (
Http404,
HttpResponse,
JsonResponse
)
from django.urls import reverse
from spid_cie_oidc.authority.models import (
FederationDescendant,
FederationEntityAssignedProfile,
StaffToken,
get_first_self_trust_anchor
)
from spid_cie_oidc.authority.settings import MAX_ENTRIES_PAGE
from spid_cie_oidc.entity.jwtse import (
create_jws, unpad_jwt_head,
unpad_jwt_payload
)
from spid_cie_oidc.entity.models import TrustChain
from spid_cie_oidc.entity.settings import HTTPC_PARAMS
from spid_cie_oidc.entity.trust_chain_operations import get_or_create_trust_chain
from spid_cie_oidc.entity.utils import iat_now
from . schemas.fetch_endpoint_request import FetchRequest, FedAPIErrorResponse, FetchResponse
from . schemas.list_endpoint import ListRequest, ListResponse
from . schemas.advanced_entity_list_endpoint import AdvancedEntityListRequest, AdvancedEntityListResponse
from . schemas.resolve_endpoint import ResolveRequest, ResolveResponse
from . schemas.trust_mark_status_endpoint import TrustMarkRequest, TrustMarkResponse
logger = logging.getLogger(__name__)
@schema(
methods=['GET'],
get_request_schema = {
"application/x-www-form-urlencoded": FetchRequest
},
get_response_schema = {
"400": FedAPIErrorResponse,
"404": FedAPIErrorResponse,
"200": FetchResponse
},
tags = ['Federation API']
)
def fetch(request):
"""
All entities that are expected to publish entity statements
about other entities MUST expose a Fetch endpoint.
Fetching entity statements is performed to collect entity statements
one by one to gather trust chains.
To fetch an entity statement, an entity needs to know the identifier
of the entity to ask (the issuer), the fetch endpoint of that entity
and the identifier of the entity that you want the statement to be about (the subject).
"""
if request.GET.get("iss"):
iss = get_first_self_trust_anchor(sub=request.GET["iss"])
else:
iss = get_first_self_trust_anchor()
if not request.GET.get("sub"):
conf = get_first_self_trust_anchor()
if request.GET.get("format") == "json":
return JsonResponse(conf.entity_configuration_as_dict, safe=False)
else:
return HttpResponse(
conf.entity_configuration_as_jws,
content_type="application/entity-statement+jwt"
)
sub = FederationDescendant.objects.filter(
sub=request.GET["sub"], is_active=True
).first()
if not sub:
raise Http404()
if request.GET.get("format") == "json":
return JsonResponse(
sub.entity_statement_as_dict(iss.sub, request.GET.get("aud",[])), safe=False
)
else:
return HttpResponse(
sub.entity_statement_as_jws(iss.sub, request.GET.get("aud",[])),
content_type="application/entity-statement+jwt",
)
@schema(
methods=['GET'],
get_request_schema = {
"application/x-www-form-urlencoded": ListRequest
},
get_response_schema = {
"400": FedAPIErrorResponse,
"404": FedAPIErrorResponse,
"200": ListResponse
},
tags = ['Federation API']
)
def entity_list(request):
if request.GET.get("entity_type", "").lower():
_q = {"profile__profile_category": request.GET["entity_type"]}
else:
_q = {}
entries = FederationEntityAssignedProfile.objects.filter(**_q).values_list(
"descendant__sub", flat=True
)
return JsonResponse(list(set(entries)), safe=False)
@schema(
methods=['GET'],
get_request_schema = {
"application/x-www-form-urlencoded": AdvancedEntityListRequest
},
get_response_schema = {
"400": FedAPIErrorResponse,
"404": FedAPIErrorResponse,
"200": AdvancedEntityListResponse
},
tags = ['Federation API']
)
def advanced_entity_listing(request):
    descendants = FederationDescendant.objects.filter(
is_active = True,
).order_by("-modified")
entities_list = []
    for descendant in descendants:
entity = {
descendant.sub : {
"iat" : int(descendant.modified.timestamp())
}
}
entities_list.append(entity)
    total_entries = descendants.count()
_max_entries = getattr(settings, 'MAX_ENTRIES_PAGE', MAX_ENTRIES_PAGE)
p = Paginator(entities_list, _max_entries)
page = request.GET.get("page", 1)
entities = p.get_page(page)
next_page_path = ""
if entities.has_next():
param = {"page": entities.next_page_number()}
url = f'{reverse("oidcfed_advanced_entity_listing")}?{urllib.parse.urlencode(param)}'
next_page_path = f"{url}"
prev_page_path = ""
if entities.has_previous():
param = {"page": entities.previous_page_number()}
url = f'{reverse("oidcfed_advanced_entity_listing")}?{urllib.parse.urlencode(param)}'
prev_page_path = f"{url}"
try:
iss = get_first_self_trust_anchor().sub
except Exception:
return JsonResponse(
{
"error": "Missing trust anchor",
},
status = 404
)
res = {
"iss" : iss,
"iat" : iat_now(),
"entities" : entities_list,
"page" : int(page),
"total_pages" : math.ceil(total_entries / MAX_ENTRIES_PAGE),
"total_entries" : total_entries,
"next_page_path": next_page_path,
"prev_page_path": prev_page_path,
}
return JsonResponse(res, safe=False)
@schema(
methods=['GET'],
get_request_schema = {
"application/x-www-form-urlencoded": ResolveRequest
},
get_response_schema = {
"400": FedAPIErrorResponse,
"404": FedAPIErrorResponse,
"200": ResolveResponse
},
tags = ['Federation API']
)
def resolve_entity_statement(request, format: str = "jose"):
"""
resolves the final metadata of its descendants
In this implementation we only returns a preexisting
Metadata if it's valid
we avoid any possibility to trigger a new Metadata discovery if
"""
if not all((request.GET.get("sub", None), request.GET.get("anchor", None))):
raise Http404("sub and anchor parameters are REQUIRED.")
if request.GET.get("iss"):
iss = get_first_self_trust_anchor(sub=request.GET["iss"])
else:
iss = get_first_self_trust_anchor()
_q = dict(
sub=request.GET["sub"],
trust_anchor__sub=request.GET["anchor"],
is_active=True
)
# gets the cached one
entity = TrustChain.objects.filter(**_q).first()
    # only privileged actors with a staff token can trigger a new trust chain
staff_token_head = request.headers.get("Authorization", None)
if staff_token_head:
staff_token = StaffToken.objects.filter(
token = staff_token_head
).first()
        if staff_token and staff_token.is_valid:
try:
# a staff token get a fresh trust chain on each call
entity = get_or_create_trust_chain(
httpc_params=HTTPC_PARAMS,
required_trust_marks = getattr(
settings, "OIDCFED_REQUIRED_TRUST_MARKS", []
),
subject=_q["sub"],
trust_anchor=_q["trust_anchor__sub"],
force = True
)
except Exception as e:
logger.error(
f"Failed privileged Trust Chain creation for {_q['sub']}: {e}"
)
if not entity:
raise Http404("entity not found.")
res = {
"iss": iss.sub,
"sub": request.GET["sub"],
# "aud": [],
"iat": entity.iat_as_timestamp,
"exp": entity.exp_as_timestamp,
"trust_marks": entity.trust_marks,
"metadata": entity.metadata,
"trust_chain": entity.chain
}
if request.GET.get("format") == "json" or format == "json":
return JsonResponse(res, safe=False)
else:
return HttpResponse(
create_jws(res, iss.jwks_fed[0]),
content_type="application/jose",
)
@schema(
methods=['GET'],
get_request_schema = {
"application/x-www-form-urlencoded": TrustMarkRequest
},
get_response_schema = {
"400": FedAPIErrorResponse,
"404": FedAPIErrorResponse,
"200": TrustMarkResponse
},
tags = ['Federation API']
)
def trust_mark_status(request):
failed_data = {"active": False}
if request.GET.get("sub", "") and request.GET.get("id", ""):
sub = request.GET["sub"]
_id = request.GET["id"]
elif request.GET.get("trust_mark", ""):
try:
unpad_jwt_head(request.GET["trust_mark"])
payload = unpad_jwt_payload(request.GET["trust_mark"])
sub = payload.get("sub", "")
_id = payload.get("id", "")
except Exception:
return JsonResponse(failed_data)
else:
return JsonResponse(failed_data)
res = FederationEntityAssignedProfile.objects.filter(
descendant__sub=sub, profile__profile_id=_id, descendant__is_active=True
)
if res:
return JsonResponse({"active": True})
else:
return JsonResponse(failed_data)
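# Example status checks (illustrative URLs and identifiers):
#
#   GET /trust_mark_status?sub=https://rp.example.org&id=https://ta.example.org/profile/rp
#   GET /trust_mark_status?trust_mark=<signed trust mark JWT>
#
# Both forms answer {"active": true} only when an active descendant holds the
# requested profile; any parsing or lookup failure yields {"active": false}.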
|
# coding: utf-8
import pprint
import six
from enum import Enum
class WebAppConfirmationResponse:
    """Swagger-generated model holding the confirmation data returned after a
    web app has been installed into a space (OAuth-style token response)."""

    swagger_types = {
        'access_token': 'str',
        'scope': 'str',
        'space': 'Space',
        'state': 'str',
        'token_type': 'str',
    }

    attribute_map = {
        'access_token': 'access_token',
        'scope': 'scope',
        'space': 'space',
        'state': 'state',
        'token_type': 'token_type',
    }
_access_token = None
_scope = None
_space = None
_state = None
_token_type = None
def __init__(self, **kwargs):
self.discriminator = None
self.access_token = kwargs.get('access_token', None)
self.scope = kwargs.get('scope', None)
self.space = kwargs.get('space', None)
self.state = kwargs.get('state', None)
self.token_type = kwargs.get('token_type', None)
@property
def access_token(self):
"""Gets the access_token of this WebAppConfirmationResponse.
        The access token grants permissions to the web service API according to the OAuth standard.
:return: The access_token of this WebAppConfirmationResponse.
:rtype: str
"""
return self._access_token
@access_token.setter
def access_token(self, access_token):
"""Sets the access_token of this WebAppConfirmationResponse.
        The access token grants permissions to the web service API according to the OAuth standard.
:param access_token: The access_token of this WebAppConfirmationResponse.
:type: str
"""
self._access_token = access_token
@property
def scope(self):
"""Gets the scope of this WebAppConfirmationResponse.
The scope contains the permissions granted to the web app within the space.
:return: The scope of this WebAppConfirmationResponse.
:rtype: str
"""
return self._scope
@scope.setter
def scope(self, scope):
"""Sets the scope of this WebAppConfirmationResponse.
The scope contains the permissions granted to the web app within the space.
:param scope: The scope of this WebAppConfirmationResponse.
:type: str
"""
self._scope = scope
@property
def space(self):
"""Gets the space of this WebAppConfirmationResponse.
        This is the space into which the web app is installed.
:return: The space of this WebAppConfirmationResponse.
:rtype: Space
"""
return self._space
@space.setter
def space(self, space):
"""Sets the space of this WebAppConfirmationResponse.
        This is the space into which the web app is installed.
:param space: The space of this WebAppConfirmationResponse.
:type: Space
"""
self._space = space
@property
def state(self):
"""Gets the state of this WebAppConfirmationResponse.
The state contains the state parameter content provided when initiating the app installation.
:return: The state of this WebAppConfirmationResponse.
:rtype: str
"""
return self._state
@state.setter
def state(self, state):
"""Sets the state of this WebAppConfirmationResponse.
The state contains the state parameter content provided when initiating the app installation.
:param state: The state of this WebAppConfirmationResponse.
:type: str
"""
self._state = state
@property
def token_type(self):
"""Gets the token_type of this WebAppConfirmationResponse.
The token type indicates the type of the access token. The type determines the authentication mechanism to use for accessing the web service API.
:return: The token_type of this WebAppConfirmationResponse.
:rtype: str
"""
return self._token_type
@token_type.setter
def token_type(self, token_type):
"""Sets the token_type of this WebAppConfirmationResponse.
The token type indicates the type of the access token. The type determines the authentication mechanism to use for accessing the web service API.
:param token_type: The token_type of this WebAppConfirmationResponse.
:type: str
"""
self._token_type = token_type
def to_dict(self):
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
elif isinstance(value, Enum):
result[attr] = value.value
else:
result[attr] = value
if issubclass(WebAppConfirmationResponse, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
return pprint.pformat(self.to_dict())
def __repr__(self):
return self.to_str()
def __eq__(self, other):
if not isinstance(other, WebAppConfirmationResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
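if __name__ == "__main__":
    # Minimal usage sketch with placeholder values (not a real OAuth exchange):
    resp = WebAppConfirmationResponse(
        access_token="example-access-token",
        scope="read write",
        state="opaque-client-state",
        token_type="Bearer",
    )
    # 'space' stays None here because no Space object is attached in this sketch
    print(resp.to_dict())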
|
import os
import time
import urllib.error
import urllib.request
def mkdir(path):
    """Create the directory if it does not exist; return True when created."""
    path = path.strip()
    path = path.rstrip("\\")
    if not os.path.exists(path):
        os.makedirs(path)
        return True
    else:
        return False
def cbk(blocks_transferred, block_size, total_size):
    # urlretrieve progress hook: print the download percentage
    per = 100.0 * blocks_transferred * block_size / total_size
    if per > 100:
        per = 100
    print("%.2f%%" % per, end="\r")

base_dir = os.path.abspath('.')
# frame-set variants used in the remote file names
d = ["", "a", "b", "c", "d", "e", "n", "l"]
# names = ["kyo","benimaru","daimon","terry","andy","joe","ryo","robert","yuri","leona",
# "ralf","clark","athena","kensou","chin","chizuru","mai","king","kim","chang",
# "choi","yashiro","shermie","chris","yamazaki","mary","billy","iori","orochi",
# "shingo","iori2","orichi-yashiro","orichi-shermie","orichi-chris"]
names = ["benimaru"]
for name in names:
    mkdir(os.path.join(base_dir, name))
for name in names:
for p in d:
for i in range(0,50):
            if i == 0:
                # index 00 only exists for the "" and "a" frame variants
                if p != "" and p != "a":
                    continue
                else:
                    print("")
            file = p + "%02d" % i + ".gif"
            work_path = os.path.join(base_dir, name, file)
if os.path.exists(work_path):
print(name+' '+file +' exists!!')
continue
else:
                print(file + ' not downloaded yet!')
img_src = 'http://oss.emugif.com/picture2014/kof97/'+name+'/'+file
try:
urllib.request.urlretrieve(img_src,work_path,cbk)
            except urllib.error.HTTPError as e:
                print("download failed: %d url: %s" % (e.code, file))
                break
            else:
                print("downloaded %s successfully!" % file)
finally:
time.sleep(2)
|
import logging
import pytest
from ocs_ci.framework.testlib import tier4
from ocs_ci.ocs import constants
from ocs_ci.utility import prometheus
log = logging.getLogger(__name__)
@tier4
@pytest.mark.polarion_id("OCS-1052")
def test_ceph_manager_stopped(measure_stop_ceph_mgr):
"""
Test that there is appropriate alert when ceph manager
is unavailable and that this alert is cleared when the manager
is back online.
"""
api = prometheus.PrometheusAPI()
# get alerts from time when manager deployment was scaled down
alerts = measure_stop_ceph_mgr.get('prometheus_alerts')
target_label = constants.ALERT_MGRISABSENT
target_msg = 'Storage metrics collector service not available anymore.'
states = ['pending', 'firing']
prometheus.check_alert_list(
label=target_label,
msg=target_msg,
alerts=alerts,
states=states,
severity='critical')
api.check_alert_cleared(
label=target_label,
measure_end_time=measure_stop_ceph_mgr.get('stop')
)
@tier4
@pytest.mark.polarion_id("OCS-904")
def test_ceph_monitor_stopped(measure_stop_ceph_mon):
"""
Test that there is appropriate alert related to ceph monitor quorum
when there is even number of ceph monitors and that this alert
is cleared when monitors are back online.
"""
api = prometheus.PrometheusAPI()
    # get alerts from time when monitor deployment was scaled down
alerts = measure_stop_ceph_mon.get('prometheus_alerts')
for target_label, target_msg, target_states, target_severity in [
(
constants.ALERT_MONQUORUMATRISK,
'Storage quorum at risk',
['pending'],
'error'
),
(
constants.ALERT_CLUSTERWARNINGSTATE,
'Storage cluster is in degraded state',
['pending', 'firing'],
'warning'
)
]:
prometheus.check_alert_list(
label=target_label,
msg=target_msg,
alerts=alerts,
states=target_states,
severity=target_severity
)
api.check_alert_cleared(
label=target_label,
measure_end_time=measure_stop_ceph_mon.get('stop')
)
@tier4
@pytest.mark.polarion_id("OCS-900")
def test_ceph_osd_stopped(measure_stop_ceph_osd):
"""
Test that there is appropriate alert related to situation when ceph osd
is down. Alert is cleared when osd disk is back online.
"""
api = prometheus.PrometheusAPI()
    # get alerts from time when osd deployment was scaled down
alerts = measure_stop_ceph_osd.get('prometheus_alerts')
for target_label, target_msg, target_states, target_severity, ignore in [
(
constants.ALERT_OSDDISKNOTRESPONDING,
'Disk not responding',
['pending', 'firing'],
'error',
False
),
(
constants.ALERT_DATARECOVERYTAKINGTOOLONG,
'Data recovery is slow',
['pending'],
'warning',
True
),
(
constants.ALERT_CLUSTERWARNINGSTATE,
'Storage cluster is in degraded state',
['pending', 'firing'],
'warning',
False
)
]:
prometheus.check_alert_list(
label=target_label,
msg=target_msg,
alerts=alerts,
states=target_states,
severity=target_severity,
ignore_more_occurences=ignore
)
# the time to wait is increased because it takes more time for osd pod
# to be ready than for other pods
osd_up_wait = 360
api.check_alert_cleared(
label=target_label,
measure_end_time=measure_stop_ceph_osd.get('stop'),
time_min=osd_up_wait
)
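# Note on the measure_stop_ceph_* fixtures used above (defined elsewhere in
# ocs-ci): each yields a dict whose 'prometheus_alerts' key holds the alerts
# collected while the component was down and whose 'stop' key holds the
# timestamp at which the measurement ended.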
|
import json
from aliyunsdkecs.request.v20140526 import DescribeInstancesRequest
def test_ali_instances(ali_client):
    """Smoke test: DescribeInstances works against region eu-central-1."""
    region_id = ali_client.get_region_id()
assert region_id == 'eu-central-1'
request = DescribeInstancesRequest.DescribeInstancesRequest()
request.set_PageSize(10)
response = ali_client.do_action_with_exception(request)
instances_dict = json.loads(response.decode())
assert len(instances_dict) > 0
if 'Instances' in instances_dict and 'Instance' in instances_dict['Instances'] \
and len(instances_dict['Instances']['Instance']) > 0:
inst = instances_dict['Instances']['Instance'][0]
assert inst
assert inst['RegionId'] == region_id
|
from al_services.alsvc_nsrl.nsrl import NSRL
|
import os
import re
from importlib import import_module
from django.conf import settings as django_settings
from etcd_config.manager import EtcdConfigManager
from etcd_config.utils import attrs_to_dir
from .utils import copy_if_mutable, dict_rec_update, find_project_root
class EtcdSettingsProxy(object):
def __init__(self):
self.env = getattr(django_settings, 'DJES_ENV', None)
dev_params = getattr(django_settings, 'DJES_DEV_PARAMS', None)
etcd_details = getattr(django_settings, 'DJES_ETCD_DETAILS', None)
self._init_req_getter(
getattr(django_settings, 'DJES_REQUEST_GETTER', None))
self._locate_wsgi_file(
getattr(django_settings, 'DJES_WSGI_FILE', None))
if etcd_details is not None:
self._etcd_mgr = EtcdConfigManager(dev_params, **etcd_details)
self._config_sets = self._etcd_mgr.get_config_sets()
self._env_defaults = self._etcd_mgr.get_env_defaults(self.env)
else:
self._etcd_mgr = None
self._config_sets = dict()
self._env_defaults = EtcdConfigManager.get_dev_params(dev_params)
def _locate_wsgi_file(self, wsgi_file):
if wsgi_file is None:
self._wsgi_file = None
elif wsgi_file.startswith(os.path.sep):
self._wsgi_file = wsgi_file
else:
self._wsgi_file = os.path.join(
find_project_root('manage.py'),
wsgi_file)
def _init_req_getter(self, s):
if s is not None:
            r = re.compile(r'(?P<module>.*)\.(?P<f>[\w_]+)')
m = re.match(r, s)
mod_s = m.group('module')
fun_s = m.group('f')
mod = import_module(mod_s)
self._req_getter = getattr(mod, fun_s)
else:
self._req_getter = None
def _parse_req_config_sets(self):
sets = []
if self._req_getter is not None:
request = self._req_getter()
if request and getattr(request, "META", None):
sets = request.META.get('HTTP_X_DYNAMIC_SETTING', '').split()
return sets
def start_monitors(self):
if self._etcd_mgr is not None:
self._etcd_mgr.monitor_env_defaults(
env=self.env, conf=self._env_defaults,
wsgi_file=self._wsgi_file)
self._etcd_mgr.monitor_config_sets(conf=self._config_sets)
    def __getattr__(self, attr):
        # resolution order: request config sets > etcd env defaults > Django settings
        try:
            dj_value = getattr(django_settings, attr)
            dj_value_exists = True
        except AttributeError:
            dj_value_exists = False
            dj_value = None
        try:
            value = self._env_defaults[attr]
            value_exists = True
        except KeyError:
            value_exists = dj_value_exists
            value = dj_value
for override_set in self._parse_req_config_sets():
config_set = self._config_sets.get(override_set, {})
if attr in config_set:
new_value = config_set[attr]
value = copy_if_mutable(value)
if isinstance(value, dict) and isinstance(new_value, dict):
dict_rec_update(value, new_value)
else:
value = new_value
if value or value_exists:
return value
else:
raise AttributeError(attr)
def as_dict(self):
items = attrs_to_dir(django_settings)
items.update(self._env_defaults)
return items
proxy = EtcdSettingsProxy()
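# Usage sketch (import path and setting names are illustrative):
#
#   from etcd_settings.proxy import proxy
#   timeout = proxy.REQUEST_TIMEOUT   # etcd env default, else Django settings
#   proxy.start_monitors()            # keep values in sync with etcd
#
# Per-request overrides: clients list config-set names in the
# X-Dynamic-Setting header; __getattr__ folds matching sets over the resolved
# value, merging dicts recursively.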
|
import unittest
from tools.transcripts import Transcript, GenePredTranscript
class PositiveStrandTranscriptTests(unittest.TestCase):
"""
Tests the Transcript functionality part of sequence_lib.
Tests the example positive strand BED record drawn out below:
chrom 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
seq G T A T T C T T G G A C C T A A
tx - - a t T C - T G G - - C t a -
tx.pos 0 1 2 3 4 5 6 7 8 9
cds.pos 0 1 2 3 4 5
"""
def setUp(self):
self.tokens = ['chr1', '2', '15', 'test', '0', '+', '4', '13', '0,128,0', '3', '4,3,3', '0,5,10']
self.t = Transcript(self.tokens)
self.transcript_seq = 'ATTCTGGCTA'
self.cds_seq = 'TCTGGC'
self.amino_acid = 'SG'
self.chrom_seq = {'chr1': 'GTATTCTTGGACCTAA'}
def test_sizes(self):
"""
Make sure sizes are correct
"""
self.assertEqual(len(self.t), len(self.transcript_seq))
self.assertEqual(len(self.t.get_cds(self.chrom_seq)), len(self.cds_seq))
self.assertEqual(len(self.t.get_protein_sequence(self.chrom_seq)), len(self.amino_acid))
def test_chromosome_invalid_coordinates(self):
"""
chromosome coordinate translations should return None if the coordinate is invalid
in other spaces
"""
for i in (-10, -1, 0, 1, 6, 10, 11, 16, 100):
self.assertIsNone(self.t.chromosome_coordinate_to_mrna(i))
for i in (-10, -1, 0, 1, 2, 3, 6, 10, 11, 15, 100):
self.assertIsNone(self.t.chromosome_coordinate_to_cds(i))
def test_mrna_invalid_coordinates(self):
"""
mrna coordinate translation should return None if the coordinate is invalid
in other spaces
"""
for i in (-10, -1, 16, 100):
self.assertIsNone(self.t.mrna_coordinate_to_chromosome(i))
for i in (-10, -1, 0, 1, 9, 10, 100):
self.assertIsNone(self.t.mrna_coordinate_to_cds(i))
def test_cds_invalid_coordinates(self):
"""
CDS coordinate translations should return None if the coordinate is invalid
in other spaces
"""
for i in (-10, -1, 6, 100):
self.assertIsNone(self.t.cds_coordinate_to_chromosome(i))
self.assertIsNone(self.t.cds_coordinate_to_mrna(i))
def test_chromosome_coordinate_translations(self):
"""
Check all possible chromosome translations for correct result
"""
cds_result = [None, None, None, None, 0, 1, None, 2, 3, 4, None, None, 5, None, None, None]
mrna_result = [None, None, 0, 1, 2, 3, None, 4, 5, 6, None, None, 7, 8, 9, None]
for i in xrange(16):
self.assertEqual(self.t.chromosome_coordinate_to_cds(i), cds_result[i])
self.assertEqual(self.t.chromosome_coordinate_to_mrna(i), mrna_result[i])
def test_mrna_coordinate_translations(self):
"""
Check all possible mrna translations for correct result
"""
chrom_result = [2, 3, 4, 5, 7, 8, 9, 12, 13, 14, None]
cds_result = [None, None, 0, 1, 2, 3, 4, 5, None, None, None]
for i in xrange(11):
self.assertEqual(self.t.mrna_coordinate_to_chromosome(i), chrom_result[i])
self.assertEqual(self.t.mrna_coordinate_to_cds(i), cds_result[i])
def test_cds_coordinate_translations(self):
"""
        Check all possible cds translations for correct result
"""
chrom_result = [4, 5, 7, 8, 9, 12]
mrna_result = [2, 3, 4, 5, 6, 7]
for i in xrange(6):
self.assertEqual(self.t.cds_coordinate_to_chromosome(i), chrom_result[i])
self.assertEqual(self.t.cds_coordinate_to_mrna(i), mrna_result[i])
def test_reciprocal_translations(self):
"""
Test reciprocal translations between coordinate spaces
"""
for i in xrange(16):
tmp = self.t.chromosome_coordinate_to_mrna(i)
# can't have reciprocal connection once None appears
if tmp is not None:
self.assertEqual(self.t.mrna_coordinate_to_chromosome(tmp), i)
tmp = self.t.chromosome_coordinate_to_cds(i)
if tmp is not None:
self.assertEqual(self.t.cds_coordinate_to_chromosome(tmp), i)
tmp = self.t.mrna_coordinate_to_chromosome(i)
if tmp is not None:
self.assertEqual(self.t.chromosome_coordinate_to_mrna(tmp), i)
tmp = self.t.mrna_coordinate_to_cds(i)
if tmp is not None:
self.assertEqual(self.t.cds_coordinate_to_mrna(tmp), i)
tmp = self.t.cds_coordinate_to_mrna(i)
if tmp is not None:
self.assertEqual(self.t.mrna_coordinate_to_cds(tmp), i)
tmp = self.t.chromosome_coordinate_to_mrna(i)
if tmp is not None:
tmp = self.t.mrna_coordinate_to_cds(tmp)
if tmp is not None:
self.assertEqual(self.t.cds_coordinate_to_chromosome(tmp), i)
tmp = self.t.cds_coordinate_to_chromosome(i)
if tmp is not None:
tmp = self.t.chromosome_coordinate_to_mrna(tmp)
self.assertEqual(self.t.mrna_coordinate_to_cds(tmp), i)
tmp = self.t.cds_coordinate_to_chromosome(self.t.mrna_coordinate_to_cds(i))
if tmp is not None:
self.assertEqual(self.t.chromosome_coordinate_to_mrna(tmp), i)
def test_sequences(self):
"""
Tests that the proper sequences are created from the intervals
"""
self.assertEqual(self.t.get_mrna(self.chrom_seq), self.transcript_seq)
self.assertEqual(self.t.get_cds(self.chrom_seq), self.cds_seq)
self.assertEqual(self.t.get_protein_sequence(self.chrom_seq), self.amino_acid)
class NegativeStrandTranscriptTests(unittest.TestCase):
"""
Tests the Transcript functionality of sequence_lib.
Tests the example negative strand BED record drawn out below:
chrom 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
seq G T A T T C T T G G A C C T A A
tx - - a t T C - T G G - - C t a -
tx.pos 9 8 7 6 5 4 3 2 1 0
cds.pos 5 4 3 2 1 0
"""
def setUp(self):
self.t = Transcript(['chr1', '2', '15', 'A', '0', '-', '4', '13', '0,128,0', '3', '4,3,3', '0,5,10'])
self.transcript_seq = 'TAGCCAGAAT'
self.cds_seq = 'GCCAGA'
self.amino_acid = 'AR'
self.chrom_seq = {'chr1': 'GTATTCTTGGACCTAA'}
def test_chromosome_invalid_coordinates(self):
"""
chromosome coordinate translations should return None if the coordinate is invalid
in other spaces
"""
for i in (-10, -1, 0, 1, 6, 10, 11, 15, 16, 100):
self.assertIsNone(self.t.chromosome_coordinate_to_mrna(i))
for i in (-10, -1, 0, 1, 2, 3, 6, 10, 11, 13, 14, 15, 100):
self.assertIsNone(self.t.chromosome_coordinate_to_cds(i))
def test_mrna_invalid_coordinates(self):
"""
mrna coordinate translation should return None if the coordinate is invalid
in other spaces
"""
for i in (-10, -1, 16, 100):
self.assertIsNone(self.t.mrna_coordinate_to_chromosome(i))
for i in (-10, -1, 0, 1, 8, 9, 10, 100):
self.assertIsNone(self.t.mrna_coordinate_to_cds(i))
def test_cds_invalid_coordinates(self):
"""
CDS coordinate translations should return None if the coordinate is invalid
in other spaces
"""
for i in (-10, -1, 6, 100):
self.assertIsNone(self.t.cds_coordinate_to_chromosome(i))
self.assertIsNone(self.t.cds_coordinate_to_mrna(i))
def test_chromosome_coordinate_translations(self):
"""
Check all possible chromosome translations for correct result
"""
cds_result = [None, None, None, None, 5, 4, None, 3, 2, 1, None, None, 0, None, None, None]
mrna_result = [None, None, 9, 8, 7, 6, None, 5, 4, 3, None, None, 2, 1, 0, None]
for i in xrange(16):
self.assertEqual(self.t.chromosome_coordinate_to_cds(i), cds_result[i])
self.assertEqual(self.t.chromosome_coordinate_to_mrna(i), mrna_result[i])
def test_mrna_coordinate_translations(self):
"""
Check all possible mrna translations for correct result
"""
chrom_result = [14, 13, 12, 9, 8, 7, 5, 4, 3, 2, None]
cds_result = [None, None, 0, 1, 2, 3, 4, 5, None, None, None]
for i in xrange(11):
self.assertEqual(self.t.mrna_coordinate_to_chromosome(i), chrom_result[i])
self.assertEqual(self.t.mrna_coordinate_to_cds(i), cds_result[i])
def test_cds_coordinate_translations(self):
"""
        Check all possible cds translations for correct result
"""
chrom_result = [12, 9, 8, 7, 5, 4]
mrna_result = [2, 3, 4, 5, 6, 7]
for i in xrange(6):
self.assertEqual(self.t.cds_coordinate_to_chromosome(i), chrom_result[i])
self.assertEqual(self.t.cds_coordinate_to_mrna(i), mrna_result[i])
def test_reciprocal_translations(self):
"""
Test reciprocal translations between coordinate spaces
"""
for i in xrange(16):
tmp = self.t.chromosome_coordinate_to_mrna(i)
# can't have reciprocal connection once None appears
if tmp is not None:
self.assertEqual(self.t.mrna_coordinate_to_chromosome(tmp), i)
tmp = self.t.chromosome_coordinate_to_cds(i)
if tmp is not None:
self.assertEqual(self.t.cds_coordinate_to_chromosome(tmp), i)
tmp = self.t.mrna_coordinate_to_chromosome(i)
if tmp is not None:
self.assertEqual(self.t.chromosome_coordinate_to_mrna(tmp), i)
tmp = self.t.mrna_coordinate_to_cds(i)
if tmp is not None:
self.assertEqual(self.t.cds_coordinate_to_mrna(tmp), i)
tmp = self.t.cds_coordinate_to_mrna(i)
if tmp is not None:
self.assertEqual(self.t.mrna_coordinate_to_cds(tmp), i)
tmp = self.t.chromosome_coordinate_to_mrna(i)
if tmp is not None:
tmp = self.t.mrna_coordinate_to_cds(tmp)
if tmp is not None:
self.assertEqual(self.t.cds_coordinate_to_chromosome(tmp), i)
tmp = self.t.cds_coordinate_to_chromosome(i)
if tmp is not None:
tmp = self.t.chromosome_coordinate_to_mrna(tmp)
self.assertEqual(self.t.mrna_coordinate_to_cds(tmp), i)
tmp = self.t.cds_coordinate_to_chromosome(self.t.mrna_coordinate_to_cds(i))
if tmp is not None:
self.assertEqual(self.t.chromosome_coordinate_to_mrna(tmp), i)
class ComplicatedTranscript1(unittest.TestCase):
"""
Tests the Transcript functionality part of sequence_lib.
Tests the example complicated mrna below:
chrom 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20
seq G T A T T C T T G G A C C T A A G C C T G
tx - t a t - - t t G G - - - T A A - c c t -
tx.pos 0 1 2 3 4 5 6 7 8 9 10 11 12
cds.pos 0 1 2 3 4
"""
def setUp(self):
self.tokens = ['chr1', '1', '20', 'A', '0', '+', '8', '16', '0,128,0', '4', '3,4,3,3', '0,5,12,16']
self.t = Transcript(self.tokens)
self.transcript_seq = 'TATTTGGTAACCT'
self.cds_seq = 'GGTAA'
self.amino_acid = 'G'
self.chrom_seq = {'chr1':'GTATTCTTGGACCTAAGCCTG'}
def test_sizes(self):
"""
Make sure sizes are correct
"""
self.assertEqual(len(self.t), len(self.transcript_seq))
self.assertEqual(len(self.t.get_cds(self.chrom_seq)), len(self.cds_seq))
self.assertEqual(len(self.t.get_protein_sequence(self.chrom_seq)), len(self.amino_acid))
def test_chromosome_coordinate_translations(self):
"""
Check all possible chromosome translations for correct result
"""
cds_result = [None, None, None, None, None, None, None, None, 0, 1, None, None, None, 2, 3, 4, None, None, None, None, None]
mrna_result = [None, 0, 1, 2, None, None, 3, 4, 5, 6, None, None, None, 7, 8, 9, None, 10, 11, 12, None]
for i in xrange(21):
self.assertEqual(self.t.chromosome_coordinate_to_cds(i), cds_result[i])
self.assertEqual(self.t.chromosome_coordinate_to_mrna(i), mrna_result[i])
def test_mrna_coordinate_translations(self):
"""
Check all possible mrna translations for correct result
"""
chrom_result = [1, 2, 3, 6, 7, 8, 9, 13, 14, 15, 17, 18, 19]
cds_result = [None, None, None, None, None, 0, 1, 2, 3, 4, None, None, None]
for i in xrange(13):
self.assertEqual(self.t.mrna_coordinate_to_chromosome(i), chrom_result[i])
self.assertEqual(self.t.mrna_coordinate_to_cds(i), cds_result[i])
def test_cds_coordinate_translations(self):
"""
        Check all possible cds translations for correct result
"""
chrom_result = [8, 9, 13, 14, 15]
mrna_result = [5, 6, 7, 8, 9]
for i in xrange(5):
self.assertEqual(self.t.cds_coordinate_to_chromosome(i), chrom_result[i])
self.assertEqual(self.t.cds_coordinate_to_mrna(i), mrna_result[i])
def test_reciprocal_translations(self):
"""
Test reciprocal translations between coordinate spaces
"""
for i in xrange(-1, 12):
tmp = self.t.chromosome_coordinate_to_mrna(i)
# can't have reciprocal connection once None appears
if tmp is not None:
self.assertEqual(self.t.mrna_coordinate_to_chromosome(tmp), i)
tmp = self.t.chromosome_coordinate_to_cds(i)
if tmp is not None:
self.assertEqual(self.t.cds_coordinate_to_chromosome(tmp), i)
tmp = self.t.mrna_coordinate_to_chromosome(i)
if tmp is not None:
self.assertEqual(self.t.chromosome_coordinate_to_mrna(tmp), i)
tmp = self.t.mrna_coordinate_to_cds(i)
if tmp is not None:
self.assertEqual(self.t.cds_coordinate_to_mrna(tmp), i)
tmp = self.t.cds_coordinate_to_mrna(i)
if tmp is not None:
self.assertEqual(self.t.mrna_coordinate_to_cds(tmp), i)
tmp = self.t.chromosome_coordinate_to_mrna(i)
if tmp is not None:
tmp = self.t.mrna_coordinate_to_cds(tmp)
if tmp is not None:
self.assertEqual(self.t.cds_coordinate_to_chromosome(tmp), i)
tmp = self.t.cds_coordinate_to_chromosome(i)
if tmp is not None:
tmp = self.t.chromosome_coordinate_to_mrna(tmp)
self.assertEqual(self.t.mrna_coordinate_to_cds(tmp), i)
tmp = self.t.cds_coordinate_to_chromosome(self.t.mrna_coordinate_to_cds(i))
if tmp is not None:
self.assertEqual(self.t.chromosome_coordinate_to_mrna(tmp), i)
def test_sequences(self):
"""
Tests that the proper sequences are created from the intervals
"""
self.assertEqual(self.t.get_mrna(self.chrom_seq), self.transcript_seq)
self.assertEqual(self.t.get_cds(self.chrom_seq), self.cds_seq)
self.assertEqual(self.t.get_protein_sequence(self.chrom_seq), self.amino_acid)
def test_get_bed(self):
self.assertEqual(self.t.get_bed(), self.tokens)
self.assertEqual(self.t.get_bed(new_start=1, new_stop=12),
['chr1', '1', '10', 'A', '0', '+', '8', '10', '0,128,0', '2', '3,4', '0,5'])
self.assertEqual(self.t.get_bed(new_start=19, new_stop=19),
['chr1', '19', '19', 'A', '0', '+', '0', '0', '0,128,0', '1', '0', '0'])
self.assertEqual(self.t.get_bed(new_start=1, new_stop=4),
['chr1', '1', '4', 'A', '0', '+', '0', '0', '0,128,0', '1', '3', '0'])
class ComplicatedTranscript2(unittest.TestCase):
"""
Tests the Transcript functionality part of sequence_lib.
Tests the example negative strand complicated mrna below:
chrom 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20
seq G T A T T C T T G G A C C T A A G C C T G
tx - t a t - - t T G G - - - T A A - c c t -
tx.pos 12 11 10 9 8 7 6 5 4 3 2 1 0
cds.pos 5 4 3 2 1 0
"""
def setUp(self):
self.tokens = ['chr1', '1', '20', 'A', '0', '-', '7', '16', '0,128,0', '4', '3,4,3,3', '0,5,12,16']
self.t = Transcript(self.tokens)
self.transcript_seq = 'AGGTTACCAAATA'
self.cds_seq = 'TTACCA'
self.amino_acid = 'LP'
self.chrom_seq = {'chr1': 'GTATTCTTGGACCTAAGCCTG'}
def test_sizes(self):
"""
Make sure sizes are correct
"""
self.assertEqual(len(self.t), len(self.transcript_seq))
self.assertEqual(len(self.t.get_cds(self.chrom_seq)), len(self.cds_seq))
self.assertEqual(len(self.t.get_protein_sequence(self.chrom_seq)), len(self.amino_acid))
self.assertEqual(len(self.t.get_cds(self.chrom_seq)), self.t.cds_size)
def test_chromosome_coordinate_translations(self):
"""
Check all possible chromosome translations for correct result
"""
cds_result = [None, None, None, None, None, None, None, 5, 4, 3, None, None, None, 2, 1, 0, None, None, None, None, None]
mrna_result = [None, 12, 11, 10, None, None, 9, 8, 7, 6, None, None, None, 5, 4, 3, None, 2, 1, 0, None]
for i in xrange(21):
self.assertEqual(self.t.chromosome_coordinate_to_cds(i), cds_result[i])
self.assertEqual(self.t.chromosome_coordinate_to_mrna(i), mrna_result[i])
def test_mrna_coordinate_translations(self):
"""
Check all possible mrna translations for correct result
"""
chrom_result = [19, 18, 17, 15, 14, 13, 9, 8, 7, 6, 3, 2, 1]
cds_result = [None, None, None, 0, 1, 2, 3, 4, 5, None, None, None, None]
for i in xrange(13):
self.assertEqual(self.t.mrna_coordinate_to_chromosome(i), chrom_result[i])
self.assertEqual(self.t.mrna_coordinate_to_cds(i), cds_result[i])
def test_cds_coordinate_translations(self):
"""
        Check all possible cds translations for correct result
"""
chrom_result = [15, 14, 13, 9, 8, 7]
mrna_result = [3, 4, 5, 6, 7, 8]
        for i in xrange(6):
self.assertEqual(self.t.cds_coordinate_to_chromosome(i), chrom_result[i])
self.assertEqual(self.t.cds_coordinate_to_mrna(i), mrna_result[i])
def test_reciprocal_translations(self):
"""
Test reciprocal translations between coordinate spaces
"""
for i in xrange(-1, 12):
tmp = self.t.chromosome_coordinate_to_mrna(i)
# can't have reciprocal connection once None appears
if tmp is not None:
self.assertEqual(self.t.mrna_coordinate_to_chromosome(tmp), i)
tmp = self.t.chromosome_coordinate_to_cds(i)
if tmp is not None:
self.assertEqual(self.t.cds_coordinate_to_chromosome(tmp), i)
tmp = self.t.mrna_coordinate_to_chromosome(i)
if tmp is not None:
self.assertEqual(self.t.chromosome_coordinate_to_mrna(tmp), i)
tmp = self.t.mrna_coordinate_to_cds(i)
if tmp is not None:
self.assertEqual(self.t.cds_coordinate_to_mrna(tmp), i)
tmp = self.t.cds_coordinate_to_mrna(i)
if tmp is not None:
self.assertEqual(self.t.mrna_coordinate_to_cds(tmp), i)
tmp = self.t.chromosome_coordinate_to_mrna(i)
if tmp is not None:
tmp = self.t.mrna_coordinate_to_cds(tmp)
if tmp is not None:
self.assertEqual(self.t.cds_coordinate_to_chromosome(tmp), i)
tmp = self.t.cds_coordinate_to_chromosome(i)
if tmp is not None:
tmp = self.t.chromosome_coordinate_to_mrna(tmp)
self.assertEqual(self.t.mrna_coordinate_to_cds(tmp), i)
tmp = self.t.cds_coordinate_to_chromosome(self.t.mrna_coordinate_to_cds(i))
if tmp is not None:
self.assertEqual(self.t.chromosome_coordinate_to_mrna(tmp), i)
def test_sequences(self):
"""
Tests that the proper sequences are created from the intervals
"""
self.assertEqual(self.t.get_mrna(self.chrom_seq), self.transcript_seq)
self.assertEqual(self.t.get_cds(self.chrom_seq), self.cds_seq)
self.assertEqual(self.t.get_protein_sequence(self.chrom_seq), self.amino_acid)
def test_get_bed(self):
self.assertEqual(self.t.get_bed(), self.tokens)
self.assertEqual(self.t.get_bed(new_start=4),
['chr1', '6', '20', 'A', '0', '-', '7', '16', '0,128,0', '3', '4,3,3', '0,7,11'])
self.assertEqual(self.t.get_bed(new_start=17),
['chr1', '17', '20', 'A', '0', '-', '0', '0', '0,128,0', '1', '3', '0'])
self.assertEqual(self.t.get_bed(new_start=10, new_stop=17),
['chr1', '13', '16', 'A', '0', '-', '13', '16', '0,128,0', '1', '3', '0'])
class SingleExonTranscript1(unittest.TestCase):
"""
Tests the Transcript functionality part of sequence_lib.
Tests the example single exon mrna below:
chrom 0 1 2 3 4 5
seq G T A T T C
tx g T A T t c
tx.pos 0 1 2 3 4 5
cds.pos 0 1 2
"""
def setUp(self):
self.t = Transcript(['chr1', '0', '6', 'A', '0', '+', '1', '4', '0,128,0', '1', '6', '0'])
self.transcript_seq = 'GTATTC'
self.cds_seq = 'TAT'
self.amino_acid = 'Y'
self.chrom_seq = {'chr1': 'GTATTCTTGGACCTAA'}
def test_sizes(self):
"""
Make sure sizes are correct
"""
self.assertEqual(len(self.t), len(self.transcript_seq))
self.assertEqual(len(self.t.get_cds(self.chrom_seq)), len(self.cds_seq))
self.assertEqual(len(self.t.get_protein_sequence(self.chrom_seq)), len(self.amino_acid))
def test_mrna_invalid_coordinates(self):
"""
mrna coordinate translation should return None if the coordinate is invalid
in other spaces
"""
for i in (-10, -1, 6, 100):
self.assertIsNone(self.t.mrna_coordinate_to_chromosome(i))
for i in (-10, -1, 0, 4, 5, 9, 10, 100):
self.assertIsNone(self.t.mrna_coordinate_to_cds(i))
def test_cds_invalid_coordinates(self):
"""
CDS coordinate translations should return None if the coordinate is invalid
in other spaces
"""
for i in (-10, -1, 4, 100):
self.assertIsNone(self.t.cds_coordinate_to_chromosome(i))
self.assertIsNone(self.t.cds_coordinate_to_mrna(i))
def test_chromosome_coordinate_translations(self):
"""
Check all possible chromosome translations for correct result
"""
cds_result = [None, 0, 1, 2, None, None, None]
mrna_result = [0, 1, 2, 3, 4, 5, None]
for i in xrange(6):
self.assertEqual(self.t.chromosome_coordinate_to_cds(i), cds_result[i])
self.assertEqual(self.t.chromosome_coordinate_to_mrna(i), mrna_result[i])
def test_mrna_coordinate_translations(self):
"""
Check all possible mrna translations for correct result
"""
chrom_result = [0, 1, 2, 3, 4, 5, None]
cds_result = [None, 0, 1, 2, None, None, None]
for i in xrange(6):
self.assertEqual(self.t.mrna_coordinate_to_chromosome(i), chrom_result[i])
self.assertEqual(self.t.mrna_coordinate_to_cds(i), cds_result[i])
def test_cds_coordinate_translations(self):
"""
        Check all possible cds translations for correct result
"""
chrom_result = [1, 2, 3, None]
mrna_result = [1, 2, 3, None]
for i in xrange(4):
self.assertEqual(self.t.cds_coordinate_to_chromosome(i), chrom_result[i])
self.assertEqual(self.t.cds_coordinate_to_mrna(i), mrna_result[i])
def test_reciprocal_translations(self):
"""
Test reciprocal translations between coordinate spaces
"""
for i in xrange(-1, 7):
tmp = self.t.chromosome_coordinate_to_mrna(i)
# can't have reciprocal connection once None appears
if tmp is not None:
self.assertEqual(self.t.mrna_coordinate_to_chromosome(tmp), i)
tmp = self.t.chromosome_coordinate_to_cds(i)
if tmp is not None:
self.assertEqual(self.t.cds_coordinate_to_chromosome(tmp), i)
tmp = self.t.mrna_coordinate_to_chromosome(i)
if tmp is not None:
self.assertEqual(self.t.chromosome_coordinate_to_mrna(tmp), i)
tmp = self.t.mrna_coordinate_to_cds(i)
if tmp is not None:
self.assertEqual(self.t.cds_coordinate_to_mrna(tmp), i)
tmp = self.t.cds_coordinate_to_mrna(i)
if tmp is not None:
self.assertEqual(self.t.mrna_coordinate_to_cds(tmp), i)
tmp = self.t.chromosome_coordinate_to_mrna(i)
if tmp is not None:
tmp = self.t.mrna_coordinate_to_cds(tmp)
if tmp is not None:
self.assertEqual(self.t.cds_coordinate_to_chromosome(tmp), i)
tmp = self.t.cds_coordinate_to_chromosome(i)
if tmp is not None:
tmp = self.t.chromosome_coordinate_to_mrna(tmp)
self.assertEqual(self.t.mrna_coordinate_to_cds(tmp), i)
tmp = self.t.cds_coordinate_to_chromosome(self.t.mrna_coordinate_to_cds(i))
if tmp is not None:
self.assertEqual(self.t.chromosome_coordinate_to_mrna(tmp), i)
def test_sequences(self):
"""
Tests that the proper sequences are created from the intervals
"""
self.assertEqual(self.t.get_mrna(self.chrom_seq), self.transcript_seq)
self.assertEqual(self.t.get_cds(self.chrom_seq), self.cds_seq)
self.assertEqual(self.t.get_protein_sequence(self.chrom_seq), self.amino_acid)
class SingleExonTranscript2(unittest.TestCase):
"""
Tests the Transcript functionality part of sequence_lib.
Tests the example single exon mrna below:
chrom 0 1 2 3 4 5
seq G T A T T C
tx G T A T T C
tx.pos 0 1 2 3 4 5
cds.pos 0 1 2 3 4 5
"""
def setUp(self):
self.t = Transcript(['chr1', '0', '6', 'A', '0', '+', '0', '6', '0,128,0', '1', '6', '0'])
self.transcript_seq = 'GTATTC'
self.cds_seq = self.transcript_seq
self.amino_acid = 'VF'
self.chrom_seq = {'chr1': 'GTATTCTTGGACCTAA'}
def test_sizes(self):
"""
Make sure sizes are correct
"""
self.assertEqual(len(self.t), len(self.transcript_seq))
self.assertEqual(len(self.t.get_cds(self.chrom_seq)), len(self.cds_seq))
self.assertEqual(len(self.t.get_protein_sequence(self.chrom_seq)), len(self.amino_acid))
def test_chromosome_coordinate_translations(self):
"""
Check all possible chromosome translations for correct result
"""
cds_result = mrna_result = [0, 1, 2, 3, 4, 5, None]
for i in xrange(6):
self.assertEqual(self.t.chromosome_coordinate_to_cds(i), cds_result[i])
self.assertEqual(self.t.chromosome_coordinate_to_mrna(i), mrna_result[i])
def test_mrna_coordinate_translations(self):
"""
Check all possible mrna translations for correct result
"""
chrom_result = cds_result = [0, 1, 2, 3, 4, 5, None]
for i in xrange(6):
self.assertEqual(self.t.mrna_coordinate_to_chromosome(i), chrom_result[i])
self.assertEqual(self.t.mrna_coordinate_to_cds(i), cds_result[i])
def test_cds_coordinate_translations(self):
"""
        Check all possible cds translations for correct result
"""
chrom_result = mrna_result = [0, 1, 2, 3, 4, 5, None]
for i in xrange(6):
self.assertEqual(self.t.cds_coordinate_to_chromosome(i), chrom_result[i])
self.assertEqual(self.t.cds_coordinate_to_mrna(i), mrna_result[i])
def test_reciprocal_translations(self):
"""
Test reciprocal translations between coordinate spaces
"""
for i in xrange(-1, 7):
tmp = self.t.chromosome_coordinate_to_mrna(i)
# can't have reciprocal connection once None appears
if tmp is not None:
self.assertEqual(self.t.mrna_coordinate_to_chromosome(tmp), i)
tmp = self.t.chromosome_coordinate_to_cds(i)
if tmp is not None:
self.assertEqual(self.t.cds_coordinate_to_chromosome(tmp), i)
tmp = self.t.mrna_coordinate_to_chromosome(i)
if tmp is not None:
self.assertEqual(self.t.chromosome_coordinate_to_mrna(tmp), i)
tmp = self.t.mrna_coordinate_to_cds(i)
if tmp is not None:
self.assertEqual(self.t.cds_coordinate_to_mrna(tmp), i)
tmp = self.t.cds_coordinate_to_mrna(i)
if tmp is not None:
self.assertEqual(self.t.mrna_coordinate_to_cds(tmp), i)
tmp = self.t.chromosome_coordinate_to_mrna(i)
if tmp is not None:
tmp = self.t.mrna_coordinate_to_cds(tmp)
if tmp is not None:
self.assertEqual(self.t.cds_coordinate_to_chromosome(tmp), i)
tmp = self.t.cds_coordinate_to_chromosome(i)
if tmp is not None:
tmp = self.t.chromosome_coordinate_to_mrna(tmp)
self.assertEqual(self.t.mrna_coordinate_to_cds(tmp), i)
tmp = self.t.cds_coordinate_to_chromosome(self.t.mrna_coordinate_to_cds(i))
if tmp is not None:
self.assertEqual(self.t.chromosome_coordinate_to_mrna(tmp), i)
def test_sequences(self):
"""
Tests that the proper sequences are created from the intervals
"""
self.assertEqual(self.t.get_mrna(self.chrom_seq), self.transcript_seq)
self.assertEqual(self.t.get_cds(self.chrom_seq), self.cds_seq)
self.assertEqual(self.t.get_protein_sequence(self.chrom_seq), self.amino_acid)
class NoncodingTranscript(unittest.TestCase):
"""
Tests the Transcript functionality part of sequence_lib.
Tests the example non-coding spliced mrna below:
chrom 0 1 2 3 4 5 6 7 8 9 10
seq G T A T T C T T G G A
tx g t a t - - t - g g a
tx.pos 0 1 2 3 4 5 6 7
"""
def setUp(self):
self.t = Transcript(['chr1', '0', '11', 'A', '0', '+', '0', '0', '0,128,0', '3', '4,1,3', '0,6,8'])
self.transcript_seq = 'GTATTGGA'
self.cds_seq = ''
self.amino_acid = ''
self.chrom_seq = {'chr1': 'GTATTCTTGGACCTAA'}
def test_sizes(self):
"""
Make sure sizes are correct
"""
self.assertEqual(len(self.t), len(self.transcript_seq))
self.assertEqual(len(self.t.get_cds(self.chrom_seq)), len(self.cds_seq))
self.assertEqual(len(self.t.get_protein_sequence(self.chrom_seq)), len(self.amino_acid))
def test_chromosome_coordinate_translations(self):
"""
Check all possible chromosome translations for correct result
"""
cds_result = [None] * 12
mrna_result = [0, 1, 2, 3, None, None, 4, None, 5, 6, 7, None]
for i in xrange(12):
self.assertEqual(self.t.chromosome_coordinate_to_cds(i), cds_result[i])
self.assertEqual(self.t.chromosome_coordinate_to_mrna(i), mrna_result[i])
def test_mrna_coordinate_translations(self):
"""
Check all possible mrna translations for correct result
"""
chrom_result = [0, 1, 2, 3, 6, 8, 9, 10, None]
cds_result = [None] * 9
for i in xrange(9):
self.assertEqual(self.t.mrna_coordinate_to_chromosome(i), chrom_result[i])
self.assertEqual(self.t.mrna_coordinate_to_cds(i), cds_result[i])
def test_cds_coordinate_translations(self):
"""
        Check all possible cds translations for correct result
"""
chrom_result = mrna_result = [None] * 10
for i in xrange(10):
self.assertEqual(self.t.cds_coordinate_to_chromosome(i), chrom_result[i])
self.assertEqual(self.t.cds_coordinate_to_mrna(i), mrna_result[i])
def test_reciprocal_translations(self):
"""
Test reciprocal translations between coordinate spaces
"""
for i in xrange(-1, 12):
tmp = self.t.chromosome_coordinate_to_mrna(i)
# can't have reciprocal connection once None appears
if tmp is not None:
self.assertEqual(self.t.mrna_coordinate_to_chromosome(tmp), i)
tmp = self.t.chromosome_coordinate_to_cds(i)
if tmp is not None:
self.assertEqual(self.t.cds_coordinate_to_chromosome(tmp), i)
tmp = self.t.mrna_coordinate_to_chromosome(i)
if tmp is not None:
self.assertEqual(self.t.chromosome_coordinate_to_mrna(tmp), i)
tmp = self.t.mrna_coordinate_to_cds(i)
if tmp is not None:
self.assertEqual(self.t.cds_coordinate_to_mrna(tmp), i)
tmp = self.t.cds_coordinate_to_mrna(i)
if tmp is not None:
self.assertEqual(self.t.mrna_coordinate_to_cds(tmp), i)
tmp = self.t.chromosome_coordinate_to_mrna(i)
if tmp is not None:
tmp = self.t.mrna_coordinate_to_cds(tmp)
if tmp is not None:
self.assertEqual(self.t.cds_coordinate_to_chromosome(tmp), i)
tmp = self.t.cds_coordinate_to_chromosome(i)
if tmp is not None:
tmp = self.t.chromosome_coordinate_to_mrna(tmp)
self.assertEqual(self.t.mrna_coordinate_to_cds(tmp), i)
tmp = self.t.cds_coordinate_to_chromosome(self.t.mrna_coordinate_to_cds(i))
if tmp is not None:
self.assertEqual(self.t.chromosome_coordinate_to_mrna(tmp), i)
def test_sequences(self):
"""
Tests that the proper sequences are created from the intervals
"""
self.assertEqual(self.t.get_mrna(self.chrom_seq), self.transcript_seq)
self.assertEqual(self.t.get_cds(self.chrom_seq), self.cds_seq)
self.assertEqual(self.t.get_protein_sequence(self.chrom_seq), self.amino_acid)
class PositiveStrandGenePredTranscript(PositiveStrandTranscriptTests):
"""
Tests the Transcript functionality part of sequence_lib.
Tests the example positive strand BED record drawn out below:
chrom 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
seq G T A T T C T T G G A C C T A A
tx - - a t T C - T G G - - C t a -
tx.pos 0 1 2 3 4 5 6 7 8 9
cds.pos 0 1 2 3 4 5
"""
def setUp(self):
self.tokens = ['A', 'chr1', '+', '2', '15', '4', '13', '3', '2,7,12', '6,10,15', '1',
'q2', 'cmpl', 'cmpl', '2,1,1']
self.t = GenePredTranscript(self.tokens)
self.transcript_seq = 'ATTCTGGCTA'
self.cds_seq = 'TCTGGC'
        self.amino_acid = 'L'  # this transcript has an offset of 1, so the first in-frame codon is CTG
self.chrom_seq = {'chr1': 'GTATTCTTGGACCTAA'}
def test_sequences(self):
"""
Tests that the proper sequences are created from the intervals
"""
self.assertEqual(self.t.get_mrna(self.chrom_seq), self.transcript_seq)
self.assertEqual(self.t.get_cds(self.chrom_seq), self.cds_seq)
self.assertEqual(self.t.get_protein_sequence(self.chrom_seq), self.amino_acid)
def test_get_gp(self):
self.assertEqual(self.t.get_gene_pred(), self.tokens)
class NegativeStrandGenePredTranscript(NegativeStrandTranscriptTests):
"""
Tests the Transcript functionality part of sequence_lib.
    Tests the example negative strand BED record drawn out below:
chrom 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15
seq G T A T T C T T G G A C C T A A
tx - - a t T C - T G G - - C t a -
tx.pos 0 1 2 3 4 5 6 7 8 9
cds.pos 0 1 2 3 4 5
"""
def setUp(self):
self.tokens = ['A', 'chr1', '-', '2', '15', '4', '13', '3', '2,7,12', '6,10,15', '1',
'q2', 'cmpl', 'cmpl', '2,2,1']
self.t = GenePredTranscript(self.tokens)
self.transcript_seq = 'TAGCCAGAAT'
self.cds_seq = 'GCCAGA'
        self.amino_acid = 'Q'  # this transcript has an offset of 2, so the first in-frame codon is CAG
self.chrom_seq = {'chr1': 'GTATTCTTGGACCTAA'}
def test_sequences(self):
"""
Tests that the proper sequences are created from the intervals
"""
self.assertEqual(self.t.get_mrna(self.chrom_seq), self.transcript_seq)
self.assertEqual(self.t.get_cds(self.chrom_seq), self.cds_seq)
self.assertEqual(self.t.get_protein_sequence(self.chrom_seq), self.amino_acid)
def test_get_gp(self):
self.assertEqual(self.t.get_gene_pred(), self.tokens)
if __name__ == '__main__':
unittest.main()
|