{
"source": "jonishaso/pythonExelScript",
"score": 2
}
#### File: jonishaso/pythonExelScript/pp.py
```python
import pandas as pd
from datetime import datetime, timedelta
import sys
import json
import smtplib
import time
import schedule
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.mime.base import MIMEBase
from email import encoders
SERVER = "smtp-mail.outlook.com"
FROM = "<EMAIL>"
EPWD = "<PASSWORD>"
with open('Z:/IT/Development/name-list.json') as name_list_file:
    json_list_data = json.load(name_list_file)
syd_name = json_list_data['syd']
mel_name = json_list_data['mel']
income_files = []
time_format_date = '%Y-%m-%d'
file_prefix = "papercut-print-log-"  # prepended to the date to form the log file name
syd_folder_path = "Z:/IT/Development/printer_report/raw_data/syd/"
mel_folder_path = "Z:/IT/Development/printer_report/raw_data/mel/"
def send_mail(subject, text_body, attachment_name='', attachment_path='', to=''):
    if to == '':
        return
    msg = MIMEMultipart()
    msg['From'] = FROM
    msg['To'] = to
    msg['Subject'] = subject
    msg.attach(MIMEText(text_body, 'plain'))
    if attachment_name != '' and attachment_path != '':
        # read the attachment with a context manager so the file handle is closed
        with open(attachment_path, "rb") as attachment:
            part = MIMEBase('application', 'octet-stream')
            part.set_payload(attachment.read())
        encoders.encode_base64(part)
        part.add_header('Content-Disposition',
                        "attachment; filename=%s" % attachment_name)
        msg.attach(part)
    # SMTP(host, port) already connects; a separate connect() call is not needed
    server = smtplib.SMTP(SERVER, 587)
    server.starttls()
    server.login(FROM, EPWD)
    server.sendmail(FROM, to, msg.as_string())
    server.quit()
def income_collection(begin, end):
    """Return weekday (Mon-Fri) log file names between two dates, inclusive.

    Iterates day by day, so ranges spanning month or year boundaries are handled.
    """
    day_range = []
    current = datetime.strptime(begin, time_format_date)
    end_date = datetime.strptime(end, time_format_date)
    while current <= end_date:
        if current.weekday() in [0, 1, 2, 3, 4]:
            day_range.append("{}{}.csv".format(file_prefix, current.date().isoformat()))
        current += timedelta(days=1)
    return day_range
def last_week_days(given_day=''):
    """Return weekday (Mon-Fri) log file names for the week before the given day."""
    if given_day != '':
        today = datetime.strptime(given_day, time_format_date)
    else:
        today = datetime.now()
    # Monday of the previous week; robust across month and year boundaries
    last_monday = today - timedelta(days=today.weekday() + 7)
    return [
        "{}{}.csv".format(file_prefix, (last_monday + timedelta(days=i)).date().isoformat())
        for i in range(5)
    ]
def calculate(sub_income_df):
    """Total printed pages: the sum of Pages * Copies over all rows."""
    return int((sub_income_df['Pages'] * sub_income_df['Copies']).sum())
def single_day_outcome(income_df, name_list):
outcome = []
for i in name_list:
        # PaperCut user names are assumed to be first name + last initial, e.g. "John Smith" -> "johns"
        temp_name = i['name'].split(' ')
        temp_name = "{}{}".format(
            temp_name[0].lower(), temp_name[1][0].lower())
records = income_df.loc[(income_df['User'] == temp_name)]
records_sin = records.loc[(records['Duplex'] == 'NOT DUPLEX')]
records_dup = records.loc[(records['Duplex'] == 'DUPLEX')]
records_bw = records.loc[(records['Grayscale'] == 'GRAYSCALE')]
records_color = records.loc[(records['Grayscale'] == 'NOT GRAYSCALE')]
outcome.append({'name': i['name'],
'pages': calculate(records),
'double_side': calculate(records_dup),
'single_side': calculate(records_sin),
'black&white': calculate(records_bw),
'color': calculate(records_color)
})
return outcome
def count_pages():
syd_temp_all_frame = []
mel_temp_all_frame = []
file_list = []
file_str = ''
""" python ./pp.py # previous week of current date
python ./pp.py 2019-02-08 # previous week of 2019-02-08
python ./pp.py 2019-02-08 2019-02-12 # time from 02-08 to 02-12
"""
try:
if len(sys.argv) == 1:
file_list = last_week_days()
elif len(sys.argv) == 2:
datetime.strptime(sys.argv[1], time_format_date)
file_list = last_week_days(sys.argv[1])
elif len(sys.argv) == 3:
temp_a = datetime.strptime(sys.argv[1], time_format_date)
temp_b = datetime.strptime(sys.argv[2], time_format_date)
            if (temp_b - temp_a).days <= 0:
                raise ValueError('start date must be earlier than end date')
file_list = income_collection(sys.argv[1], sys.argv[2])
except ValueError:
print('err: input date format err')
return
for i in file_list:
        try:
            # note: pandas >= 1.3 replaces error_bad_lines=False with on_bad_lines='skip'
            syd_csv_file = pd.read_csv(
                syd_folder_path + i, header=1, usecols=[1, 2, 3, 11, 12], index_col=False, engine='c', error_bad_lines=False)
            mel_csv_file = pd.read_csv(
                mel_folder_path + i, header=1, usecols=[1, 2, 3, 11, 12], index_col=False, engine='c', error_bad_lines=False)
        except FileNotFoundError:
            continue
else:
syd_temp_all_frame.append(syd_csv_file)
mel_temp_all_frame.append(mel_csv_file)
file_str += i + "; "
""" if len(temp_all_frame) == 0:
return {
"period": file_list[0].split(file_surfix)[1].split('.')[0] + "~" + file_list[-1].split(file_surfix)[1].split('.')[0],
"valid_files": file_str,
"outcome_df": pd.DataFrame(data={'': []})
}
else: """
if len(syd_temp_all_frame) == 0:
syd_new_df = pd.DataFrame(data=[])
mel_new_df = pd.DataFrame(data=[])
return {
"period": "no_raw_data_match",
"valid_files": file_str,
"syd_outcome_df": syd_new_df,
"mel_outcome_df": mel_new_df
}
else:
        syd_new_df = pd.DataFrame(data=single_day_outcome(pd.concat(syd_temp_all_frame), syd_name)).sort_values(
            by=['pages'], ascending=False).reset_index(drop=True)
        mel_new_df = pd.DataFrame(data=single_day_outcome(pd.concat(mel_temp_all_frame), mel_name)).sort_values(
            by=['pages'], ascending=False).reset_index(drop=True)
        return {
            "period": file_list[0].split(file_prefix)[1].split('.')[0] + "~" + file_list[-1].split(file_prefix)[1].split('.')[0],
"valid_files": file_str,
"syd_outcome_df": syd_new_df,
"mel_outcome_df": mel_new_df
}
def write_excel_file(final_result):
outcome_file_name = './' + \
datetime.now().isoformat().split('.')[0].replace(':', '-') + '.xlsx'
outcome_file = pd.ExcelWriter(outcome_file_name, engine='xlsxwriter')
final_result["syd_outcome_df"].to_excel(
outcome_file, sheet_name='Sydney office', index=False)
final_result["mel_outcome_df"].to_excel(
outcome_file, sheet_name='Melbourne office', index=False)
outcome_workbook = outcome_file.book
syd_outcome_worksheet = outcome_file.sheets['Sydney office']
mel_outcome_worksheet = outcome_file.sheets['Melbourne office']
bold = outcome_workbook.add_format(
{'bold': True, 'bg_color': 'yellow', 'font_size': 18})
align = outcome_workbook.add_format({'align': 'center', 'font_size': 16})
bold_red = outcome_workbook.add_format(
{'bold': True, 'bg_color': 'yellow'})
syd_outcome_worksheet.set_column('A:F', 40, align)
syd_outcome_worksheet.write('A1', 'Name', bold)
syd_outcome_worksheet.write('B1', 'Total No. of pages', bold)
syd_outcome_worksheet.write('C1', 'No. of pages Double-sided', bold)
syd_outcome_worksheet.write('D1', 'No. of pages Single side', bold)
syd_outcome_worksheet.write('E1', 'No. of pages Black & white', bold)
syd_outcome_worksheet.write('F1', 'No. of pages Color', bold)
mel_outcome_worksheet.set_column('A:F', 40, align)
mel_outcome_worksheet.write('A1', 'Name', bold)
mel_outcome_worksheet.write('B1', 'Total No. of pages', bold)
mel_outcome_worksheet.write('C1', 'No. of pages Double-sided', bold)
mel_outcome_worksheet.write('D1', 'No. of pages Single side', bold)
mel_outcome_worksheet.write('E1', 'No. of pages Black & white', bold)
mel_outcome_worksheet.write('F1', 'No. of pages Color', bold)
row = 1
for row_num, value in final_result["syd_outcome_df"].iterrows():
syd_outcome_worksheet.write(row, 0, value['name'], align)
syd_outcome_worksheet.write(row, 1, value['pages'], align)
syd_outcome_worksheet.write(row, 2, value['double_side'], align)
syd_outcome_worksheet.write(row, 3, value['single_side'], align)
syd_outcome_worksheet.write(row, 4, value['black&white'], align)
syd_outcome_worksheet.write(row, 5, value['color'], align)
row += 1
syd_outcome_worksheet.write(row+1, 0, "Total", bold)
syd_outcome_worksheet.write(row+1, 1, '=SUM(B2:B{})'.format(row), bold)
syd_outcome_worksheet.write(row+1, 3, "period", bold_red)
syd_outcome_worksheet.write(row+1, 4, final_result["period"], bold_red)
row = 1
for row_num, value in final_result["mel_outcome_df"].iterrows():
mel_outcome_worksheet.write(row, 0, value['name'], align)
mel_outcome_worksheet.write(row, 1, value['pages'], align)
mel_outcome_worksheet.write(row, 2, value['double_side'], align)
mel_outcome_worksheet.write(row, 3, value['single_side'], align)
mel_outcome_worksheet.write(row, 4, value['black&white'], align)
mel_outcome_worksheet.write(row, 5, value['color'], align)
row += 1
mel_outcome_worksheet.write(row+1, 0, "Total", bold)
mel_outcome_worksheet.write(row+1, 1, '=SUM(B2:B{})'.format(row), bold)
mel_outcome_worksheet.write(row+1, 3, "period", bold_red)
mel_outcome_worksheet.write(row+1, 4, final_result["period"], bold_red)
    # note: ExcelWriter.save() was removed in pandas 2.0; use close() there
    outcome_file.save()
return outcome_file_name
def jobs():
    excel_file_name = write_excel_file(count_pages())
    # send_mail("sched", "Printer Report", to="<EMAIL>", attachment_path=excel_file_name, attachment_name="printing_report.xlsx")

jobs()
# note: a 2-second interval looks like a debug value for what is a weekly report
schedule.every(2).seconds.do(jobs)
while 1:
    schedule.run_pending()
    time.sleep(1)
```
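A brief usage sketch for the date helpers above. The dates are hypothetical, and since `pp.py` starts its schedule loop at import time, treat this as illustrative rather than directly importable:

```python
# Hypothetical dates; file names follow the file_prefix convention above.
print(last_week_days('2019-02-08'))
# ['papercut-print-log-2019-01-28.csv', ..., 'papercut-print-log-2019-02-01.csv']

print(income_collection('2019-02-08', '2019-02-12'))
# ['papercut-print-log-2019-02-08.csv',
#  'papercut-print-log-2019-02-11.csv',
#  'papercut-print-log-2019-02-12.csv']
```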
{
"source": "jonishaso/rateReport",
"score": 2
}
#### File: jonishaso/rateReport/eod_rate.py
```python
import MySQLdb as mdb
from datetime import datetime as d, timedelta
from json import load
from struct import unpack
from socket import socket, AF_INET, SOCK_STREAM
with open('./setting.json') as ff:
j = load(ff)
db_usr = j['dbUsr']
db_pwd = j['<PASSWORD>']
db_hst = j['dbHst']
db_name = j['dbNm']
with open('./config.json') as cc:
j = load(cc)
usd_check = j['usdcheck']
def get_response(symbol: str, from_t: int, to_t: int, step: int):
if step not in [1, 15, 60]:
return b''
client = socket(AF_INET, SOCK_STREAM)
client.connect(('mt4demomaster.rztrader.com', 443))
send_str = ('WHISTORYNEW-symbol={}|period={}|from={}|to={}\nQUIT\n'
.format(symbol, str(step), str(from_t), str(to_t))).encode('ascii')
    # sendall guarantees the whole request is written
    client.sendall(send_str)
    return client.recv(10148)
async def rate_at_eod(symbol: str, trade_date: str, next_day: int = 0, eod_hour: int = 23):
if 'USD' == symbol:
return{
"symbol": symbol,
"origin_time": trade_date,
# "eod_time": '',
"rate": 1.0
}
    original_date = d.fromisoformat(trade_date)
    # timedelta keeps the day arithmetic valid across month and year boundaries
    eod_date = int((d(original_date.year, original_date.month, original_date.day,
                      eod_hour, 0, 0) + timedelta(days=next_day)).timestamp())
    response = get_response(symbol + '.rp', eod_date, eod_date, 60)
    if len(response) in (0, 4):
return{
"symbol": symbol,
"origin_time": trade_date,
# "eod_time": '',
"rate": 0.0
}
else:
        # 12-byte header of three int32 fields; header[1] is the price digit count
        header = unpack('iii', response[:12])
        digit = 10 ** (header[1]*(-1))
        # first 20-byte bar record: five int32 fields
        body = unpack('iiiii', response[12:][:20])
return{
"symbol": symbol,
"origin_time": trade_date,
# "eod_time": d.fromtimestamp(body[0]),
"rate": (body[1] + body[4]) * digit
}
def create_symbol_list(f_time: str, e_time: str):
    try:
        d.fromisoformat(f_time)
        d.fromisoformat(e_time)
    except Exception as e:
        print(e)
        return
    symbol_date_pair = []

    def collect(rows):
        """Shared handling for the close_time and open_time query results."""
        for i in rows:
            s = i[0]
            t = i[1].isoformat()
            if len(s) == 6:
                if "USD" == s[:3]:
                    continue
                if "USD" != s[:3] and "USD" != s[3:]:
                    try:
                        # cross pair: map the base currency to its USD symbol
                        temp = (usd_check[s[:3]].split('.')[0], t)
                    except KeyError:
                        print('create symbol list error symbol {}'.format(s))
                        continue
                else:
                    temp = (s, t)
            else:
                temp = (s, t)
            if temp not in symbol_date_pair:
                symbol_date_pair.append(temp)

    con = mdb.connect(db_hst, db_usr, db_pwd, db_name)
    cur = con.cursor()
    try:
        cur.execute(
            "select substring_index(t.symbol, '.', 1), date(t.close_time) from MT4_TRADES as t where t.close_time between date(%s) and date(%s) and t.symbol != '' and t.conv_rate1 != 0.0 group by t.symbol, date(t.close_time);", [f_time, e_time])
        collect(cur.fetchall())
        cur.execute(
            "select substring_index(t.symbol, '.', 1), date(t.open_time) from MT4_TRADES as t where t.open_time between date(%s) and date(%s) and t.symbol != '' and t.conv_rate1 != 0.0 group by t.symbol, date(t.open_time);", [f_time, e_time])
        collect(cur.fetchall())
    except Exception as e:
        print('database error in create_symbol_list: {}'.format(e))
        con.rollback()
    finally:
        con.close()
    return symbol_date_pair
```
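A hedged sketch of the binary response layout that `rate_at_eod` assumes; the field meanings are inferred from the unpacking code above, not from any protocol documentation:

```python
# Inferred layout (assumption): a 12-byte header of three int32 values where
# header[1] is the digit count, followed by 20-byte bars of five int32 values.
from struct import unpack

def parse_first_bar(response: bytes) -> float:
    if len(response) in (0, 4):           # empty or error marker, as above
        return 0.0
    header = unpack('iii', response[:12])
    scale = 10 ** (-header[1])            # e.g. 5 digits -> 1e-5
    bar = unpack('iiiii', response[12:32])
    # bar[1] looks like an absolute open price and bar[4] a delta on it,
    # matching (body[1] + body[4]) * digit in rate_at_eod
    return (bar[1] + bar[4]) * scale
```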
{
"source": "jonitoh/anonymizer-standalone",
"score": 2
}
#### File: app/internal/__init__.py
```python
from fastapi import APIRouter
from app.internal import admin
from app.core.config import Settings
def create_api_internal(settings: Settings) -> APIRouter:
""" Complete creation of the api internal """
    # Instantiate api
api_internal = APIRouter()
# Include all routers
api_internal.include_router(admin.get_router(settings))
return api_internal
```
#### File: backend/app/main.py
```python
from fastapi import FastAPI
from fastapi.responses import RedirectResponse
from fastapi.staticfiles import StaticFiles
from fastapi.middleware.cors import CORSMiddleware
from pymongo import MongoClient
from app.core.config import get_settings
from app.internal import create_api_internal
from app.routers import create_api_router
mongo_client = None
def get_client(uri: str):
    """
    Set up (or reuse) the mongo client for the site
    """
    global mongo_client
    if mongo_client is not None:
        return mongo_client
    return MongoClient(uri)
def create_app() -> FastAPI:
""" Complete creation of the app """
global mongo_client # TODO: to remove; too messy
    # Instantiate settings
    settings = get_settings()
    # Instantiate database client
    mongo_client = get_client(settings.MONGO_DATABASE_URI)
    # Instantiate app
app = FastAPI(
title=settings.PROJECT_NAME,
openapi_url=settings.OPENAPI_URL,
debug=settings.DEBUG,
)
# C.O.R.S
if settings.CORS_ORIGINS:
app.add_middleware(
CORSMiddleware,
allow_origins=settings.CORS_ORIGINS,
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
# Add static folder
app.mount(settings.STATIC_FOLDER, StaticFiles(directory="static"), name="static")
# Include all routers
app.include_router(create_api_router(settings), prefix=settings.API_VERSION_URL)
# Include all internals
app.include_router(create_api_internal(settings), prefix=settings.API_VERSION_URL)
# HELLO WORLD ROUTE
@app.get('/hello-world')
def test_route():
return {'message': 'Hello World'}
# ROOT ROUTE
@app.get("/", include_in_schema=False)
def redirect_to_docs() -> RedirectResponse:
return RedirectResponse("/docs")
"""@app.on_event("startup")
async def connect_to_database() -> None:
database = _get_database()
if not database.is_connected:
await database.connect()
@app.on_event("shutdown")
async def shutdown() -> None:
database = _get_database()
if database.is_connected:
await database.disconnect()"""
return app
app = create_app()
```
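A minimal smoke test for `create_app`, assuming the settings resolve without a live MongoDB (pymongo connects lazily) and that the `static/` directory exists for the mount:

```python
# Assumes the app package layout above; TestClient drives the ASGI app directly.
from fastapi.testclient import TestClient
from app.main import create_app

def test_hello_world():
    client = TestClient(create_app())
    r = client.get('/hello-world')
    assert r.status_code == 200
    assert r.json() == {'message': 'Hello World'}
```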
{
"source": "jonitoh/edaviz",
"score": 3
}
#### File: edaviz/edaviz/heatmaps.py
```python
from __future__ import division
import itertools
import datetime
import matplotlib as mpl
from matplotlib.collections import LineCollection
import matplotlib.pyplot as plt
from matplotlib import gridspec
import matplotlib.patheffects as patheffects
import numpy as np
import pandas as pd
from scipy.cluster import hierarchy
import seaborn as sns
from seaborn import cm
from seaborn.axisgrid import Grid
from seaborn.utils import (
despine, axis_ticklabels_overlap, relative_luminance, to_utf8)
from seaborn.external.six import string_types
__all__ = ['customised_cells_heatmap']
def _index_to_label(index):
"""
    (Unchanged function)
Convert a pandas index or multiindex to an axis label.
"""
if isinstance(index, pd.MultiIndex):
return "-".join(map(to_utf8, index.names))
else:
return index.name
def _index_to_ticklabels(index):
"""
    (Unchanged function)
Convert a pandas index or multiindex into ticklabels.
"""
if isinstance(index, pd.MultiIndex):
return ["-".join(map(to_utf8, i)) for i in index.values]
else:
return index.values
def _convert_colors(colors):
"""
    (Unchanged function)
Convert either a list of colors or nested lists of colors to RGB.
"""
to_rgb = mpl.colors.colorConverter.to_rgb
if isinstance(colors, pd.DataFrame):
# Convert dataframe
return pd.DataFrame({col: colors[col].map(to_rgb)
for col in colors})
elif isinstance(colors, pd.Series):
return colors.map(to_rgb)
else:
try:
to_rgb(colors[0])
# If this works, there is only one level of colors
return list(map(to_rgb, colors))
except ValueError:
# If we get here, we have nested lists
return [list(map(to_rgb, l)) for l in colors]
def _matrix_mask(data, mask):
"""
    (Unchanged function)
Ensure that data and mask are compatible and add missing values.
Values will be plotted for cells where ``mask`` is ``False``.
``data`` is expected to be a DataFrame; ``mask`` can be an array or
a DataFrame.
"""
    if mask is None:
        # plain bool: np.bool was removed from modern numpy
        mask = np.zeros(data.shape, bool)
    if isinstance(mask, np.ndarray):
        # For array masks, ensure that shape matches data then convert
        if mask.shape != data.shape:
            raise ValueError("Mask must have the same shape as data.")
        mask = pd.DataFrame(mask,
                            index=data.index,
                            columns=data.columns,
                            dtype=bool)
elif isinstance(mask, pd.DataFrame):
# For DataFrame masks, ensure that semantic labels match data
        if not (mask.index.equals(data.index)
                and mask.columns.equals(data.columns)):
err = "Mask must have the same index and columns as data."
raise ValueError(err)
# Add any cells with missing data to the mask
# This works around an issue where `plt.pcolormesh` doesn't represent
# missing data properly
mask = mask | pd.isnull(data)
return mask
def _normalize_cell_size(size, vmin, vmax, true_value, false_value, nan_value):
    """Clip a cell value into [vmin, vmax]; map booleans and NaN to fixed sizes.

    The keyword names match the applymap(...) call in _determine_cells_params.
    """
    if isinstance(size, bool):
        return true_value if size else false_value
    elif np.isnan(size):
        return nan_value
    elif size <= vmin:
        return vmin
    elif size >= vmax:
        return vmax
    else:
        return size
class _CustomisedCellHeatMapper(object):
"""Custom version of _HeatMapper adding the control of the cell size."""
DEFAULT_VMIN_CELLS = .1
DEFAULT_VMAX_CELLS = 1
def __init__(self, data, vmin, vmax, cmap, center, robust, annot, fmt,
annot_kws, cbar, cbar_kws, shape_kws,
data_cells, vmin_cells, vmax_cells, robust_cells,
xticklabels=True, yticklabels=True, mask=None, normalize_cells=True,
square_shaped_cells=True):
"""
Initialize the plotting object.
"""
# We always want to have a DataFrame with semantic information
# and an ndarray to pass to matplotlib
if isinstance(data, pd.DataFrame):
plot_data = data.values
else:
plot_data = np.asarray(data)
data = pd.DataFrame(plot_data)
# We always want to have a DataFrame with semantic information
# and an ndarray to pass to matplotlib
if data_cells is None:
data_cells = pd.DataFrame(data=np.ones(data.shape, dtype=float),
columns=data.columns,
index=data.index)
if isinstance(data_cells, pd.DataFrame):
plot_cells = data_cells.values
else:
plot_cells = np.asarray(data_cells)
data_cells = pd.DataFrame(plot_cells)
# Validate the mask and convert to DataFrame
mask = _matrix_mask(data, mask)
plot_data = np.ma.masked_where(np.asarray(mask), plot_data)
plot_cells = np.ma.masked_where(np.asarray(mask), plot_cells)
# Get good names for the rows and columns
xtickevery = 1
if isinstance(xticklabels, int):
xtickevery = xticklabels
xticklabels = _index_to_ticklabels(data.columns)
elif xticklabels is True:
xticklabels = _index_to_ticklabels(data.columns)
elif xticklabels is False:
xticklabels = []
ytickevery = 1
if isinstance(yticklabels, int):
ytickevery = yticklabels
yticklabels = _index_to_ticklabels(data.index)
elif yticklabels is True:
yticklabels = _index_to_ticklabels(data.index)
elif yticklabels is False:
yticklabels = []
# Get the positions and used label for the ticks
nx, ny = data.T.shape
if not len(xticklabels):
self.xticks = []
self.xticklabels = []
elif isinstance(xticklabels, string_types) and xticklabels == "auto":
self.xticks = "auto"
self.xticklabels = _index_to_ticklabels(data.columns)
else:
self.xticks, self.xticklabels = self._skip_ticks(xticklabels,
xtickevery)
if not len(yticklabels):
self.yticks = []
self.yticklabels = []
elif isinstance(yticklabels, string_types) and yticklabels == "auto":
self.yticks = "auto"
self.yticklabels = _index_to_ticklabels(data.index)
else:
self.yticks, self.yticklabels = self._skip_ticks(yticklabels,
ytickevery)
# Get good names for the axis labels
xlabel = _index_to_label(data.columns)
ylabel = _index_to_label(data.index)
self.xlabel = xlabel if xlabel is not None else ""
self.ylabel = ylabel if ylabel is not None else ""
# Determine good default values for the colormapping
self._determine_cmap_params(plot_data, vmin, vmax,
cmap, center, robust)
# Determine good default values for the sizemapping
self._determine_cells_params(plot_cells, vmin_cells,
vmax_cells, robust_cells,
normalize_cells, square_shaped_cells)
# Sort out the annotations
if annot is None:
annot = False
annot_data = None
elif isinstance(annot, bool):
if annot:
annot_data = plot_data
else:
annot_data = None
else:
try:
annot_data = annot.values
except AttributeError:
annot_data = annot
if annot.shape != plot_data.shape:
raise ValueError('Data supplied to "annot" must be the same '
'shape as the data to plot.')
annot = True
# Save other attributes to the object
self.data = data
self.plot_data = plot_data
self.data_cells = data_cells
self.plot_cells = plot_cells
self.annot = annot
self.annot_data = annot_data
self.fmt = fmt
self.annot_kws = {} if annot_kws is None else annot_kws
self.cbar = cbar
self.cbar_kws = {} if cbar_kws is None else cbar_kws
self.cbar_kws.setdefault('ticks', mpl.ticker.MaxNLocator(6))
self.shape_kws = {} if shape_kws is None else shape_kws
def _determine_cmap_params(self, plot_data, vmin, vmax,
cmap, center, robust):
"""Use some heuristics to set good defaults for colorbar and range."""
calc_data = plot_data.data[~np.isnan(plot_data.data)]
if vmin is None:
vmin = np.percentile(calc_data, 2) if robust else calc_data.min()
if vmax is None:
vmax = np.percentile(calc_data, 98) if robust else calc_data.max()
self.vmin, self.vmax = vmin, vmax
# Choose default colormaps if not provided
if cmap is None:
if center is None:
self.cmap = cm.rocket
else:
self.cmap = cm.icefire
elif isinstance(cmap, string_types):
self.cmap = mpl.cm.get_cmap(cmap)
elif isinstance(cmap, list):
self.cmap = mpl.colors.ListedColormap(cmap)
else:
self.cmap = cmap
# Recenter a divergent colormap
if center is not None:
vrange = max(vmax - center, center - vmin)
normlize = mpl.colors.Normalize(center - vrange, center + vrange)
cmin, cmax = normlize([vmin, vmax])
cc = np.linspace(cmin, cmax, 256)
self.cmap = mpl.colors.ListedColormap(self.cmap(cc))
    def _determine_cells_params(self, plot_cells, vmin_cells, vmax_cells,
                                robust_cells, normalize_cells, square_shaped_cells):
        """Use some heuristics to set good defaults for the cell size range."""
        # ( NEW )
        self.square_shaped_cells = square_shaped_cells
        if plot_cells is None:
            self.plot_cells = np.ones(self.plot_data.shape)
            self.vmax_cells, self.vmin_cells = self.DEFAULT_VMAX_CELLS, self.DEFAULT_VMIN_CELLS
        else:
            # Work on a DataFrame copy; masked entries become NaN
            plot_cells = pd.DataFrame(plot_cells)
            # Handle incorrect types (only bool and numeric values are accepted)
            type_cells = plot_cells.applymap(type)
            available_types = set(type_cells.values.flatten())
            invalid_types = [
                ctype for ctype in available_types
                if not issubclass(ctype, (bool, int, float, np.bool_, np.number))]
            if invalid_types:
                raise TypeError(f"Incorrect types: {invalid_types}")
            # Format into a unique type with the right imputation
            plot_cells = plot_cells.replace({True: 1.0, False: 0.0})
            # Normalize the range of values
            calc_cells = plot_cells.values[~np.isnan(plot_cells.values)]
            if vmin_cells is None:
                vmin_cells = 0.0
            if vmax_cells is None:
                vmax_cells = 1.0
            robust_vmin_cells = np.percentile(
                calc_cells, 5) if robust_cells else calc_cells.min()
            robust_vmax_cells = np.percentile(
                calc_cells, 95) if robust_cells else calc_cells.max()
            if robust_vmin_cells == 0:
                robust_vmin_cells = self.DEFAULT_VMIN_CELLS
            if normalize_cells:
                # Clip the values into the robust range
                plot_cells = plot_cells.applymap(_normalize_cell_size,
                                                 vmin=robust_vmin_cells,
                                                 vmax=robust_vmax_cells,
                                                 true_value=robust_vmax_cells,
                                                 false_value=robust_vmin_cells,
                                                 nan_value=0.0)
            # Store the values
            self.plot_cells = plot_cells.values
            self.vmax_cells = robust_vmax_cells
            self.vmin_cells = robust_vmin_cells
    def _annotate_and_size_cells(self, ax, mesh):
        """Draw one sized shape per cell and add textual labels when requested."""
        # ( MODIFY: former _annotate_heatmap )
        mesh.update_scalarmappable()
        height, width = self.plot_data.shape
        # Fall back to the plot data so shapes can be drawn without annotations
        annot_data = self.annot_data if self.annot_data is not None else self.plot_data
        xpos, ypos = np.meshgrid(np.arange(width) + .5, np.arange(height) + .5)
        for x, y, m, color, val, cell_size in zip(xpos.flat, ypos.flat,
                                                  mesh.get_array(), mesh.get_facecolors(),
                                                  annot_data.flat, self.plot_cells.flat):
            if m is not np.ma.masked:
                if self.square_shaped_cells:
                    shape = plt.Rectangle((x - cell_size / 2, y - cell_size / 2),
                                          cell_size,
                                          cell_size,
                                          facecolor=color,
                                          **self.shape_kws)
                else:
                    # Circles are centered on the cell with the size as diameter
                    shape = plt.Circle((x, y),
                                       cell_size / 2,
                                       facecolor=color,
                                       fill=True,
                                       **self.shape_kws)
                ax.add_patch(shape)
                if self.annot:
                    lum = relative_luminance(color)
                    text_color = ".15" if lum > .408 else "w"
                    annotation = ("{:" + self.fmt + "}").format(val)
                    text_kwargs = dict(
                        color=text_color, ha="center", va="center")
                    text_kwargs.update(self.annot_kws)
                    ax.text(x, y, annotation, **text_kwargs)
def _skip_ticks(self, labels, tickevery):
"""Return ticks and labels at evenly spaced intervals."""
n = len(labels)
if tickevery == 0:
ticks, labels = [], []
elif tickevery == 1:
ticks, labels = np.arange(n) + .5, labels
else:
start, end, step = 0, n, tickevery
ticks = np.arange(start, end, step) + .5
labels = labels[start:end:step]
return ticks, labels
def _auto_ticks(self, ax, labels, axis):
"""Determine ticks and ticklabels that minimize overlap."""
transform = ax.figure.dpi_scale_trans.inverted()
bbox = ax.get_window_extent().transformed(transform)
size = [bbox.width, bbox.height][axis]
axis = [ax.xaxis, ax.yaxis][axis]
tick, = axis.set_ticks([0])
fontsize = tick.label1.get_size()
max_ticks = int(size // (fontsize / 72))
if max_ticks < 1:
return [], []
tick_every = len(labels) // max_ticks + 1
tick_every = 1 if tick_every == 0 else tick_every
ticks, labels = self._skip_ticks(labels, tick_every)
return ticks, labels
    def _plot_custom_pcolormesh(self, ax, **kws):
        """Draw the base heatmap mesh."""
        return ax.pcolormesh(self.plot_data, vmin=self.vmin, vmax=self.vmax,
                             cmap=self.cmap, **kws)
def plot(self, ax, cax, kws):
"""Draw the heatmap on the provided Axes."""
# Remove all the Axes spines
despine(ax=ax, left=True, bottom=True)
# Draw the heatmap
# mesh = self._plot_custom_pcolormesh(ax, **kws)
mesh = ax.pcolormesh(self.plot_data, vmin=self.vmin, vmax=self.vmax,
cmap=self.cmap, **kws)
# Set the axis limits
ax.set(xlim=(0, self.data.shape[1]), ylim=(0, self.data.shape[0]))
# Invert the y axis to show the plot in matrix form
ax.invert_yaxis()
# Possibly add a colorbar
if self.cbar:
cb = ax.figure.colorbar(mesh, cax, ax, **self.cbar_kws)
cb.outline.set_linewidth(0)
# If rasterized is passed to pcolormesh, also rasterize the
# colorbar to avoid white lines on the PDF rendering
if kws.get('rasterized', False):
cb.solids.set_rasterized(True)
# Add row and column labels
if isinstance(self.xticks, string_types) and self.xticks == "auto":
xticks, xticklabels = self._auto_ticks(ax, self.xticklabels, 0)
else:
xticks, xticklabels = self.xticks, self.xticklabels
if isinstance(self.yticks, string_types) and self.yticks == "auto":
yticks, yticklabels = self._auto_ticks(ax, self.yticklabels, 1)
else:
yticks, yticklabels = self.yticks, self.yticklabels
ax.set(xticks=xticks, yticks=yticks)
xtl = ax.set_xticklabels(xticklabels)
ytl = ax.set_yticklabels(yticklabels, rotation="vertical")
# Possibly rotate them if they overlap
if hasattr(ax.figure.canvas, "get_renderer"):
ax.figure.draw(ax.figure.canvas.get_renderer())
if axis_ticklabels_overlap(xtl):
plt.setp(xtl, rotation="vertical")
if axis_ticklabels_overlap(ytl):
plt.setp(ytl, rotation="horizontal")
# Add the axis labels
ax.set(xlabel=self.xlabel, ylabel=self.ylabel)
# Annotate the cells with the formatted values
self._annotate_and_size_cells(ax, mesh)
def customised_cells_heatmap(data, vmin=None, vmax=None, cmap=None, center=None, robust=False,
annot=None, fmt=".2g", annot_kws=None,
cbar=True, cbar_kws=None, cbar_ax=None,
data_cells=None, robust_cells=True,
vmin_cells=None, vmax_cells=None,
square=False, xticklabels="auto", yticklabels="auto",
mask=None, ax=None, ax_kws=None, shape_kws=None,
normalize_cells=True, square_shaped_cells=True):
# Initialize the plotter object
plotter = _CustomisedCellHeatMapper(data, vmin, vmax,
cmap, center, robust,
annot, fmt, annot_kws,
cbar, cbar_kws, shape_kws,
data_cells, vmin_cells, vmax_cells,
robust_cells, xticklabels, yticklabels, mask, normalize_cells,
square_shaped_cells)
# Draw the plot and return the Axes
if ax is None:
ax = plt.gca()
if square:
ax.set_aspect("equal")
    # delete grid
    ax.grid(False)
    # apply any axes-level keywords (mirrors seaborn.heatmap behaviour)
    if ax_kws:
        ax.set(**ax_kws)
    plotter.plot(ax, cbar_ax, {})
    return ax
```
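A short usage sketch for `customised_cells_heatmap` with synthetic data: color encodes the value matrix and cell size encodes a second weight matrix:

```python
# Synthetic example: 5x5 values colored by magnitude, sized by a weight matrix.
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

rng = np.random.default_rng(0)
values = pd.DataFrame(rng.random((5, 5)))
weights = pd.DataFrame(rng.random((5, 5)))  # mapped onto cell sizes

ax = customised_cells_heatmap(values, data_cells=weights, annot=True, square=True)
plt.show()
```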
{
"source": "Jonitrexis/uj-discord-bot",
"score": 2
}
#### File: uj-discord-bot/tests/test_text.py
```python
from source.response_utils.get_text import get_text
def test_get_text():
text_none = ""
assert get_text(text_none) == (None, 0)
text_wrong = "abc"
assert get_text(text_wrong) == (None, 0)
text_1 = ";okabc;"
assert get_text(text_1) == ("abc", 0)
text_2 = ";Okabc;"
assert get_text(text_2) == ("abc", 0)
text_3 = ";oKabc;"
assert get_text(text_3) == ("abc", 0)
text_4 = ";OKabc;"
assert get_text(text_4) == ("abc", 0)
text_5 = ";okabc ;"
assert get_text(text_5) == ("abc ", 0)
text_6 = ";ok abc;"
assert get_text(text_6) == (" abc", 0)
text_7 = "cześć bocie"
author = "bejbe"
assert get_text(text_7, author) == ("<NAME>", 3)
text_8 = ";ok~abc;"
assert get_text(text_8) == ("abc", 1)
text_9 = ";ok~~abc;"
assert get_text(text_9) == ("abc", 2)
text_10 = ";ok~~~abc;"
assert get_text(text_10) == ("abc", 3)
```
{
"source": "joniturunen/picam",
"score": 2
}
#### File: picamutils/aws/s3fileuploader.py
```python
import logging, boto3, os
from dotenv import dotenv_values
from pathlib import Path
from botocore.exceptions import ClientError, NoCredentialsError
cli = False
class S3FileUploader:
env_path = Path('./') / '.env'
envs = dotenv_values(env_path)
# Set variables from .env file
access_key = envs['AWS_ACCESS_KEY']
secret_key = envs['AWS_SECRET_KEY']
access_print = access_key[:3] + (len(access_key)-6)*'*' + access_key[-3:]
secret_print = secret_key[:3] + (len(secret_key)-6)*'*' + secret_key[-3:]
s3_bucket = envs['S3_BUCKET']
def __init__(self,local_file, object_name=None):
self.local_file = local_file
self.object_name = object_name
if cli: print(f'access_key: {self.access_print}\nsecret_key: {self.secret_print}\ns3_bucket: {self.s3_bucket}')
def upload_file_to_bucket(self):
        if self.object_name is None:
            self.object_name = self.local_file
s3_client = boto3.client('s3', aws_access_key_id=self.access_key, aws_secret_access_key=self.secret_key)
try:
s3_client.upload_file(self.local_file, self.s3_bucket, self.object_name, ExtraArgs={'ACL': 'public-read'})
return True
        except FileNotFoundError:
            print('\tFile not found!')
            return False
        except NoCredentialsError:
            print('\tNo credentials provided!')
            return False
if __name__ == '__main__':
cli = True
local_file = 'tmp/snapshot.jpg'
object_name = 'latest.jpg'
s3 = S3FileUploader(local_file, object_name)
result = s3.upload_file_to_bucket()
if result: print(f'File `{local_file}` saved as `{object_name}` successfully.')
```
{
"source": "joniumGit/distributed-minesweeper",
"score": 2
}
#### File: game-server/test/test_free.py
```python
import pytest
def test_start_too_many_mines(client):
r = client.post('/start', params={'width': 8, 'height': 8, 'mines': 95})
assert r.status_code == 422
assert r.json()['detail'][0]['msg'].startswith('Too many mines'), r.json()
def test_start_too_small_field(client):
r = client.post('/start', params={'width': 2, 'height': 4, 'mines': 95})
assert r.status_code == 422
assert r.json()['detail'][0]['msg'].startswith('Bad dimensions'), r.json()
def test_start(client):
r = client.post('/start', params={'width': 8, 'height': 8, 'mines': 10})
assert r.status_code == 201
def _initializing(client, path=None, method=None):
from server.models import MAX_HEIGHT, MAX_WIDTH
r = client.post('/start', params={
'width': MAX_WIDTH - 1,
'height': MAX_HEIGHT - 1,
'mines': int(MAX_WIDTH * MAX_HEIGHT * 0.6)
})
assert r.status_code == 201, f'Failed, got: {r.status_code}\n{r.content}'
if path is None:
path = r.headers['Location']
if method is None:
method = 'get'
r = getattr(client, method)(path)
assert r.status_code == 202
    import time
    i = 0
    while i < 200:
        r = getattr(client, method)(path)
        if r.status_code != 202:
            break
        i += 1
        time.sleep(0.01)
    assert i != 200
def test_initializing(client):
_initializing(client)
@pytest.mark.parametrize('p,method', [
('/reload', 'get'),
('/check?x=0&y=0', 'get'),
('/open?x=0&y=0', 'post'),
('/flag?x=0&y=0', 'post'),
('/flag?x=0&y=0', 'delete'),
('/', 'get'),
])
def test_initializing_endpoints(client, p, method):
_initializing(client, path=p, method=method)
def test_status(client):
test_start(client)
r = client.get('/reload')
assert r.json()['status'] == 'ongoing'
def test_set_delete_flag(client):
r = client.post('/start', params={'width': 8, 'height': 8, 'mines': 10})
assert r.status_code == 201
r = client.post('/flag', params={'x': 0, 'y': 0})
assert r.status_code == 201
r = client.post('/flag', params={'x': 0, 'y': 0})
assert r.status_code == 304
r = client.delete('/flag', params={'x': 0, 'y': 0})
assert r.status_code == 204
r = client.delete('/flag', params={'x': 0, 'y': 0})
assert r.status_code == 304
def test_not_started(client):
r = client.get('/')
assert r.status_code == 404
m = {'x': 0, 'y': 0}
r = client.post('/open', params=m)
assert r.status_code == 404
r = client.post('/flag', params=m)
assert r.status_code == 404
r = client.delete('/flag', params=m)
assert r.status_code == 404
r = client.get('/reload')
assert r.status_code == 404
r = client.get('/check', params=m)
assert r.status_code == 404
def test_flagging(client):
from headers import LOCATION
r = client.post('/start', params={'width': 8, 'height': 8, 'mines': 10})
assert r.status_code == 201
r = client.post('/flag', params={'x': 0, 'y': 0})
assert LOCATION in r.headers
assert r.status_code == 201
loc = r.headers[LOCATION]
r = client.get(loc)
assert r.status_code == 200, loc
assert r.json()['flag']
r = client.delete('/flag', params={'x': r.json()['x'], 'y': r.json()['y']})
assert LOCATION in r.headers
assert r.status_code == 204
r = client.get(r.headers[LOCATION])
assert r.status_code == 200
assert 'flag' not in r.json()
```
#### File: src/minesweeper/game.py
```python
from dataclasses import dataclass, field
from enum import Enum
from typing import Optional, List, Generator
from .logic import Field, Square
class IllegalStateError(RuntimeError):
def __init__(self):
super(IllegalStateError, self).__init__('Invalid game state')
class Status(Enum):
INITIALIZING = 'initializing'
ONGOING = 'ongoing'
WIN = 'win'
LOSE = 'lose'
    def __eq__(self, other):
        return self is other or self.value == other

    # keep Enum hashing; defining __eq__ alone would set __hash__ to None
    __hash__ = Enum.__hash__

    def __repr__(self):
        return self.value

    def __str__(self):
        return self.value
@dataclass
class Move:
status: Optional[Status] = None
items: List[Square] = field(default_factory=list)
class Minesweeper:
_field: Field
_status: Status = Status.INITIALIZING
def __init__(self, width: int, height: int, mines: int):
self._field = Field(width, height, mines)
@property
def width(self):
return self._field.width
@property
def height(self):
return self._field.height
@property
def mines(self):
return self._field.mines
def open(self, x: int, y: int) -> Move:
if self.status is Status.ONGOING:
            squares = list(self._field.open(x, y))
            # a truthy Field signals the win condition (behaviour assumed from logic.Field)
            if self._field:
self._status = Status.WIN
squares.extend(self._field.iter_mines())
return Move(status=self._status, items=squares)
elif len(squares) == 1:
square = squares[0]
if square.mine:
self._status = Status.LOSE
squares.extend(self._field.iter_mines())
return Move(status=self._status, items=squares)
else:
return Move(items=squares)
else:
return Move(items=squares)
else:
raise IllegalStateError()
def flag(self, x: int, y: int) -> Move:
if self.status is Status.ONGOING:
return Move(items=list(self._field.flag(x, y)))
else:
raise IllegalStateError()
def __iter__(self) -> Generator[Square, None, None]:
if self.status is not Status.INITIALIZING:
yield from self._field
if self.status is not Status.ONGOING:
yield from self._field.iter_mines()
else:
raise IllegalStateError()
def check(self, x: int, y: int) -> Square:
if self.status is not Status.INITIALIZING:
return self._field.check(x, y)
else:
raise IllegalStateError()
@property
def status(self) -> Status:
return self._status
def initialize(self):
if self.status is Status.INITIALIZING:
self._field.generate()
self._status = Status.ONGOING
__all__ = ['Square', 'Move', 'Minesweeper', 'Status']
```
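A short usage sketch of the `Minesweeper` facade above (assumes the sibling `logic.Field` implementation it imports):

```python
# Illustrative flow; Move.items carries the squares revealed by each action.
game = Minesweeper(width=8, height=8, mines=10)
game.initialize()               # generate the field, status -> ONGOING
move = game.open(0, 0)
for square in move.items:
    print(square)
if move.status is Status.WIN:
    print('cleared!')
```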
#### File: distributed-minesweeper/python-client/client.py
```python
import time
from typing import List, Optional, Tuple, Union
import headers
import requests
field: List[List[Optional[Union[int, str]]]] = [[]]
def get_node() -> Tuple[str, str]:
r = requests.post('http://localhost/start')
assert r.status_code == 201, r
print('Got Node')
time.sleep(2)
print('Starting')
return r.headers[headers.LOCATION], r.headers[headers.AUTHORIZATION]
def init_field(node_start: str) -> str:
global field
while True:
try:
width = int(input('Width: '))
height = int(input('Height: '))
mines = int(input('Mines: '))
r = client.post(node_start, params=dict(width=width, height=height, mines=mines))
assert r.status_code == 201, r
field = [[None for _ in range(0, width)] for _ in range(0, height)]
print('Field Done')
return r.headers[headers.LOCATION]
except (ValueError, AssertionError):
print('Failed...')
def print_field():
print(' ' + ' '.join(f'{i}' for i in range(0, len(field[0]))))
for idx, row in enumerate(field):
print(f'{idx}|' + ' '.join(' ' if v is None else f'{v}' for v in row))
def get_input() -> Tuple[bool, int, int]:
while True:
input_ = input('Give (f,)x,y: ')
parts = input_.split(',')
try:
if len(parts) == 3:
f, x, y = parts
x = int(x)
y = int(y)
f = True
else:
x, y = parts
x = int(x)
y = int(y)
f = False
break
except ValueError:
print('Invalid Input')
return f, x, y
def handle_flag(x: int, y: int):
if field[y][x] == 'f':
client.delete(f'{node}flag', params=dict(x=x, y=y))
field[y][x] = None
elif field[y][x] is None:
client.post(f'{node}flag', params=dict(x=x, y=y))
field[y][x] = 'f'
def handle_open(x: int, y: int) -> bool:
r = client.post(f'{node}open', params=dict(x=x, y=y))
if r.status_code == 200:
for s in r.json()['items']:
if s.get('flag', False):
value = 'f'
elif s.get('mine', False):
if s.get('open', False):
value = 'x'
else:
value = '*'
else:
value = s.get('value', None)
field[s['y']][s['x']] = value
if r.json().get('status', 'ongoing') != 'ongoing':
print_field()
print(r.json()['status'])
return True
return False
LOCATION, AUTH = get_node()
with requests.Session() as client:
client.headers[headers.AUTHORIZATION] = AUTH
node = init_field(LOCATION)
while True:
print_field()
f_, x_, y_ = get_input()
if f_:
handle_flag(x_, y_)
elif handle_open(x_, y_):
break
```
{
"source": "joniumGit/iot-course-simulation",
"score": 3
}
#### File: robot/src/robot.py
```python
import time
from dataclasses import dataclass
from random import Random
from paho.mqtt import client as mqtt
rand = Random(0)
COLORS = [
'RED',
'GREEN',
'BLUE',
'CYAN',
'YELLOW',
'BLACK',
'WHITE'
]
DEFAULT_LENGTH = 10
SMALLEST_SLEEP = 0.4
@dataclass
class Item:
color: str
length: float
def __repr__(self):
return f"Item({self.color}, {self.length})"
def generate_item() -> Item:
    # simulate a variable production time
    time.sleep(rand.random() + SMALLEST_SLEEP)
    mod = rand.random() * (1.4 - 0.8) + 0.8
    return Item(
        rand.choice(COLORS),
        DEFAULT_LENGTH * mod,
    )
def send_item(item: Item, c: mqtt.Client):
    # simulate transport delay before publishing
    time.sleep(rand.random())
    c.publish('iot-data', repr(item))
sink = mqtt.Client("iot-robot")
sink.connect('iot-mosquitto')
sink.loop_start()
while True:
o = generate_item()
send_item(o, sink)
```
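A hedged consumer counterpart for the robot above: it subscribes to the same `iot-data` topic on the same broker name and prints each `Item` repr it receives (paho-mqtt 1.x callback signature):

```python
from paho.mqtt import client as mqtt

def on_message(client, userdata, message):
    # payload is the repr(...) string published by send_item above
    print(message.payload.decode())

consumer = mqtt.Client("iot-consumer")
consumer.on_message = on_message
consumer.connect('iot-mosquitto')
consumer.subscribe('iot-data')
consumer.loop_forever()
```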
{
"source": "joniumGit/moons",
"score": 3
}
#### File: moons/misc/custom_fitter.py
```python
from typing import Tuple
import numpy as np
from scipy.optimize import least_squares
from sklearn.base import BaseEstimator, RegressorMixin
from sklearn.utils.validation import check_X_y
from sklearn.utils.validation import check_array, check_is_fitted
class OnePerRegression(RegressorMixin, BaseEstimator):
"""
Regression model for k * x ** a + b
"""
def __init__(self, initial_guess: Tuple[float, float, float] = (1, -1, 0)):
"""
initial_guess:
- Tuple[float, float, float]
- k * x ^ a + b (k, a, b)
        Nominal constraints (currently disabled in _constraint, which returns unbounded limits):
        - k[0,INF]
        - a[-15,0]
        - b[0,INF]
        """
super(OnePerRegression, self).__init__()
self.initial_guess = initial_guess
def _constraint(self):
# return np.asarray((
# [0, -15, 0],
# [np.inf, -1, np.inf]
# ))
return np.asarray((
[-np.inf, -np.inf, -np.inf],
[np.inf, np.inf, np.inf]
))
def _more_tags(self):
return {
"poor_score": True
}
    def fun(self, guess):
        k, a, b = guess[0:3]
        # residuals: observed minus the model k * x ** a + b
        return self.y_ - (k * np.float_power(self.x_, a) + b)
def fit(self, X, y):
if y is None:
raise ValueError()
x, y = check_X_y(X, y, y_numeric=True, force_all_finite=True, dtype='float64')
self.n_features_in_ = x.shape[1]
self.x_ = np.average(x, axis=1)
self.y_ = y
solution = least_squares(
self.fun,
np.asarray(self.initial_guess).astype('float64'),
bounds=self._constraint(),
loss='linear',
)
self.coef_ = [solution.x[0], solution.x[1]]
self.intercept_ = solution.x[2]
return self
    def predict(self, X):
        check_is_fitted(self, 'coef_')
        x: np.ndarray = check_array(X, force_all_finite=True, dtype='float64')
        if x.shape[1] != self.n_features_in_:
            raise ValueError("Wrong number of features")
        x = np.average(x, axis=1)
        return self.coef_[0] * np.float_power(x, self.coef_[1]) + self.intercept_
class ReciprocalRegression(RegressorMixin, BaseEstimator):
"""
Regression model for k * 1 / x + b
"""
    def __init__(self, initial_guess: Tuple[float, float] = (1, 0)):
        """
        initial_guess:
        - Tuple[float, float]
        - k * 1 / x + b (k, b)
        Constrained as follows:
        - k[0,INF]
        - b[0,INF]
        """
super(ReciprocalRegression, self).__init__()
self.initial_guess = initial_guess
def _constraint(self):
return np.asarray((
[0, 0],
[np.inf, np.inf]
))
def _more_tags(self):
return {
"poor_score": True
}
    def fun(self, guess):
        k, b = guess
        # residuals: x_ already holds the reciprocals, so the model is k * x_ + b
        return self.y_ - (k * self.x_ + b)
def fit(self, X, y):
if y is None:
raise ValueError()
x, y = check_X_y(X, y, y_numeric=True, force_all_finite=True, dtype='float64')
self.n_features_in_ = x.shape[1]
self.x_ = np.reciprocal(np.average(x, axis=1))
self.y_ = y
solution = least_squares(
self.fun,
np.asarray(self.initial_guess).astype('float64'),
bounds=self._constraint(),
loss='linear',
max_nfev=100
)
self.coef_ = [solution.x[0]]
self.intercept_ = solution.x[1]
return self
    def predict(self, X):
        check_is_fitted(self, 'coef_')
        x: np.ndarray = check_array(X, force_all_finite=True, dtype='float64')
        if x.shape[1] != self.n_features_in_:
            raise ValueError("Wrong number of features")
        x = np.average(x, axis=1)
        return self.coef_[0] * np.reciprocal(x) + self.intercept_
```
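A quick sanity check for `OnePerRegression`, fitting noiseless data generated from known (arbitrarily chosen) parameters:

```python
import numpy as np

# y = 2 * x^-1.5 + 3 on a single-feature design matrix
X = np.linspace(1.0, 10.0, 50).reshape(-1, 1)
y = 2.0 * np.float_power(X[:, 0], -1.5) + 3.0

model = OnePerRegression().fit(X, y)
print(model.coef_, model.intercept_)  # expected: roughly [2, -1.5] and 3
```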
#### File: analysis/common/kernels.py
```python
import os
import spiceypy as spice
from vicarutil.image import VicarImage
from ..internal import log
META_KERNEL: str
KERNEL_BASE: str
def provide_kernels(path: str):
log.info(f"Received kernel base path: {path}")
if path.endswith('/'):
path = path[:-1]
global META_KERNEL
global KERNEL_BASE
META_KERNEL = f'{path}/mk/commons.tm'
KERNEL_BASE = f'{path}/mk/'
def load_kernels_for_image(image: VicarImage):
try:
spice.furnsh(META_KERNEL)
year = image.labels.property('IDENTIFICATION')['IMAGE_TIME'][0:4]
for f in os.listdir(KERNEL_BASE):
if year in f:
kernel = KERNEL_BASE + f
log.info("Loading kernel: " + f)
spice.furnsh(kernel)
except KeyError:
log.warning("Failed to find identification tag from image: %s", image.name)
def release_kernels():
try:
spice.kclear()
except Exception as e:
log.critical("Failed to unload kernel!", exc_info=e)
__all__ = ['provide_kernels', 'load_kernels_for_image', 'release_kernels']
```
#### File: analysis/fitting/second_degree.py
```python
from typing import Tuple
import numpy as np
from ..internal import log
from ...support import SMPipe, sci_4
def roots_2nd_deg(eq1: np.ndarray, eq2: np.ndarray):
"""
Equation coefficients from largest to smallest
Returns roots from largest to smallest
"""
return -np.sort(-np.roots(eq1 - eq2))
def error_estimate_for_y(bg: SMPipe, fg: SMPipe):
"""
    Trying to combine the standard errors of estimates of the two models
SQRT(SCALE) ~ STD Error of Prediction
"""
return np.sqrt(bg.base.result_.scale + fg.base.result_.scale)
def contrast_2nd_deg(eq1: np.ndarray, eq2: np.ndarray) -> Tuple[float, float]:
"""
Equation coefficients from largest to smallest
Returns distance if all roots real
"""
equation = eq1 - eq2
roots: np.ndarray = np.roots(equation)
try:
        if np.all(np.isreal(roots)):
x_val = -0.5 * equation[1] / equation[0]
d = equation[0] * np.power(x_val, 2) + equation[1] * x_val + equation[2]
return x_val, d,
except Exception as e:
log.exception("Exception in contrast", exc_info=e)
return np.NAN, np.NAN
def integrate_2nd_deg(eq1: np.ndarray, eq2: np.ndarray) -> float:
"""
Equation coefficients from largest to smallest
Returns Area between curves
"""
equation = eq1 - eq2
roots: np.ndarray = np.roots(equation)
    if np.all(np.isreal(roots)):
try:
vals = [
np.reciprocal(float(i))
* j
* (np.power(np.max(roots), i) - np.power(np.min(roots), i))
for i, j in enumerate(equation[::-1], start=1)
]
return np.sum(vals)
except Exception as e:
log.exception("Exception in integral", exc_info=e)
return np.NAN
def contrast_error_2nd_deg(bg: SMPipe, fg: SMPipe) -> float:
"""
Evaluates the maximum error for x, and contrast
"""
return error_estimate_for_y(bg, fg)
def integral_error_2nd_deg(bg: SMPipe, fg: SMPipe, contrast_error: float = None) -> float:
"""
Evaluates the maximum error for the integral
"""
roots = roots_2nd_deg(bg.eq, fg.eq)
if contrast_error is None:
return np.abs(roots[0] - roots[1]) * contrast_error_2nd_deg(bg, fg)
else:
return np.abs(roots[0] - roots[1]) * contrast_error
def additional_2nd_deg_info(bg: SMPipe, fg: SMPipe) -> Tuple[str, np.ndarray]:
eq1 = bg.eq
eq2 = fg.eq
roots = roots_2nd_deg(eq1, eq2)
out = " "
    if np.all(np.isreal(roots)):
x_max, contrast = contrast_2nd_deg(eq1, eq2)
integral = integrate_2nd_deg(eq1, eq2)
contrast_error = contrast_error_2nd_deg(bg, fg)
integral_error = np.abs(roots[0] - roots[1]) * contrast_error
newline = '\n'
log.info(
f"""
Values:
- BG EQ: {str(eq1).replace(newline, "")} ERR: {str(bg.errors).replace(newline, "")}
- FG EQ: {str(eq2).replace(newline, "")} ERR: {str(fg.errors).replace(newline, "")}
- Contrast: {contrast:.7e} ERR: {contrast_error:.7e}
- X Pos: {x_max:.7e}
- Integral: {integral:.7e} ERR: {integral_error:.7e}
"""
)
out += r" $\Delta_{max}=" f" {sci_4(contrast)}" r"\pm "
out += f"{sci_4(contrast_error)}" f", x={x_max:3.2f} $"
out += fr" $\int\Delta={sci_4(integral)} "
out += r"\pm" f"{sci_4(integral_error)}, x_0={roots[1]:3.2f}, x_1={roots[0]:3.2f}$"
return out, roots
__all__ = [
'roots_2nd_deg',
'integral_error_2nd_deg',
'contrast_error_2nd_deg',
'integrate_2nd_deg',
'contrast_2nd_deg',
'error_estimate_for_y',
'additional_2nd_deg_info'
]
```
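A worked example for the helpers above, using two parabolas whose intersections and enclosed area are easy to verify by hand (assumes the functions are importable from this module):

```python
import numpy as np

# background y = x^2 and foreground y = -x^2 + 2 intersect at x = +/-1;
# the difference -2x^2 + 2 peaks at x = 0 with value 2, and the enclosed
# area, the integral of (2 - 2x^2) over [-1, 1], equals 8/3.
bg_eq = np.array([1.0, 0.0, 0.0])
fg_eq = np.array([-1.0, 0.0, 2.0])

print(roots_2nd_deg(fg_eq, bg_eq))      # [ 1. -1.]
print(contrast_2nd_deg(fg_eq, bg_eq))   # (~0.0, 2.0)
print(integrate_2nd_deg(fg_eq, bg_eq))  # ~2.6667
```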
#### File: vicarui/analysis/__init__.py
```python
from types import ModuleType
from typing import Optional, Dict, Tuple, Union, Callable, List
from .common import provide_kernels
from .fitting import DataPacket
from .reduction import br_reduction
from ..support import ImageWrapper
class _Holder(object):
mission: str = "cassini"
listeners: List[Callable[[str], None]] = list()
def select_mission(mission: str):
if mission is None:
mission = "empty"
mission = mission.strip()
_Holder.mission = mission
for listener in _Holder.listeners:
listener(mission)
def get_mission() -> str:
return _Holder.mission
def register_mission_listener(listener: Callable[[str], None]):
_Holder.listeners.append(listener)
def remove_mission_listener(listener: Callable[[str], None]):
_Holder.listeners.remove(listener)
def anal_module() -> Optional[ModuleType]:
import importlib
try:
mission = get_mission()
if '.' in mission:
__i = importlib.import_module(f"{mission}")
else:
__i = importlib.import_module(f".missions.{mission}", package=__package__)
return __i
except (ImportError, AttributeError) as e:
from .internal import log
log.exception("Exception in mission fetching", exc_info=e)
select_mission("empty")
return None
def get_config() -> Optional[Dict[str, Tuple[Union[str, float, int], Union[str, float, int]]]]:
"""
Returns a default config for the Analysis module
"""
m = anal_module()
try:
if m:
# noinspection PyUnresolvedReferences
return m.get_config()
except AttributeError:
pass
return None
def set_info(
image: ImageWrapper,
image_axis=None,
analysis_axis=None,
bg_axis=None,
**config
) -> str:
"""
Sets info for the subplot axes and returns a title
"""
m = anal_module()
try:
if m:
# noinspection PyUnresolvedReferences
return m.set_info(
image,
image_axis=image_axis,
analysis_axis=analysis_axis,
bg_axis=bg_axis,
**config
)
except AttributeError:
pass
return ""
def get_additional_functions() -> Optional[Dict[str, str]]:
"""
    Returns display names and function names for the additional functions provided by the analysis module
"""
m = anal_module()
try:
if m:
# noinspection PyUnresolvedReferences
return m.get_additional_functions()
except AttributeError:
pass
return None
__all__ = [
'set_info',
'get_additional_functions',
'get_config',
'get_mission',
'register_mission_listener',
'remove_mission_listener',
'anal_module',
'DataPacket',
'br_reduction',
'provide_kernels'
]
```
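A small usage sketch for the mission registry above:

```python
# Listeners receive the new mission name whenever select_mission is called.
def on_mission_change(name: str) -> None:
    print(f'mission is now {name}')

register_mission_listener(on_mission_change)
select_mission('cassini')   # prints: mission is now cassini
remove_mission_listener(on_mission_change)
```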
#### File: missions/cassini/set_info.py
```python
from .config import *
from .funcs import norm, target_estimate
from .helpers import ImageHelper
from ...common import load_kernels_for_image, release_kernels
from ....support import sci_2
def set_info(
image: ImageWrapper,
image_axis=None,
analysis_axis=None,
**config
):
raw = image.raw
try:
load_kernels_for_image(raw)
helper = ImageHelper(raw, **config)
config = helper.config
target, target_id = helper.target_full
utc = helper.time_utc
pa = helper.phase_angle * spice.dpr()
title = "%s FROM: %s - %s @ UTC %s \nPA=%.2f DEG" % (helper.id, CASSINI, target, utc, pa)
try:
filters: List[str] = helper['INSTRUMENT']['FILTER_NAME']
title += " Filters: " + ','.join(filters)
exposure: float = helper['INSTRUMENT']['EXPOSURE_DURATION']
title += f" Exp: {exposure / 1000:.2f}s"
number: str = helper.id
title += f" Image n: {number}"
h1 = helper.saturn_equator_offset(CASSINI_ID)
h2 = helper.saturn_equator_offset(target_id)
sun_to_rings, shadow_in_image, shadow_to_image = helper.shadow_angles
ang_xy = f'{sun_to_rings:.2f} deg'
ang_img = f'{shadow_in_image:.2f} deg'
ang_bore = f'{shadow_to_image:.2f} deg'
title += (
"\n"
fr"Target from Ring Plane: ${sci_2(h2):}\,km$ Cassini from Ring Plane: ${sci_2(h1)}\,km$"
"\n"
f"Shadow angle in Image: {ang_img}, to Image plane: {ang_bore}, to Ring: {ang_xy}"
)
except Exception as e:
log.warning("Failed to find some data", exc_info=e)
if image_axis is not None:
try:
# noinspection PyUnresolvedReferences
from matplotlib.axes import Axes
ax: Axes = image_axis
try:
from matplotlib.ticker import AutoMinorLocator
from ....support.misc import MPL_FONT_CONFIG
second_x = ax.secondary_xaxis(location=1.07, functions=helper.size_x_transforms)
second_y = ax.secondary_yaxis(location=1.07, functions=helper.size_y_transforms)
second_y.yaxis.set_minor_locator(AutoMinorLocator(10))
second_x.xaxis.set_minor_locator(AutoMinorLocator(10))
second_y.set_ylabel(
f"At {helper.size_name} intercept "
f"$(px = {sci_2(helper.size_per_px[0])},"
f" {sci_2(helper.size_per_px[1])})$ KM",
**MPL_FONT_CONFIG
)
def mod_ax(axes: Axes, vertical: bool = False, **_):
ax2 = axes.secondary_xaxis(
location=-0.22,
functions=helper.size_y_transforms if vertical else helper.size_x_transforms
)
ax2.xaxis.set_minor_locator(AutoMinorLocator(10))
analysis_axis.axes_modifier = mod_ax
except Exception as e:
log.exception("Something happened", exc_info=e)
if config[SUN_SATURN_VECTORS] or config[TARGET_ESTIMATE]:
sun_pos = helper.trpf(SUN_ID)
if helper.target_id == SATURN_ID:
saturn_pos = helper.crpf(SATURN_ID)
else:
saturn_pos = helper.trpf(SATURN_ID)
t_sun, t_saturn = (-norm(v)[0:2] for v in (sun_pos, saturn_pos))
if config[SUN_SATURN_VECTORS]:
x = 70
y = 70
sun = np.column_stack(
(
[x, y],
[
x + t_sun[0] * 60 / np.linalg.norm(t_sun),
y + t_sun[1] * 60 / np.linalg.norm(t_sun)
]
)
)
sat = np.column_stack(
(
[x, y],
[
x + t_saturn[0] * 60 / np.linalg.norm(t_saturn),
y + t_saturn[1] * 60 / np.linalg.norm(t_saturn)
]
)
)
ax.plot(*sun, label="Sun", color=SUN_COLOR, linewidth=1)
ax.plot(*sat, label="Saturn", color=SATURN_COLOR, linewidth=1)
if config[TARGET_ESTIMATE]:
x, y = target_estimate(image, helper)
log.debug(f"Estimate {x},{y}")
ax.scatter(x, y, s=16, c=TARGET_ALT_COLOR, alpha=0.65)
except ImportError as e:
log.exception("No matplotlib", exc_info=e)
except Exception as e:
log.exception("Something bad happened", exc_info=e)
return title
except Exception as e:
log.exception("Failed to load data: %s", raw.name, exc_info=e)
return "Failed to load data"
finally:
release_kernels()
__all__ = ['set_info']
```
#### File: support/concurrent/task.py
```python
from typing import Dict
from PySide2.QtCore import QThread
from .lock import Lock
_lock = Lock()
_tasks: Dict[int, QThread] = dict()
def _add_task(task: QThread) -> None:
task_id = id(task)
_tasks[task_id] = task
task.finished.connect(lambda: _lock.run_blocking(lambda: _tasks.pop(task_id)))
class Tasker:
@staticmethod
    def run(task: QThread) -> None:
"""
Runs a task and holds a reference to it until done
"""
_lock.run_blocking(lambda: _add_task(task))
task.start(priority=QThread.HighPriority)
__all__ = ['Tasker']
```
#### File: support/pipeline/adapter_interface.py
```python
from abc import ABC, abstractmethod
from typing import Optional, Generic, TypeVar
import numpy as np
from sklearn.base import BaseEstimator, RegressorMixin
T = TypeVar('T')
class WrapperRegressor(BaseEstimator, RegressorMixin, ABC, Generic[T]):
@abstractmethod
def fit(self, X_, y, sample_weight: np.ndarray = None) -> T:
"""
Sample weight works with RANSAC
"""
pass
@abstractmethod
def predict(self, X_, y=None):
pass
@property
@abstractmethod
def errors_(self) -> np.ndarray:
"""
Same order as coef_
"""
pass
@property
@abstractmethod
def coef_(self) -> np.ndarray:
"""
Coefficients in the order that they are to be represented in the equation
"""
pass
@property
@abstractmethod
def intercept_(self) -> Optional[float]:
"""
Return intercept if fitted else none
"""
pass
@property
@abstractmethod
def std_(self) -> Optional[float]:
"""
        Standard error of prediction
"""
pass
__all__ = ['WrapperRegressor']
```
#### File: support/tex/tex.py
```python
import numpy as np
def e(s: str):
    """Convert an exponent string like '1.23e-04' into TeX '1.23\\cdot 10^{-4}'."""
    part = s.split('e')
    try:
        if len(part) != 1:
            p1 = int(part[1])
            return part[0] + r"\cdot 10^{" + f"{'+' if p1 >= 0 else ''}{str(p1)}" + "}"
        else:
            return part[0]
    except (IndexError, ValueError):
        return "nan"
def sci_n(val: float, precision: int, plus_sign: bool = False) -> str:
return (
(
'+'
if plus_sign and val > 0
else ''
) + e(("{0:." + str(precision) + "e}").format(val))
) if np.isfinite(val) else "nan"
def sci_2(val: float, plus_sign: bool = False) -> str:
return sci_n(val, 2, plus_sign)
def sci_4(val: float, plus_sign: bool = False) -> str:
return sci_n(val, 4, plus_sign)
def sci_5(val: float, plus_sign: bool = False) -> str:
return sci_n(val, 5, plus_sign)
```
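A few illustrative outputs of the formatting helpers above:

```python
print(sci_2(12345.678))      # 1.23\cdot 10^{+4}
print(sci_2(0.000123))       # 1.23\cdot 10^{-4}
print(sci_2(float('nan')))   # nan
print(sci_4(12345.678, plus_sign=True))  # +1.2346\cdot 10^{+4}
```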
#### File: vicarui/viewer/appwindow.py
```python
from PySide2 import QtWidgets as qt
from .widget import PlotWidget, FileListWidget
class AppWindow(qt.QWidget):
def __init__(self, *args, **kwargs):
super(AppWindow, self).__init__(*args, **kwargs)
plw = PlotWidget()
flw = FileListWidget()
plw.setSizePolicy(qt.QSizePolicy.Expanding, qt.QSizePolicy.Expanding)
flw.setSizePolicy(qt.QSizePolicy.MinimumExpanding, qt.QSizePolicy.Expanding)
from .helper import stack
flw.show_image.connect(plw.open_image)
flw.show_multiple.connect(lambda f: plw.show_image(stack(flw, f)))
layout = qt.QHBoxLayout()
layout.addWidget(flw)
layout.addWidget(plw, stretch=90)
self.setLayout(layout)
self.plw = plw
self.flw = flw
```
#### File: viewer/helper/figurewrapper.py
```python
from typing import Callable, Optional, Any, Union, Tuple, Dict
import numpy as np
from PySide2.QtCore import QThread
from astropy.visualization import ImageNormalize
from matplotlib.axes import Axes
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg
from matplotlib.figure import Figure
from .imageevent import VicarEvent
from ...analysis import set_info
from ...logging import log
from ...support import stop_progress, start_progress, signal, BRTask, ImageWrapper
class FigureWrapper(FigureCanvasQTAgg):
event_handler: Optional[VicarEvent] = None
image_shown = signal()
class Holder:
image: ImageWrapper
data: Axes = None
original: Axes = None
background: Axes = None
line: Axes = None
norm: ImageNormalize = None
click: Callable[[], Tuple[int, int]] = None
set_info: Callable = None
_holder: Holder = None
_task: QThread = None
def __init__(self, width=7.5, height=7.5, dpi=125):
self.fig = Figure(figsize=(width, height), dpi=dpi)
super(FigureWrapper, self).__init__(self.fig)
def clear(self, restore: bool = False):
if hasattr(self, '_data'):
if restore:
try:
self._limits = self._data.images[0].get_clim()
except IndexError:
pass
elif hasattr(self, '_limits'):
delattr(self, '_limits')
# drop the stale axes reference in both branches, even when no image was drawn
delattr(self, '_data')
if self.event_handler is not None:
self.event_handler.detach()
self.event_handler = None
self.fig.clf(keep_observers=True)
def _data_limits(self, ax: Axes):
if hasattr(self, '_limits'):
try:
ax.images[0].set_clim(self._limits)
delattr(self, '_limits')
except IndexError:
pass
def _show_image_2(self):
og = self._holder.original
bg = self._holder.background
norm = self._holder.norm
line = self._holder.line
data = self._holder.data
image = self._holder.image
self.event_handler = VicarEvent(image.processed, data, line, self._holder.click)
self._holder.set_info()
reduced = image.normalize(image.processed)
normalizer = norm(reduced)
data.imshow(reduced, norm=normalizer, cmap="gray", aspect="equal", interpolation='none', origin='upper')
og.imshow(image.original, cmap="gray", interpolation='none', origin='upper')
bg.imshow(image.background, cmap="coolwarm", interpolation='none', origin='upper')
bg.imshow(image.outliers, cmap='binary_r', interpolation="none", origin="upper", alpha=0.3)
bg.set_title(bg.get_title() + f" mse: {image.mse:.5e}")
self._data_limits(data)
data.minorticks_on()
self.figure.set_tight_layout('true')
self.draw()
self.flush_events()
self._holder = None
self._task = None
self.image_shown.emit()
stop_progress()
def _show_image_1(self, image: ImageWrapper, **kwargs):
self._data: Axes = self.fig.add_subplot(3, 3, (2, 6), label='Image Display')
data = self._data
og = self.fig.add_subplot(331, label="Original Image")
bg = self.fig.add_subplot(334, label="Background")
line = self.fig.add_subplot(3, 3, (7, 9), label="Line")
self._holder.data = data
self._holder.original = og
self._holder.background = bg
self._holder.line = line
data.set_title("image")
og.set_title("original")
bg.set_title("background")
line.set_title("line")
def __iset():
try:
self.fig.suptitle(
set_info(
image,
image_axis=data,
analysis_axis=line,
background=bg,
**kwargs
),
fontsize='small',
fontfamily='monospace'
)
except Exception as e:
log.exception("Failed to set info", exc_info=e)
self._holder.set_info = __iset
def show_image(
self,
image: ImageWrapper,
norm: Callable[[np.ndarray], Union[ImageNormalize, None]],
br_pack: Dict[str, Any],
click_area: Callable[[], Tuple[int, int]],
restore: bool = False,
**kwargs
):
start_progress()
self.clear(restore)
self._holder = self.Holder()
self._holder.image = image
self._holder.norm = norm
self._holder.click = click_area
self._show_image_1(image, **kwargs)
self._task = BRTask(image, br_pack)
self._task.done.connect(self._show_image_2)
self._task.start()
def click(self, pkg: Tuple[float, float, bool]):
x, y, right = pkg
try:
handler = self.event_handler
from matplotlib.backend_bases import MouseButton
handler.data_axis_handler(x, y, MouseButton.RIGHT if right else MouseButton.LEFT)
except AttributeError:
pass
__all__ = ['FigureWrapper']
```
#### File: viewer/helper/imageevent.py
```python
from typing import Tuple, Callable
import numpy as np
from matplotlib.axes import Axes
from matplotlib.backend_bases import MouseEvent, MouseButton
from matplotlib.ticker import FuncFormatter, AutoMinorLocator
from ...analysis import DataPacket
from ...logging import log
from ...support import sci_4, sci_n
class VicarEvent:
line_has_data: bool = False
fit_x_start: int = -1
fit_x_end: int = -1
fit_degree: int = 2
dpkt: DataPacket
def __init__(self, data: np.ndarray, data_axis: Axes, line_axis: Axes, area: Callable[[], Tuple[int, int]]):
self.data = data
self.area = area
from ...support import wrap_axes
self.data_axis = wrap_axes(data_axis)
self.line_axis = wrap_axes(line_axis)
self.cid = data_axis.figure.canvas.mpl_connect('button_press_event', self)
self.dpkt = DataPacket(data)
self.outliers = list()
self.rect = None
self.redraw = False
def data_axis_handler(self, x: float, y: float, btn: MouseButton):
self.clear_line_soft()
if self.rect:
self.rect.remove()
self.rect = None
if btn in {MouseButton.LEFT, MouseButton.RIGHT}:
width = self.area()[0]
window = self.area()[1]
row = int(y)
col = int(x)
log.debug(f"Click detected at {row},{col}")
self.dpkt.configure(width, window, 2)
vertical = btn == MouseButton.RIGHT
if self.line_axis.axes_modifier is not None:
self.line_axis.axes_modifier(self.line_axis, vertical=vertical)
r = self.dpkt.select(
col,
row,
vertical=vertical,
lw=1,
fill=False,
)
if btn == MouseButton.LEFT:
r.set_color('b')
self.line_axis.set_left(f"HORIZONTAL slice with HEIGHT: {row - width} <= y <= {row + width}")
self.dpkt.scatter(self.line_axis, s=8, c='b')
else:
r.set_color('r')
self.line_axis.set_left(f"VERTICAL slice with WIDTH: {col - width} <= x <= {col + width}")
self.dpkt.scatter(self.line_axis, s=8, c='r')
self.rect = self.data_axis.add_patch(r)
self.line_has_data = True
else:
self.line_has_data = False
if self.redraw:
self.line_axis.refresh()
self.redraw = False
def _fit(self):
bg, fg = self.dpkt.fit(
self.fit_x_start,
self.fit_x_end,
in_kwargs={'color': 'black', 'linewidth': 3},
out_kwargs={'color': 'gray', 'linewidth': 3}
)
self.line_axis.append_left(
fg.title
+ fr' mse: ${sci_4(fg.mse)}$'
+ fg.additional
+ '\n'
+ bg.title
+ fr' mse: ${sci_4(bg.mse)}$'
+ bg.additional
)
self.line_axis.add_line(fg.line)
self.line_axis.add_line(bg.line)
if bg.outliers is not None:
self.outliers.append(
self.line_axis.scatter(
*bg.outliers,
c='white',
s=4,
marker='.'
)
)
if fg.outliers is not None:
self.outliers.append(
self.line_axis.scatter(
*fg.outliers,
c='white',
s=4,
marker='.'
)
)
def __call__(self, event: MouseEvent):
if event.canvas.cursor().shape() != 0:
return
if event.inaxes == self.line_axis and self.line_has_data:
self.redraw = True
for outlier in self.outliers:
outlier.remove()
self.outliers.clear()
if event.button == MouseButton.LEFT and self.fit_x_start == -1 and self.fit_x_end == -1:
self.fit_x_start = event.xdata
self.line_axis.set_left(self.line_axis.get_first_left())
self.line_axis.append_left("set end ")
elif event.button == MouseButton.LEFT and self.fit_x_end == -1:
self.fit_x_end = event.xdata
if self.fit_x_start == self.fit_x_end:
return
elif self.fit_x_end < self.fit_x_start:
self.fit_x_start, self.fit_x_end = self.fit_x_end, self.fit_x_start
self.line_axis.set_left(self.line_axis.get_first_left())
self._fit()
else:
if event.button == MouseButton.LEFT:
self.fit_x_start = event.xdata
self.line_axis.set_left(self.line_axis.get_first_left())
self.line_axis.append_left("set end ")
else:
self.fit_x_start = -1
self.line_axis.set_left(self.line_axis.get_first_left())
self.line_axis.append_left("set start ")
self.fit_x_end = -1
self.line_axis.clear_lines()
elif event.inaxes == self.data_axis:
self.data_axis_handler(event.xdata, event.ydata, event.button)
if self.redraw:
self.line_axis.refresh()
self.redraw = False
def clear_line_soft(self):
self.redraw = True
self.line_axis.clear()
self.line_axis.grid(alpha=0.4)
self.line_axis.grid(which='minor', linestyle='--', alpha=0.2)
ax2 = self.line_axis.secondary_yaxis(location="right")
ax2.set_yticklabels([])
ax2.yaxis.set_minor_locator(AutoMinorLocator(4))
self.line_axis.minorticks_on()
self.line_axis.yaxis.set_major_formatter(FuncFormatter(lambda x, _: f"${sci_n(x, 1)}$"))
def clear_line(self):
self.clear_line_soft()
self.line_has_data = False
self.fit_x_start = -1
self.fit_x_end = -1
self.rect = None
self.outliers.clear()
def detach(self):
try:
self.data_axis.figure.canvas.mpl_disconnect(self.cid)
except Exception as e:
log.exception("Exception in detach", exc_info=e)
```
#### File: viewer/model/model.py
```python
from pathlib import Path
from typing import Dict, List, Optional, Callable
from ...logging import info
from ...support import invoke_safe, FileType
# Not used anymore
class FileModel:
file_store: Dict[FileType, List[Path]]
callback_store: Dict[FileType, Callable[[Path], None]]
clear_callback: Optional[Callable[[], None]] = None
def __init__(self, *args, **kwargs):
super(FileModel, self).__init__(*args, **kwargs)
self.file_store = dict()
self.callback_store = dict()
def __len__(self):
return len(self.file_store)
@invoke_safe
def __getitem__(self, item: FileType) -> Optional[List[Path]]:
return self.file_store[item]
@invoke_safe
def set_clear_callback(self, c: Callable[[], None]):
self.clear_callback = c
@invoke_safe
def set_callback(self, for_type: FileType, c: Callable[[Path], None]):
self.callback_store[for_type] = c
@invoke_safe
def accept_files(self, files: Dict[FileType, List[Path]]):
amount = sum([len(files[k]) for k in files])
info("Accepting files, count %d", amount)
if amount != 0 and self.clear_callback is not None:
self.clear_callback()
for k in files:
self.file_store[k] = files[k]
if k in self.callback_store:
for f in files[k]:
self.callback_store[k](f)
```
#### File: viewer/widget/adjustment.py
```python
from typing import Callable, Any, Union, Tuple, Dict
import numpy as np
from PySide2 import QtWidgets as qt
from PySide2.QtGui import QIntValidator
from astropy.visualization import ImageNormalize, ZScaleInterval, HistEqStretch
from ..helper import NW, CL
from ...support import Busy, typedsignal
from ...logging import log
class AdjustmentWidget(qt.QWidget):
click = typedsignal(tuple)
"""
Tuple[float, float, bool] click event
"""
def __init__(self, *args, **kwargs):
super(AdjustmentWidget, self).__init__(*args, **kwargs)
self.setFixedHeight(35)
br_toggle = qt.QCheckBox(text="Background Reduction")
br_toggle.setChecked(False)
self.br_toggle = br_toggle
normal_toggle = qt.QCheckBox(text="Normalization")
normal_toggle.setChecked(False)
self.normal_toggle = normal_toggle
degree_label = qt.QLabel(text="Degree")
degree = qt.QComboBox()
for i in range(1, 6):
degree.addItem(str(i))
degree.setCurrentIndex(1)
self.degree_label = degree_label
self.degree = degree
border_label = qt.QLabel(text="Border")
border_value = qt.QLineEdit()
border_value.setText(str(2))
border_value.setFixedWidth(40)
validator = QIntValidator()
validator.setBottom(0)
border_value.setValidator(validator)
self.border_label = border_label
self.border_value = border_value
img_proc_toggle = qt.QCheckBox(text="Post-Processing")
img_proc_toggle.setChecked(False)
self.img_proc_toggle = img_proc_toggle
layout = qt.QHBoxLayout()
layout.setSpacing(10)
layout.addWidget(br_toggle, alignment=NW)
layout.addWidget(degree, alignment=NW)
layout.addWidget(degree_label, alignment=CL)
layout.addSpacerItem(qt.QSpacerItem(10, 5, hData=qt.QSizePolicy.Minimum, vData=qt.QSizePolicy.Minimum))
layout.addWidget(normal_toggle, alignment=NW)
layout.addSpacerItem(qt.QSpacerItem(10, 5, hData=qt.QSizePolicy.Minimum, vData=qt.QSizePolicy.Minimum))
layout.addWidget(border_value, alignment=NW)
layout.addWidget(border_label, alignment=CL)
layout.addSpacerItem(qt.QSpacerItem(10, 5, hData=qt.QSizePolicy.Minimum, vData=qt.QSizePolicy.Minimum))
layout.addWidget(img_proc_toggle, alignment=NW)
layout.addStretch()
click_width = qt.QLineEdit()
validator = QIntValidator()
validator.setBottom(1)
click_width.setValidator(validator)
click_width.setFixedWidth(30)
click_window = qt.QLineEdit()
validator = QIntValidator()
validator.setBottom(1)
click_window.setValidator(validator)
click_window.setFixedWidth(30)
self.click_width = click_width
self.click_window = click_window
self.click_label = qt.QLabel(text="Click area (width, window)")
layout.addWidget(self.click_label)
layout.addWidget(click_width, alignment=NW)
layout.addWidget(click_window, alignment=NW)
simulate_click1 = qt.QPushButton("L")
simulate_click2 = qt.QPushButton("R")
for btn in [simulate_click1, simulate_click2]:
btn.setFixedWidth(15)
simulate_click_x = qt.QLineEdit()
simulate_click_y = qt.QLineEdit()
simulate_click_x.setFixedWidth(60)
simulate_click_y.setFixedWidth(60)
simulate_click_x.setValidator(QIntValidator())
simulate_click_y.setValidator(QIntValidator())
simulate_click_x.setPlaceholderText("Click X")
simulate_click_y.setPlaceholderText("Click Y")
layout.addWidget(simulate_click_x)
layout.addWidget(simulate_click_y)
layout.addWidget(simulate_click1)
layout.addWidget(simulate_click2)
def __click(right: bool = False):
try:
x = float(simulate_click_x.text())
y = float(simulate_click_y.text())
self.click.emit((x, y, right))
except ValueError:
pass
except Exception as e:
log.exception("Failed clock event", exc_info=e)
simulate_click1.clicked.connect(__click)
from functools import partial
simulate_click2.clicked.connect(partial(__click, right=True))
self.click_buttons = (simulate_click1, simulate_click2)
self.click_x = simulate_click_x
self.click_y = simulate_click_y
click_width.setText(str(1))
click_window.setText(str(100))
reload_btn = qt.QPushButton(text="Reload")
self.reload_btn = reload_btn
Busy.listen(self, lambda busy: self.reload_btn.setEnabled(not busy))
layout.addWidget(reload_btn, alignment=NW)
self.setLayout(layout)
def get_br_package(self) -> Dict[str, Any]:
return {
'normalize': self.normal_toggle.isChecked(),
'reduce': self.br_toggle.isChecked(),
'degree': self.degree.currentIndex() + 1,
'border': int(self.border_value.text()) if self.border_value.text().strip() != '' else 0
}
def get_image_normalize(self) -> Callable[[np.ndarray], Union[ImageNormalize, None]]:
if self.img_proc_toggle.isChecked():
return lambda image: ImageNormalize(interval=ZScaleInterval(), stretch=HistEqStretch(image))
else:
return lambda image: None
def get_click_area(self) -> Tuple[int, int]:
w = self.click_width.text().strip()
wind = self.click_window.text().strip()
return int(w) if w != '' else 0, int(wind) if wind != '' else 0
``` |
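A hypothetical smoke test of the widget's defaults; it assumes the package (for `Busy`, `typedsignal`, `NW`, `CL`) and a Qt display are available.
```python
from PySide2.QtWidgets import QApplication

app = QApplication([])
w = AdjustmentWidget()
# Defaults wired in __init__: degree combo at index 1, border "2",
# click width "1", click window "100".
print(w.get_br_package())  # {'normalize': False, 'reduce': False, 'degree': 2, 'border': 2}
print(w.get_click_area())  # (1, 100)
```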
{
"source": "jonix6/fbdparser",
"score": 2
} |
#### File: FBDParser/charmaps/__init__.py
```python
from .hanzi748 import hanzi748
from .symbol748 import symbol748
from .symbols import symbolsA, symbolsB
from .variant748 import variants748
from ..exceptions import FBDEncodingError
class CMap(dict):
def __str__(self):
return 'character map contains {0} codes'.format(len(self))
FZ748Map = CMap()
FZ748Map.update(hanzi748)
FZ748Map.update(symbol748)
unicodeMap = CMap()
unicodeMap.update(symbolsA)
unicodeMap.update(symbolsB)
class CMap748:
def __init__(self, complements=None):
self.complements = complements or {}
def _assert(func):
def wrapper(obj, x):
try:
assert 0x8080 <= x <= 0xFEFF
a, b = x >> 8, x & 0xFF
if 0x80 <= a <= 0xA0:
assert b >= 0x80
elif a >= 0xA4:
assert b >= 0x21
except AssertionError:
raise FBDEncodingError('invalid 748 encoding: 0x{:x}'.format(x))
return func(obj, x)
return wrapper
@_assert
def register(self, code, char):
self.complements[code] = char
@_assert
def normalize(self, x):
return FZ748Map.get(x, x)
@_assert
def decode(self, x, translate=True):
if x in self.complements:
return self.complements[x]
x = FZ748Map.get(x, x)
char = bytearray([x >> 8, x & 0xFF]).decode('gb18030')
if translate:
char = char.translate(unicodeMap)
return char
```
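A small illustration of the decode path; the byte pairs below are assumptions for demonstration, and `hanzi748` could in principle remap them.
```python
cmap = CMap748()
cmap.register(0xA1A1, '\u3000')  # an explicit complement wins over table lookup
print(cmap.decode(0xA1A1))       # '\u3000'
print(cmap.decode(0xB0A1))       # GB18030 fallback -> '啊' (unless hanzi748 remaps it)
```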
#### File: FBDParser/charmaps/symbols.py
```python
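# Founder 748 symbol tables (A and B libraries). Entries mapping a code to
# itself mark symbols with no exact Unicode equivalent; UnicodeMap.update()
# below drops them. gb2unicode_simple() projects GBK user-defined byte
# pairs into the Private Use Area (from U+E000) so they stay addressable.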
def gb2unicode_simple(x):
a, b = (x & 0xFF00) >> 8, x & 0x00FF
if 0xAA <= a <= 0xAF and 0xA1 <= b <= 0xFE:
return 0xE000 + (a - 0xAA) * 0x5E + b - 0xA1
elif 0xA1 <= a <= 0xA7 and (0x40 <= b <= 0x7E or 0x80 <= b <= 0xA0):
return 0xE4C6 + (a - 0xA1) * 0x60 + (0x3F + b - 0x80 if b >= 0x80 else b - 0x40)
return ord(bytearray([a, b]).decode('gb18030'))
def _unichr(x):
if x <= 0xFFFF:
return x
# U+10000 ~ U+10FFFF
return bytearray([
0xF0 | (x >> 18 & 0x7), 0x80 | (x >> 12 & 0x3F),
0x80 | (x >> 6 & 0x3F), 0x80 | (x & 0x3F)]).decode('utf-8')
class UnicodeMap(dict):
def __str__(self):
return 'unicode map contains {0} symbols'.format(len(self))
def update(self, hashmap):
# skip identity entries: codes mapping to themselves have no Unicode equivalent
for a, b in hashmap.items():
if a != b:
self[gb2unicode_simple(a)] = _unichr(b)
"A库符号"
symbolsA = UnicodeMap()
_update = symbolsA.update
# Area A1
_update({
0xA140: 0xA140, # 带括弧的小写罗马数字1((ⅰ))
0xA141: 0xA141, # 带括弧的小写罗马数字2((ⅱ))
0xA142: 0xA142, # 带括弧的小写罗马数字3((ⅲ))
0xA143: 0xA143, # 带括弧的小写罗马数字4((ⅳ))
0xA144: 0xA144, # 带括弧的小写罗马数字5((ⅴ))
0xA145: 0xA145, # 带括弧的小写罗马数字6((ⅵ))
0xA146: 0xA146, # 带括弧的小写罗马数字7((ⅶ))
0xA147: 0xA147, # 带括弧的小写罗马数字8((ⅷ))
0xA148: 0xA148, # 带括弧的小写罗马数字9((ⅸ))
0xA149: 0xA149, # 带括弧的小写罗马数字10((ⅹ))
0xA14A: 0xA14A, # 带括弧的小写罗马数字11((ⅺ))
0xA14B: 0xA14B, # 带括弧的小写罗马数字12((ⅻ))
0xA14C: 0x003D, # 三分宽等号 = =
0xA14D: 0x2212, # 三分宽减号 = −
0xA14E: 0x2215, # 三分宽斜线(除号) = ∕
0xA14F: 0x1D7CE, # 𝟎
0xA150: 0x1D7CF, # 𝟏
0xA151: 0x1D7D0, # 𝟐
0xA152: 0x1D7D1, # 𝟑
0xA153: 0x1D7D2, # 𝟒
0xA154: 0x1D7D3, # 𝟓
0xA155: 0x1D7D4, # 𝟔
0xA156: 0x1D7D5, # 𝟕
0xA157: 0x1D7D6, # 𝟖
0xA158: 0x1D7D7, # 𝟗
0xA159: 0x2664, # ♤
0xA15A: 0x2667, # ♧
0xA15B: 0x00B6, # ¶
0xA15C: 0x26BE, # ⚾
0xA15D: 0x263E, # 上1/4月亮 = ☾
0xA15E: 0x263D, # 下1/4月亮 = ☽
0xA15F: 0x263A, # 笑脸 = ☺
0xA160: 0x1F31C, # 半脸 = 🌜
0xA161: 0x1F31B, # 半脸 = 🌛
0xA162: 0x3036, # 〶
0xA163: 0x2252, # 近似符等号 = ≒
0xA164: 0xA164, # 吨号(T + S)
0xA165: 0x002B, # 三分宽加号 = +
0xA166: 0x223C, # 三分宽减号 = ∼
0xA167: 0x00A9, # ©
0xA168: 0x24D2, # ⓒ
0xA169: 0x24B8, # Ⓒ
0xA16A: 0x00AE, # ®
0xA16B: 0x24C7, # Ⓡ
0xA16D: 0x203E, # 上横线 = ‾
0xA16E: 0x005F, # 下横线 = _
0xA16F: 0x25E2, # ◢
0xA170: 0x25E3, # ◣
0xA171: 0x25E5, # ◥
0xA172: 0x25E4, # ◤
0xA173: 0x256D, # ╭
0xA174: 0x256E, # ╮
0xA175: 0x2570, # ╰
0xA176: 0x256F, # ╯
0xA177: 0x2550, # 双横线 = ═
0xA178: 0x2551, # 双竖线 = ║
0xA179: 0x2223, # 分开、绝对值 = ∣
0xA17A: 0x2926, # ⤦
0xA17B: 0x2924, # ⤤
0xA17C: 0x2923, # ⤣
0xA17D: 0x293E, # ⤾
0xA17E: 0x293F, # ⤿
0xA180: 0x21E7, # ⇧
0xA181: 0x21E9, # ⇩
0xA182: 0xA182, # 数字阳框码0(□ + 0)
0xA183: 0xA183, # 数字阳框码1(□ + 1)
0xA184: 0xA184, # 数字阳框码2(□ + 2)
0xA185: 0xA185, # 数字阳框码3(□ + 3)
0xA186: 0xA186, # 数字阳框码4(□ + 4)
0xA187: 0xA187, # 数字阳框码5(□ + 5)
0xA188: 0xA188, # 数字阳框码6(□ + 6)
0xA189: 0xA189, # 数字阳框码7(□ + 7)
0xA18A: 0xA18A, # 数字阳框码8(□ + 8)
0xA18B: 0xA18B, # 数字阳框码9(□ + 9)
0xA18C: 0xA18C, # 数字阴框码0(0️⃣)
0xA18D: 0xA18D, # 数字阴框码1(1️⃣)
0xA18E: 0xA18E, # 数字阴框码2(2️⃣)
0xA18F: 0xA18F, # 数字阴框码3(3️⃣)
0xA190: 0xA190, # 数字阴框码4(4️⃣)
0xA191: 0xA191, # 数字阴框码5(5️⃣)
0xA192: 0xA192, # 数字阴框码6(6️⃣)
0xA193: 0xA193, # 数字阴框码7(7️⃣)
0xA194: 0xA194, # 数字阴框码8(8️⃣)
0xA195: 0xA195, # 数字阴框码9(9️⃣)
0xA196: 0x1F6AD, # 🚭
0xA197: 0x1F377, # 🍷
0xA198: 0x26A0, # ⚠
0xA199: 0x2620, # ☠
0xA19A: 0xA19A, # (🚫 + 🔥)
0xA19B: 0x2B4D, # ⭍
0xA19C: 0x21B7, # ↷
0xA19D: 0x293A, # ⤺
0xA19E: 0x2716, # ✖
0xA19F: 0x003F, # 问号 = ?
0xA1A0: 0x0021 # 外文感叹号 = !
})
# Area A2
_update({
0xA240: 0x231C, # ⌜
0xA241: 0x231F, # ⌟
0xA242: 0xA242, # (empty ⌜)
0xA243: 0xA243, # (empty ⌟)
0xA244: 0x231D, # ⌝
0xA245: 0x231E, # ⌞
0xA246: 0xA246, # (empty ⌝)
0xA247: 0xA247, # (empty ⌞)
0xA248: 0xFF1C, # <
0xA249: 0xFF1E, # >
0xA24A: 0x2AA1, # ⪡
0xA24B: 0x2AA2, # ⪢
0xA24C: 0xA24C, # (vertical ”)
0xA24D: 0xA24D, # (vertical “)
0xA24E: 0x201E, # „
0xA24F: 0xA24F, # 斜感叹号(italic !)
0xA250: 0xA250, # 斜问号(italic ?)
0xA251: 0xA76C, # ❬
0xA252: 0xA76D, # ❭
0xA253: 0xA253, # (reversed 「)
0xA254: 0xA254, # (reversed 」)
0xA255: 0xA255, # (reversed 『)
0xA256: 0xA256, # (reversed 』)
0xA257: 0x203C, # 双叹号 = ‼
0xA258: 0xA258, # 斜双叹号(italic ‼)
0xA259: 0x2047, # 双问号 = ⁇
0xA25A: 0xA25A, # 斜双问号(italic ⁇)
0xA25B: 0x2048, # 疑问感叹号 = ⁈
0xA25C: 0xA25C, # 斜疑问感叹号(italic ⁈)
0xA25D: 0x2049, # 感叹疑问号 = ⁉
0xA25E: 0xA25E, # 斜感叹疑问号(italic ⁉)
0xA25F: 0xA25F, # 竖排小数点(vertical .)
0xA260: 0x03D6, # 希腊文符号PI = ϖ
0xA261: 0x2116, # №
0xA262: 0x0142, # 多国外文:带笔画的小写字母l = ł
0xA263: 0x0131, # 多国外文:无点的小写字母I = ı
0xA264: 0x014B, # 多国外文:小写字母eng = ŋ
0xA265: 0x0327, # 下加符 = ̧
0xA266: 0x00BF, # 倒置问号 = ¿
0xA267: 0x00A1, # 倒置感叹号 = ¡
0xA268: 0x00D8, # 多国外文:带笔画的大写字母O = Ø
0xA269: 0x00F8, # 多国外文:带笔画的小写字母o = ø
0xA26A: 0x2021, # 二重剑标 = ‡
0xA26B: 0x2020, # 短剑标 = †
0xA26C: 0x014A, # 多国外文:大写字母ENG = Ŋ
0xA26D: 0xFB00, # 多国外文 = ff
0xA26E: 0xFB01, # 多国外文 = fi
0xA26F: 0xFB02, # 多国外文 = fl
0xA270: 0xFB03, # 多国外文 = ffi
0xA271: 0xFB04, # 多国外文 = ffl
0xA272: 0x0141, # 多国外文 = Ł
0xA273: 0x00C7, # 多国外文 = Ç
0xA274: 0x00C6, # 多国外文 = Æ
0xA275: 0x00E6, # 多国外文 = æ
0xA276: 0x0152, # 多国外文 = Œ
0xA277: 0x0153, # 多国外文 = œ
0xA278: 0x00DF, # 多国外文 = ß
0xA279: 0x0192, # 多国外文 = ƒ
0xA27A: 0x00E5, # 多国外文 = å
0xA27B: 0x00E2, # 多国外文 = â
0xA27C: 0x00E4, # 多国外文 = ä
0xA27D: 0x0101, # 多国外文 = ā
0xA27E: 0x00E1, # 多国外文 = á
0xA280: 0x01CE, # 多国外文 = ǎ
0xA281: 0x00E0, # 多国外文 = à
0xA282: 0x00E3, # 多国外文 = ã
0xA283: 0x00EB, # 多国外文 = ë
0xA284: 0x1EBD, # 多国外文 = ẽ
0xA285: 0x00EE, # 多国外文 = î
0xA286: 0x00EF, # 多国外文 = ï
0xA287: 0x00F5, # 多国外文 = õ
0xA288: 0x00F4, # 多国外文 = ô
0xA289: 0x00F6, # 多国外文 = ö
0xA28A: 0x00FB, # 多国外文 = û
0xA28B: 0x00F1, # 多国外文 = ñ
0xA28C: 0x0161, # 多国外文 = š
0xA28D: 0x015D, # 多国外文 = ŝ
0xA28E: 0x011D, # 多国外文 = ĝ
0xA28F: 0x00FF, # 多国外文 = ÿ
0xA290: 0x017E, # 多国外文 = ž
0xA291: 0x1E91, # 多国外文 = ẑ
0xA292: 0x0109, # 多国外文 = ĉ
0xA293: 0x00E7, # 多国外文 = ç
0xA294: 0xA294, # 多国外文(ê̄)
0xA295: 0x1EBF, # 多国外文 = ế
0xA296: 0xA296, # 多国外文(ê̌)
0xA297: 0x1EC1, # 多国外文 = ề
0xA29A: 0x0307, # 组合用发音符 = ̇
0xA29B: 0x030A, # 组合用发音符 = ̊
0xA29C: 0x0303, # 组合用发音符 = ̃
0xA29D: 0x20F0, # 组合用发音符 = ⃰
0xA29E: 0x0306, # 组合用发音符 = ̆
0xA29F: 0x002C, # 外文逗号 = ,
0xA2A0: 0x2026, # 外文三点省略号,外文三连点 = …
0xA2AB: 0x217A, # 小写罗马数字11 = ⅺ
0xA2AC: 0x217B, # 小写罗马数字12 = ⅻ
0xA2AD: 0xA2AD, # 小写罗马数字13(ⅹⅲ)
0xA2AE: 0xA2AE, # 小写罗马数字14(ⅹⅳ)
0xA2AF: 0xA2AF, # 小写罗马数字15(ⅹⅴ)
0xA2B0: 0xA2B0, # 小写罗马数字16(ⅹⅵ)
0xA2EF: 0xA2EF, # 大写罗马数字15(ⅩⅤ)
0xA2F0: 0xA2F0, # 大写罗马数字16(ⅩⅥ)
0xA2FD: 0xA2FD, # 大写罗马数字13(ⅩⅢ)
0xA2FE: 0xA2FE, # 大写罗马数字14(ⅩⅣ)
})
# Area A3
_update({
0xA340: 0xA340, # 带括号的大写罗马数字1((Ⅰ))
0xA341: 0xA341, # 带括号的大写罗马数字2((Ⅱ))
0xA342: 0xA342, # 带括号的大写罗马数字3((Ⅲ))
0xA343: 0xA343, # 带括号的大写罗马数字4((Ⅳ))
0xA344: 0xA344, # 带括号的大写罗马数字5((Ⅴ))
0xA345: 0xA345, # 带括号的大写罗马数字6((Ⅵ))
0xA346: 0xA346, # 带括号的大写罗马数字7((Ⅶ))
0xA347: 0xA347, # 带括号的大写罗马数字8((Ⅷ))
0xA348: 0xA348, # 带括号的大写罗马数字9((Ⅸ))
0xA349: 0xA349, # 带括号的大写罗马数字10((Ⅹ))
0xA34A: 0xA34A, # 带括号的大写罗马数字11((Ⅺ))
0xA34B: 0xA34B, # 带括号的大写罗马数字12((Ⅻ))
0xA34C: 0x24FF, # 数字阴圈码0 = ⓿
0xA34D: 0x2776, # 数字阴圈码1 = ❶
0xA34E: 0x2777, # 数字阴圈码2 = ❷
0xA34F: 0x2778, # 数字阴圈码3 = ❸
0xA350: 0x2779, # 数字阴圈码4 = ❹
0xA351: 0x277A, # 数字阴圈码5 = ❺
0xA352: 0x277B, # 数字阴圈码6 = ❻
0xA353: 0x277C, # 数字阴圈码7 = ❼
0xA354: 0x277D, # 数字阴圈码8 = ❽
0xA355: 0x277E, # 数字阴圈码9 = ❾
0xA356: 0x24B6, # 字母阳圈码A = Ⓐ
0xA357: 0x24B7, # 字母阳圈码B = Ⓑ
0xA358: 0x24B8, # 字母阳圈码C = Ⓒ
0xA359: 0x24B9, # 字母阳圈码D = Ⓓ
0xA35A: 0x24BA, # 字母阳圈码E = Ⓔ
0xA35B: 0x24BB, # 字母阳圈码F = Ⓕ
0xA35C: 0x24BC, # 字母阳圈码G = Ⓖ
0xA35D: 0x24BD, # 字母阳圈码H = Ⓗ
0xA35E: 0x24BE, # 字母阳圈码I = Ⓘ
0xA35F: 0x24BF, # 字母阳圈码J = Ⓙ
0xA360: 0x1F110, # 圆括号码A = 🄐
0xA361: 0x1F111, # 圆括号码B = 🄑
0xA362: 0x1F112, # 圆括号码C = 🄒
0xA363: 0x1F113, # 圆括号码D = 🄓
0xA364: 0x1F114, # 圆括号码E = 🄔
0xA365: 0x1F115, # 圆括号码F = 🄕
0xA366: 0x1F116, # 圆括号码G = 🄖
0xA367: 0x1F117, # 圆括号码H = 🄗
0xA368: 0x1F118, # 圆括号码I = 🄘
0xA369: 0x1F119, # 圆括号码J = 🄙
0xA36A: 0x24D0, # 阳圈码a = ⓐ
0xA36B: 0x24D1, # 阳圈码b = ⓑ
0xA36C: 0x24D2, # 阳圈码c = ⓒ
0xA36D: 0x24D3, # 阳圈码d = ⓓ
0xA36E: 0x24D4, # 阳圈码e = ⓔ
0xA36F: 0x24D5, # 阳圈码f = ⓕ
0xA370: 0x24D6, # 阳圈码g = ⓖ
0xA371: 0x24D7, # 阳圈码h = ⓗ
0xA372: 0x24D8, # 阳圈码i = ⓘ
0xA373: 0x24D9, # 阳圈码j = ⓙ
0xA374: 0x249C, # 圆括号码a = ⒜
0xA375: 0x249D, # 圆括号码b = ⒝
0xA376: 0x249E, # 圆括号码c = ⒞
0xA377: 0x249F, # 圆括号码d = ⒟
0xA378: 0x24A0, # 圆括号码e = ⒠
0xA379: 0x24A1, # 圆括号码f = ⒡
0xA37A: 0x24A2, # 圆括号码g = ⒢
0xA37B: 0x24A3, # 圆括号码h = ⒣
0xA37C: 0x24A4, # 圆括号码i = ⒤
0xA37D: 0x24A5, # 圆括号码j = ⒥
0xA37E: 0x3396, # 单位符号:毫升 = ㎖
0xA380: 0x3397, # ㎗
0xA381: 0x33CB, # 单位符号:百帕 = ㏋
0xA382: 0x3398, # 单位符号:立升 = ㎘
0xA383: 0x33A0, # 单位符号:平方厘米 = ㎠
0xA384: 0x33A4, # 单位符号:立方厘米 = ㎤
0xA385: 0x33A5, # 单位符号:立方米 = ㎥
0xA386: 0x33A2, # 单位符号:平方公里 = ㎢
0xA387: 0x33BE, # 单位符号:千瓦 = ㎾
0xA388: 0x33C4, # ㏄
0xA389: 0x3383, # 单位符号:毫安 = ㎃
0xA38A: 0x33C2, # ㏂
0xA38B: 0x33D8, # ㏘
0xA38C: 0x33CD, # ㏍
0xA38D: 0x33D7, # ㏗
0xA38E: 0x33DA, # ㏚
0xA38F: 0x339C, # ㎜
0xA390: 0x339D, # ㎝
0xA391: 0x339E, # ㎞
0xA392: 0x33CE, # 单位符号:公里 = ㏎
0xA393: 0x338E, # 单位符号:毫克 = ㎎
0xA394: 0x338F, # 单位符号:千克(公斤) = ㎏
0xA395: 0x33A1, # 单位符号:平方米 = ㎡
0xA396: 0x33D2, # ㏒
0xA397: 0x33D1, # ㏑
0xA398: 0x33C4, # ㏄
0xA399: 0x33D5, # ㏕
0xA39A: 0xAB36, # ꬶ
0xA39B: 0x2113, # ℓ
0xA39C: 0x006D, # m
0xA39D: 0x0078, # x
0xA39E: 0x1EFF, # ỿ
0xA39F: 0x0028, # 左开圆括号 = (
0xA3A0: 0x0029, # 右闭圆括号 = )
})
# Area A4
_update({
0xA440: 0xA440, # BD语言注解:四分空(◯ + ¼)
0xA441: 0xA441, # BD语言注解:二分空(◯ + ½)
0xA442: 0xA442, # BD语言注解:六分空(◯ + ⅙)
0xA443: 0xA443, # BD语言注解:八分空(◯ + ⅛)
0xA444: 0xA444, # (◇ + ◼ + ⬦)
0xA445: 0xA445, # (◇ + ◻)
0xA446: 0xA446, # (☐ + ◆ + ◻)
0xA447: 0xA447, # (⏹ + ⬦)
0xA448: 0x29C8, # ⧈
0xA449: 0x1F79C, # 🞜
0xA44A: 0xA44A, # (◆ + ◻)
0xA44B: 0xA44B, # (◇ + ◼)
0xA44C: 0xA44C, # (☐ + ◆)
0xA44D: 0x26CB, # ⛋
0xA44E: 0x2756, # ❖
0xA44F: 0xA44F, # (negative ❖)
0xA450: 0xA450, # (5-black-square cross, like ⸭)
0xA451: 0xA451, # (5-white-square cross, like ⌘)
0xA452: 0x2795, # ➕
0xA453: 0x271A, # ✚
0xA454: 0x23FA, # ⏺
0xA455: 0x2704, # ✄
0xA456: 0x25C9, # ◉
0xA457: 0x2A00, # ⨀
0xA458: 0x2740, # ❀
0xA459: 0x273F, # ✿
0xA45A: 0x2668, # ♨
0xA45B: 0x2669, # ♩
0xA45C: 0x266A, # ♪
0xA45D: 0x266C, # ♬
0xA45E: 0x2B57, # ⭗
0xA45F: 0x26BE, # ⚾
0xA460: 0x260E, # ☎
0xA461: 0x2025, # ‥
0xA462: 0x261C, # ☜
0xA463: 0x261E, # ☞
0xA464: 0x3021, # 杭州记数标记“一” = 〡
0xA465: 0x3022, # 杭州记数标记“二” = 〢
0xA466: 0x3023, # 杭州记数标记“三” = 〣
0xA467: 0x3024, # 杭州记数标记“四” = 〤
0xA468: 0x3025, # 杭州记数标记“五” = 〥
0xA469: 0x3026, # 杭州记数标记“六” = 〦
0xA46A: 0x3027, # 杭州记数标记“七” = 〧
0xA46B: 0x3028, # 杭州记数标记“八” = 〨
0xA46C: 0x3029, # 杭州记数标记“九” = 〩
0xA46D: 0x3038, # 杭州记数标记“十” = 〸
0xA46E: 0x3039, # 杭州记数标记“廿” = 〹
0xA46F: 0x303A, # 杭州记数标记“卅” = 〺
0xA470: 0x25A2, # ▢
0xA471: 0x00AE, # ®
0xA472: 0x25CF, # ●
0xA473: 0x25CB, # ○
0xA474: 0x2661, # ♡
0xA475: 0x25CA, # ◊
0xA476: 0xA476, # (▽ + ▿)
0xA477: 0x2236, # ∶
0xA478: 0xA478, # 毫米(m/m)
0xA479: 0xA479, # 厘米(c/m)
0xA47A: 0xA47A, # 分米(d/m)
0xA47B: 0x2105, # ℅
0xA47D: 0xA47D, # (circled ™)
0xA47E: 0x2122, # ™
0xA480: 0xAB65, # ꭥ
0xA481: 0x026E, # ɮ
0xA482: 0x02A7, # ʧ
0xA483: 0x01EB, # ǫ
0xA484: 0x03C5, # υ
0xA485: 0xA7AC, # Ɡ
0xA486: 0x1D93, # ᶓ
0xA487: 0x1D74, # ᵴ
0xA488: 0x1D92, # ᶒ
0xA489: 0x1D95, # ᶕ
0xA48A: 0x02AE, # ʮ
0xA48B: 0x1D8B, # ᶋ
0xA48C: 0x0119, # ę
0xA48D: 0x01BE, # ƾ
0xA48E: 0x1D97, # ᶗ
0xA48F: 0x0293, # ʓ
0xA490: 0xA490, # (hɥ)
0xA491: 0x0253, # ɓ
0xA492: 0x0287, # ʇ
0xA493: 0x01AB, # ƫ
0xA494: 0x028D, # ʍ
0xA495: 0x1D8D, # ᶍ
0xA496: 0x0269, # ɩ
0xA497: 0x025C, # ɜ
0xA498: 0x02A5, # ʥ
0xA499: 0x019E, # ƞ
0xA49A: 0x01AA, # ƪ
0xA49B: 0x0250, # ɐ
0xA49C: 0x0286, # ʆ
0xA49D: 0x01BB, # ƻ
0xA49E: 0x00D8, # Ø
0xA4F4: 0xA4F4, # 三叹号(!!!)
0xA4F5: 0xA4F5, # 斜三叹号(italic !!!)
0xA4F6: 0x32A3, # 带圈汉字:正 = ㊣
0xA4F7: 0x329E, # 带圈汉字:印 = ㊞
0xA4F8: 0x32A4, # 带圈汉字:上 = ㊤
0xA4F9: 0x32A5, # 带圈汉字:中 = ㊥
0xA4FA: 0x32A6, # 带圈汉字:下 = ㊦
0xA4FB: 0x32A7, # 带圈汉字:左 = ㊧
0xA4FC: 0x32A8, # 带圈汉字:右 = ㊨
0xA4FD: 0xA4FD, # 带圈汉字:大(◯ + 大)
0xA4FE: 0xA4FE, # 带圈汉字:小(◯ + 小)
})
# Area A5
_update({
0xA540: 0x0111, # đ
0xA541: 0x1D80, # ᶀ
0xA542: 0x1D81, # ᶁ
0xA543: 0x0252, # ɒ
0xA544: 0xA544, # (ŋ + ʷ)
0xA545: 0x026B, # ɫ
0xA546: 0x1D88, # ᶈ
0xA547: 0x1D82, # ᶂ
0xA548: 0x02A6, # ʦ
0xA549: 0x025F, # ɟ
0xA54A: 0x00FE, # þ
0xA54B: 0x0257, # ɗ
0xA54C: 0xAB67, # ꭧ
0xA54D: 0x0260, # ɠ
0xA54E: 0x0242, # ɂ
0xA54F: 0x02AF, # ʯ
0xA550: 0xA550, # (ʯ)
0xA551: 0x0241, # Ɂ
0xA552: 0x025A, # ɚ
0xA553: 0x1D8A, # ᶊ
0xA554: 0x0296, # ʖ
0xA555: 0x1D8C, # ᶌ
0xA556: 0x1D75, # ᵵ
0xA557: 0x1D6D, # ᵭ
0xA558: 0x027D, # ɽ
0xA559: 0x027A, # ɺ
0xA55A: 0x01BA, # ƺ
0xA55B: 0xA55B, # (turned ɰ)
0xA55C: 0x0273, # ɳ
0xA55D: 0xA795, # ꞕ
0xA55E: 0x01B0, # ư
0xA55F: 0x1D85, # ᶅ
0xA560: 0x0260, # ɠ
0xA561: 0x1D86, # ᶆ
0xA562: 0x0277, # ɷ
0xA563: 0x02A4, # ʤ
0xA564: 0x02A3, # ʣ
0xA565: 0x1D87, # ᶇ
0xA566: 0x1D7C, # ᵼ
0xA567: 0x02A8, # ʨ
0xA568: 0x1D8F, # ᶏ
0xA569: 0x029A, # ʚ
0xA56A: 0x1D9A, # ᶚ
0xA56B: 0xA727, # ꜧ
0xA56C: 0x1D83, # ᶃ
0xA56D: 0xA56D, # (italic ŋ)
0xA56E: 0x029E, # ʞ
0xA56F: 0x0195, # ƕ
0xA570: 0x1D76, # ᵶ
0xA571: 0x027E, # ɾ
0xA572: 0x1D8E, # ᶎ
0xA573: 0x1D89, # ᶉ
0xA574: 0x027C, # ɼ
0xA575: 0x0279, # ɹ
0xA576: 0x018D, # ƍ
0xA577: 0x03C9, # ω
0xA578: 0x025D, # ɝ
0xA579: 0x03C3, # σ
0xA57A: 0x027B, # ɻ
0xA57B: 0x026D, # ɭ
0xA57C: 0x0267, # ɧ
0xA57D: 0x025A, # ɚ
0xA57E: 0xAB66, # ꭦ
0xA580: 0x5F02, # 异
0xA581: 0x28473, # 𨑳
0xA582: 0x5194, # 冔
0xA583: 0x247A3, # 𤞣
0xA584: 0x2896D, # 𨥭
0xA585: 0x5642, # 噂
0xA586: 0x7479, # 瑹
0xA587: 0x243B9, # 𤎹
0xA588: 0x723F, # 爿
0xA589: 0x9D56, # 鵖
0xA58A: 0x4D29, # 䴩
0xA58B: 0x20779, # 𠝹
0xA58C: 0x210F1, # 𡃱
0xA58D: 0x2504C, # 𥁌
0xA58E: 0x233CC, # 𣏌
0xA58F: 0x032F, # 下加符 = ̯
0xA590: 0x0312, # 下加符 = ̒
0xA591: 0x030D, # 下加符 = ̍
0xA592: 0x0314, # 下加符 = ̔
0xA593: 0x0313, # 下加符 = ̓
0xA594: 0x2F83B, # 吆
0xA595: 0x25EC0, # 𥻀
0xA596: 0x445B, # 䑛
0xA597: 0x21D3E, # 𡴾
0xA598: 0x0323, # 下加符 = ̣
0xA599: 0x0325, # 下加符 = ̥
0xA59A: 0x0331, # 下加符 = ̱
0xA59B: 0x032A, # 下加符 = ̪
0xA59C: 0x032C, # 下加符 = ̬
0xA59D: 0x032B, # 下加符 = ̫
0xA59E: 0x0329, # 下加符 = ̩
0xA59F: 0xFF5B, # 左开花括号 = {
0xA5A0: 0xFF5D, # 右闭花括号 = }
0xA5F7: 0x3016, # 左空方圆括号 = 〖
0xA5F8: 0x3017, # 右空方圆括号 = 〗
0xA5F9: 0x29DB, # ⧛
0xA5FA: 0xA5FA, # (vertical ⧛)
0xA5FB: 0x534D, # 卍
0xA5FC: 0xFE47, # 竖排上方括号 = ﹇
0xA5FD: 0xFE48, # 竖排下方括号 = ﹈
0xA5FE: 0x2571, # 斜线 = ╱
})
# Area A6
_update({
0xA640: 0x00C5, # 多国外文 = Å
0xA641: 0x0100, # 多国外文 = Ā
0xA642: 0x00C1, # 多国外文 = Á
0xA643: 0x01CD, # 多国外文 = Ǎ
0xA644: 0x00C0, # 多国外文 = À
0xA645: 0x00C2, # 多国外文 = Â
0xA646: 0x00C4, # 多国外文 = Ä
0xA647: 0x00C3, # 多国外文 = Ã
0xA648: 0x0112, # 多国外文 = Ē
0xA649: 0x00C9, # 多国外文 = É
0xA64A: 0x011A, # 多国外文 = Ě
0xA64B: 0x00C8, # 多国外文 = È
0xA64C: 0x00CA, # 多国外文 = Ê
0xA64D: 0x00CB, # 多国外文 = Ë
0xA64E: 0x1EBC, # 多国外文 = Ẽ
0xA64F: 0x012A, # 多国外文 = Ī
0xA650: 0x00CD, # 多国外文 = Í
0xA651: 0x01CF, # 多国外文 = Ǐ
0xA652: 0x00CC, # 多国外文 = Ì
0xA653: 0x00CE, # 多国外文 = Î
0xA654: 0x00CF, # 多国外文 = Ï
0xA655: 0x014C, # 多国外文 = Ō
0xA656: 0x00D3, # 多国外文 = Ó
0xA657: 0x01D1, # 多国外文 = Ǒ
0xA658: 0x00D2, # 多国外文 = Ò
0xA659: 0x00D4, # 多国外文 = Ô
0xA65A: 0x00D6, # 多国外文 = Ö
0xA65B: 0x00D5, # 多国外文 = Õ
0xA65C: 0x016A, # 多国外文 = Ū
0xA65D: 0x00DA, # 多国外文 = Ú
0xA65E: 0x01D3, # 多国外文 = Ǔ
0xA65F: 0x00D9, # 多国外文 = Ù
0xA660: 0x00DB, # 多国外文 = Û
0xA661: 0x00DC, # 多国外文 = Ü
0xA662: 0x01D5, # 多国外文 = Ǖ
0xA663: 0x01D7, # 多国外文 = Ǘ
0xA664: 0x01D9, # 多国外文 = Ǚ
0xA665: 0x01DB, # 多国外文 = Ǜ
0xA666: 0xA666, # 多国外文(Ü̂)
0xA667: 0x0108, # 多国外文 = Ĉ
0xA668: 0x011C, # 多国外文 = Ĝ
0xA669: 0x0124, # 多国外文 = Ĥ
0xA66A: 0x0134, # 多国外文 = Ĵ
0xA66B: 0x0160, # 多国外文 = Š
0xA66C: 0x015C, # 多国外文 = Ŝ
0xA66D: 0x0178, # 多国外文 = Ÿ
0xA66E: 0x017D, # 多国外文 = Ž
0xA66F: 0x1E90, # 多国外文 = Ẑ
0xA670: 0x0125, # 多国外文 = ĥ
0xA671: 0x0135, # 多国外文 = ĵ
0xA672: 0x00D1, # 多国外文 = Ñ
0xA673: 0x00E1, # á
0xA674: 0x00E9, # é
0xA675: 0x00ED, # í
0xA676: 0x00F3, # ó
0xA677: 0x00FA, # ú
0xA678: 0x2339D, # 𣎝
0xA679: 0x29F15, # 𩼕
0xA67A: 0x23293, # 𣊓
0xA67B: 0x3CA0, # 㲠
0xA67C: 0x2F922, # 牐
0xA67D: 0x24271, # 𤉱
0xA67E: 0x2720F, # 𧈏
0xA680: 0x00C1, # Á
0xA681: 0x0403, # Ѓ
0xA682: 0x00C9, # É
0xA683: 0x040C, # Ќ
0xA684: 0x00D3, # Ó
0xA685: 0x00FD, # ý
0xA686: 0xA686, # (Ы́)
0xA687: 0xA687, # (Э́)
0xA688: 0x04EC, # Ӭ
0xA689: 0xA689, # (Ю́)
0xA68A: 0xA68A, # (Я́)
0xA68B: 0xA68B, # (ѣ́)
0xA68C: 0xA68C, # (Ѣ́)
0xA68D: 0xA68D, # (И́)
0xA68E: 0x27E1B, # 𧸛
0xA68F: 0x910B, # 鄋
0xA690: 0x29F14, # 𩼔
0xA691: 0x2A0DF, # 𪃟
0xA692: 0x20270, # 𠉰
0xA693: 0x203F1, # 𠏱
0xA694: 0x211AB, # 𡆫
0xA695: 0x211E5, # 𡇥
0xA696: 0x21290, # 𡊐
0xA697: 0x363E, # 㘾
0xA698: 0x212DF, # 𡋟
0xA699: 0x57D7, # 埗
0xA69A: 0x2165F, # 𡙟
0xA69B: 0x248C2, # 𤣂
0xA69C: 0x22288, # 𢊈
0xA69D: 0x23C62, # 𣱢
0xA69E: 0x24276, # 𤉶
0xA69F: 0xFF1A, # 冒号 = :
0xA6A0: 0xFF1B, # 分号 = ;
0xA6B9: 0x2202, # 小写希腊字母 = ∂
0xA6BA: 0x03F5, # 小写希腊字母 = ϵ
0xA6BB: 0x03D1, # 小写希腊字母 = ϑ
0xA6BC: 0x03D5, # 小写希腊字母 = ϕ
0xA6BD: 0x03C6, # 小写希腊字母 = φ
0xA6BE: 0x03F0, # 小写希腊字母 = ϰ
0xA6BF: 0x03F1, # 小写希腊字母 = ϱ
0xA6C0: 0x03C2, # 小写希腊字母 = ς
0xA6D9: 0xFE10, # 竖排逗号 = ︐
0xA6DA: 0xFE12, # 竖排句号 = ︒
0xA6DB: 0xFE11, # 竖排顿号 = ︑
0xA6DC: 0xFE13, # 竖排冒号 = ︓
0xA6DD: 0xFE14, # 竖排分号 = ︔
0xA6DE: 0xFE15, # 竖排感叹号 = ︕
0xA6DF: 0xFE16, # 竖排问号 = ︖
0xA6EC: 0xFE17, # 竖排上空方圆括号 = ︗
0xA6ED: 0xFE18, # 竖排下空方圆括号 = ︘
0xA6F3: 0xFE19, # 竖排三点省略号 = ︙
0xA6F6: 0x00B7, # 居中间隔点 = ·
0xA6F7: 0xA6F7, # 居中逗号(middle ,)
0xA6F8: 0xA6F8, # 居中句号(middle 。)
0xA6F9: 0xA6F9, # 居中顿号(middle 、)
0xA6FA: 0xA6FA, # 居中冒号(middle :)
0xA6FB: 0xA6FB, # 居中分号(middle ;)
0xA6FC: 0xA6FC, # 居中感叹号(middle !)
0xA6FD: 0xA6FD, # 居中问号(middle ?)
0xA6FE: 0xA6FE # ( ͘)
})
# Area A7
_update({
0xA740: 0x24235, # 𤈵
0xA741: 0x2431A, # 𤌚
0xA742: 0x2489B, # 𤢛
0xA743: 0x4B63, # 䭣
0xA744: 0x25581, # 𥖁
0xA745: 0x25BB0, # 𥮰
0xA746: 0x7C06, # 簆
0xA747: 0x23388, # 𣎈
0xA748: 0x26A40, # 𦩀
0xA749: 0x26F16, # 𦼖
0xA74A: 0x2717F, # 𧅿
0xA74B: 0x22A98, # 𢪘
0xA74C: 0x3005, # 々
0xA74D: 0x22F7E, # 𢽾
0xA74E: 0x27BAA, # 𧮪
0xA74F: 0x20242, # 𠉂
0xA750: 0x23C5D, # 𣱝
0xA751: 0x22650, # 𢙐
0xA752: 0x247EF, # 𤟯
0xA753: 0x26221, # 𦈡
0xA754: 0x29A02, # 𩨂
0xA755: 0x45EA, # 䗪
0xA756: 0x26B4C, # 𦭌
0xA757: 0x26D9F, # 𦶟
0xA758: 0x26ED8, # 𦻘
0xA759: 0x359E, # 㖞
0xA75A: 0x20E01, # 𠸁
0xA75B: 0x20F90, # 𠾐
0xA75C: 0x3A18, # 㨘
0xA75D: 0x241A2, # 𤆢
0xA75E: 0x3B74, # 㭴
0xA75F: 0x43F2, # 䏲
0xA760: 0x40DA, # 䃚
0xA761: 0x3FA6, # 㾦
0xA762: 0x24ECA, # 𤻊
0xA763: 0x28C3E, # 𨰾
0xA764: 0x28C47, # 𨱇
0xA765: 0x28C4D, # 𨱍
0xA766: 0x28C4F, # 𨱏
0xA767: 0x28C4E, # 𨱎
0xA768: 0x28C54, # 𨱔
0xA769: 0x28C53, # 𨱓
0xA76A: 0x25128, # 𥄨
0xA76B: 0x251A7, # 𥆧
0xA76C: 0x45AC, # 䖬
0xA76D: 0x26A2D, # 𦨭
0xA76E: 0x41F2, # 䇲
0xA76F: 0x26393, # 𦎓
0xA770: 0x29F7C, # 𩽼
0xA771: 0x29F7E, # 𩽾
0xA772: 0x29F83, # 𩾃
0xA773: 0x29F87, # 𩾇
0xA774: 0x29F8C, # 𩾌
0xA775: 0x27785, # 𧞅
0xA776: 0x2775E, # 𧝞
0xA777: 0x28EE7, # 𨻧
0xA778: 0x290AF, # 𩂯
0xA779: 0x2070E, # 𠜎
0xA77A: 0x22AC1, # 𢫁
0xA77B: 0x20CED, # 𠳭
0xA77C: 0x3598, # 㖘
0xA77D: 0x220C7, # 𢃇
0xA77E: 0x22B43, # 𢭃
0xA780: 0x4367, # 䍧
0xA781: 0x20CD3, # 𠳓
0xA782: 0x20CAC, # 𠲬
0xA783: 0x36E2, # 㛢
0xA784: 0x35CE, # 㗎
0xA785: 0x3B39, # 㬹
0xA786: 0x44EA, # 䓪
0xA787: 0x20E96, # 𠺖
0xA788: 0x20E4C, # 𠹌
0xA789: 0x35ED, # 㗭
0xA78A: 0x20EF9, # 𠻹
0xA78B: 0x24319, # 𤌙
0xA78C: 0x267CC, # 𦟌
0xA78D: 0x28056, # 𨁖
0xA78E: 0x28840, # 𨡀
0xA78F: 0x20F90, # 𠾐
0xA790: 0x21014, # 𡀔
0xA791: 0x236DC, # 𣛜
0xA792: 0x28A17, # 𨨗
0xA793: 0x28879, # 𨡹
0xA794: 0x4C9E, # 䲞
0xA795: 0x20410, # 𠐐
0xA796: 0x40DF, # 䃟
0xA797: 0x210BF, # 𡂿
0xA798: 0x22E0B, # 𢸋
0xA799: 0x4312, # 䌒
0xA79A: 0x233AB, # 𣎫
0xA79B: 0x2812E, # 𨄮
0xA79C: 0x4A31, # 䨱
0xA79D: 0x27B48, # 𧭈
0xA79E: 0x29EAC, # 𩺬
0xA79F: 0x23822, # 𣠢
0xA7A0: 0x244CB, # 𤓋
0xA7C2: 0x0409, # 大写俄文字母LJE = Љ
0xA7C3: 0x040A, # 大写俄文字母NJE = Њ
0xA7C4: 0x040F, # 大写俄文字母DZHE = Џ
0xA7C5: 0x04AE, # 大写俄文字母 = Ү
0xA7C6: 0x0402, # 俄文字母 = Ђ
0xA7C7: 0x040B, # 俄文字母 = Ћ
0xA7C8: 0x0474, # 俄文字母 = Ѵ
0xA7C9: 0x0462, # 俄文字母 = Ѣ
0xA7CA: 0x0463, # 俄文字母 = ѣ
0xA7CB: 0x04E8, # 俄文字母 = Ө
0xA7CC: 0x0459, # 俄文字母 = љ
0xA7CD: 0x045A, # 俄文字母 = њ
0xA7CE: 0x045F, # 俄文字母 = џ
0xA7CF: 0x04AF, # 俄文字母 = ү
0xA7F2: 0x00E1, # 俄文字母 = á
0xA7F3: 0x00E9, # 俄文字母 = é
0xA7F4: 0xA7F4, # 俄文字母(и́)
0xA7F5: 0x00F3, # 俄文字母 = ó
0xA7F6: 0x00FD, # 俄文字母 = ý
0xA7F7: 0xA7F7, # 俄文字母(ы́)
0xA7F8: 0xA7F8, # 俄文字母(э́)
0xA7F9: 0xA7F9, # 俄文字母(ю́)
0xA7FA: 0xA7FA, # 俄文字母(я́)
0xA7FB: 0x0452, # 俄文字母 = ђ
0xA7FC: 0x045B, # 俄文字母 = ћ
0xA7FD: 0x0475, # 俄文字母 = ѵ
0xA7FE: 0x04E9 # 俄文字母 = ө
})
# Area A8
_update({
0xA8BC: 0x1E3F, # 汉语拼音(ḿ) = ḿ
0xA8C1: 0xA8C1, # 中文阴圈码十(⏺ + 十)
0xA8C2: 0xA8C2, # 中文阴圈码廿(⏺ + 廿)
0xA8C3: 0xA8C3, # 中文阴圈码卅(⏺ + 卅)
0xA8C4: 0x4E00, # 注音符号— = 一
0xA8EA: 0xA8EA, # 中文阴框码一(⏹ + 一)
0xA8EB: 0xA8EB, # 中文阴框码二(⏹ + 二)
0xA8EC: 0xA8EC, # 中文阴框码三(⏹ + 三)
0xA8ED: 0xA8ED, # 中文阴框码四(⏹ + 四)
0xA8EE: 0xA8EE, # 中文阴框码五(⏹ + 五)
0xA8EF: 0xA8EF, # 中文阴框码六(⏹ + 六)
0xA8F0: 0xA8F0, # 中文阴框码七(⏹ + 七)
0xA8F1: 0xA8F1, # 中文阴框码八(⏹ + 八)
0xA8F2: 0xA8F2, # 中文阴框码九(⏹ + 九)
0xA8F3: 0xA8F3, # 中文阴框码十(⏹ + 十)
0xA8F4: 0xA8F4, # 中文阴框码廿(⏹ + 廿)
0xA8F5: 0xA8F5, # 中文阴框码卅(⏹ + 卅)
0xA8F6: 0xA8F6, # 中文阴圈码一(⏺ + 一)
0xA8F7: 0xA8F7, # 中文阴圈码二(⏺ + 二)
0xA8F8: 0xA8F8, # 中文阴圈码三(⏺ + 三)
0xA8F9: 0xA8F9, # 中文阴圈码四(⏺ + 四)
0xA8FA: 0xA8FA, # 中文阴圈码五(⏺ + 五)
0xA8FB: 0xA8FB, # 中文阴圈码六(⏺ + 六)
0xA8FC: 0xA8FC, # 中文阴圈码七(⏺ + 七)
0xA8FD: 0xA8FD, # 中文阴圈码八(⏺ + 八)
0xA8FE: 0xA8FE # 中文阴圈码九(⏺ + 九)
})
# Area A9
_update({
0xA9A1: 0xA9A1, # (╪)
0xA9A2: 0xA9A2, # (╡)
0xA9F0: 0x21E8, # 空心向右箭头 = ⇨
0xA9F1: 0x21E6, # 空心向左箭头 = ⇦
0xA9F2: 0x2B06, # 实心向上箭头 = ⬆
0xA9F3: 0x2B07, # 实心向下箭头 = ⬇
0xA9F4: 0x27A1, # 实心向右箭头 = ➡
0xA9F5: 0x2B05, # 实心向左箭头 = ⬅
0xA9F6: 0x2B62, # 箭头-无翅向右 = ⭢
0xA9F7: 0x2B60, # 箭头-无翅向左 = ⭠
0xA9F8: 0x2B61, # 箭头-无翅向上 = ⭡
0xA9F9: 0x2B63, # 箭头-无翅向下 = ⭣
0xA9FA: 0x21C1, # 箭头-下单翅向右 = ⇁
0xA9FB: 0x21BD, # 箭头-下单翅向左 = ↽
0xA9FC: 0xA9FC, # 箭头-双向向内(ꜜ͎)
0xA9FD: 0x2195, # 箭头-双向向外 = ↕
0xA9FE: 0x2B65, # 箭头-无翅双向向外 = ⭥
})
# Area AA
_update({
0xAAA1: 0xAAA1, # BD语言注解:盘外符开弧(⸨)
0xAAA2: 0xAAA2, # BD语言注解:盘外符标记()→)
0xAAA3: 0xAAA3, # BD语言注解:盘外符闭弧(⸩)
0xAAA4: 0xAAA4, # BD语言注解:换行符(⇙)
0xAAA5: 0xAAA5, # BD语言注解:换段符(↙)
0xAAA6: 0xAAA6, # BD语言注解:小样文件结束(Ω)
0xAAA7: 0xAAA7, # BD语言注解:数学态标记(◯ + ﹩)
0xAAA8: 0xAAA8, # BD语言注解:自定义参数(◯ + ﹠)
0xAAA9: 0xAAA9, # BD语言注解:盒子开弧(⦃)
0xAAAA: 0xAAAA, # BD语言注解:盒子闭弧(⦄)
0xAAAB: 0xAAAB, # BD语言注解:转字体标记(ⓩ)
0xAAAC: 0xAAAC, # BD语言注解:上标(⤊)
0xAAAD: 0xAAAD, # BD语言注解:下标(⤋)
0xAAB0: 0x002C, # 千分撇 = ,
0xAAB1: 0x002E, # 小数点 = .
0xAAB2: 0x2010, # 半字线 = ‒
0xAAB3: 0x002A, # 六角星号、呼应号 = *
0xAAB4: 0x0021, # 阶乘 = !
0xAAB5: 0x2202, # 偏导数 = ∂
0xAAB6: 0x2211, # 和 = ∑
0xAAB7: 0x220F, # 积 = ∏
0xAAB8: 0x2AEE, # 非因子号 = ⫮
0xAAB9: 0x2031, # 万分号 = ‱
0xAABA: 0x227B, # 前继 = ≻
0xAABB: 0x227A, # 后继 = ≺
0xAABC: 0x2282, # 包含于 = ⊂
0xAABD: 0x2283, # 包含 = ⊃
0xAABE: 0x225C, # Delta等于 = ≜
0xAABF: 0x00AC, # 否定 = ¬
0xAAC0: 0x22CD, # ⋍
0xAAC1: 0x2286, # 包含于 = ⊆
0xAAC2: 0x2287, # 包含 = ⊇
0xAAC3: 0x225C, # ≜
0xAAC4: 0x2243, # 近似符号 = ⋍
0xAAC5: 0x2265, # 大于等于 = ≥
0xAAC6: 0x2264, # 小于等于 = ≤
0xAAC7: 0x2214, # 穆勒连分符号、集合合 = ∔
0xAAC8: 0x2238, # 算术差 = ∸
0xAAC9: 0x2A30, # 直积号 = ⨰
0xAACA: 0x2271, # 不大于等于 = ≱
0xAACB: 0x2270, # 不小于等于 = ≰
0xAACC: 0x2AB0, # ⪰
0xAACD: 0x2AAF, # ⪯
0xAACE: 0x5350, # 卐
0xAACF: 0x212A, # 绝对温度单位 = K
0xAAD0: 0x2200, # 全称量词 = ∀
0xAAD1: 0x21D1, # ⇑
0xAAD2: 0x21E7, # ⇧
0xAAD3: 0x21BE, # ↾
0xAAD4: 0x21D3, # ⇓
0xAAD5: 0x21E9, # ⇩
0xAAD6: 0x21C3, # ⇃
0xAAD7: 0x2935, # ⤵
0xAAD8: 0x21E5, # ⇥
0xAAD9: 0x22F0, # 对角三连点 = ⋰
0xAADA: 0x21D4, # 等价 = ⇔
0xAADB: 0x21C6, # ⇆
0xAADC: 0x2194, # ↔
0xAADD: 0x21D2, # 推断 = ⇒
0xAADE: 0x21E8, # ⇨
0xAADF: 0x21C0, # ⇀
0xAAE0: 0x27F6, # ⟶
0xAAE1: 0x21D0, # ⇐
0xAAE2: 0x21E6, # ⇦
0xAAE3: 0x21BC, # ↼
0xAAE4: 0x27F5, # ⟵
0xAAE5: 0x2196, # ↖️
0xAAE6: 0x2199, # ↙️
0xAAE7: 0x2198, # ↘️
0xAAE8: 0x2197, # ↗️
0xAAE9: 0x22D5, # 平行等于 = ⋕
0xAAEA: 0x2AC5, # 包含于 = ⫅
0xAAEB: 0x2AC6, # 包含 = ⫆
0xAAEC: 0x29CB, # 相当于 = ⧋
0xAAED: 0x226B, # 远大于 = ≫
0xAAEE: 0x226A, # 远小于 = ≪
0xAAEF: 0x2A72, # 加或等于 = ⩲
0xAAF0: 0x22BB, # ⊻
0xAAF1: 0x2AE8, # 垂直等于 = ⫨
0xAAF2: 0x2277, # 大于或小于 = ≷
0xAAF3: 0x227D, # ≽
0xAAF4: 0x227C, # ≼
0xAAF5: 0x2109, # 华氏度 = ℉
0xAAF6: 0x2203, # 存在量词 = ∃
0xAAF7: 0x22F1, # 对角三连点 = ⋱
0xAAF9: 0x2241, # ≁
0xAAFA: 0x2244, # ≄
0xAAFB: 0x2276, # ≶
0xAAFC: 0x2209, # 不属于 = ∉
0xAAFD: 0x2267, # ≧
0xAAFE: 0x2266 # ≦
})
# Area AB
_update({
0xABA1: 0x224B, # ≋
0xABA2: 0x2262, # 不恒等于 = ≢
0xABA3: 0x2251, # 近似值号 = ≑
0xABA4: 0x2284, # 不包含于 = ⊄
0xABA5: 0x2285, # 不包含 = ⊅
0xABA6: 0x2259, # 相当于、等角的、估算 = ≙
0xABA7: 0x2205, # 空集 = ∅
0xABA8: 0x2207, # 微分算符 = ∇
0xABA9: 0x2A01, # 直和 = ⨁
0xABAA: 0x2A02, # 重积 = ⨂
0xABAB: 0x03F9, # 组合 = Ϲ
0xABAC: 0xABAC, # 对角六连点(⋰ + ⋰)
0xABAD: 0x263C, # ☼
0xABAE: 0xABAE, # (⚬ + ↑)
0xABAF: 0x2247, # 不近似等于 = ≇
0xABB0: 0x2249, # 不近似等于 = ≉
0xABB1: 0x2278, # 不小于大于 = ≸
0xABB2: 0x22F6, # 不属于 = ⋶
0xABB3: 0x2AFA, # 大于等于 = ⫺
0xABB4: 0x2AF9, # 小于等于 = ⫹
0xABB5: 0x2245, # 近似等于、接近 = ≅
0xABB6: 0x2267, # 大于等于 = ≧
0xABB7: 0x2250, # 近似等于 = ≐
0xABB8: 0x2266, # 小于等于 = ≦
0xABB9: 0x2A26, # 加或差 = ⨦
0xABBA: 0x2213, # 负或正、减或加 = ∓
0xABBB: 0x233F, # ⌿
0xABBC: 0x30FC, # 日文符号 = ー
0xABBD: 0xABBD, # 近似值号(· + ≈)
0xABBE: 0x2288, # 不包含于 = ⊈
0xABBF: 0x2289, # 不包含 = ⊉
0xABC0: 0x225A, # 角相等 = ≚
0xABC1: 0x2205, # 空集 = ∅
0xABC2: 0xABC2, # (diagonal 卐)
0xABC3: 0x0024, # $
0xABC4: 0x2709, # ✉
0xABC5: 0x272E, # ✮
0xABC6: 0x272F, # ✯
0xABC7: 0x2744, # ❄
0xABC8: 0x211E, # 处方符号 = ℞
0xABC9: 0x1D110, # 𝄐
0xABCA: 0x2034, # 三次微分 = ‴
0xABCB: 0xABCB, # 对角六连点(⋱ + ⋱)
0xABCC: 0x2ACB, # 真包含于 = ⫋
0xABCD: 0x2ACC, # 真包含 = ⫌
0xABCE: 0x2A63, # ⩣
0xABCF: 0xABCF, # 约数0(0 + \)
0xABD0: 0xABD0, # 约数1(1 + \)
0xABD1: 0xABD1, # 约数2(2 + \)
0xABD2: 0xABD2, # 约数3(3 + \)
0xABD3: 0xABD3, # 约数4(4 + \)
0xABD4: 0xABD4, # 约数5(5 + \)
0xABD5: 0xABD5, # 约数6(6 + \)
0xABD6: 0xABD6, # 约数7(7 + \)
0xABD7: 0xABD7, # 约数8(8 + \)
0xABD8: 0xABD8, # 约数9(9 + \)
0xABD9: 0x216C, # 罗马数字50 = Ⅼ
0xABDA: 0x216D, # 罗马数字100 = Ⅽ
0xABDB: 0x216E, # 罗马数字500 = Ⅾ
0xABDC: 0x216F, # 罗马数字1000 = Ⅿ
0xABDD: 0x2295, # 圈加 = ⊕
0xABDE: 0xABDE, # 圈加减(◯ + ±)
0xABDF: 0x2296, # 圈减 = ⊖
0xABE0: 0xABE0, # 圈点减(◯ + ∸)
0xABE1: 0x2297, # 圈乘 = ⊗
0xABE2: 0x2A38, # 圈除 = ⨸
0xABE3: 0x229C, # 圈等于 = ⊜
0xABE4: 0xABE4, # 交流电机(◯ + ∼)
0xABE5: 0xABE5, # 圈大于等于(◯ + ≥)
0xABE6: 0xABE6, # 圈小于等于(◯ + ≤)
0xABE7: 0x224A, # 近似等于 = ≊
0xABE8: 0xABE8, # (> + >)
0xABE9: 0xABE9, # (< + <)
0xABEA: 0x22DB, # 大于等于小于 = ⋛
0xABEB: 0x22DA, # 小于等于大于 = ⋚
0xABEC: 0x2A8C, # 大于等于小于 = ⪌
0xABED: 0x2A8B, # 小于等于大于 = ⪋
0xABEE: 0x2273, # ≳
0xABEF: 0x2272, # ≲
0xABF0: 0x29A5, # ⦥
0xABF1: 0x29A4, # ⦤
0xABF2: 0x2660, # 黑桃 = ♠
0xABF3: 0x2394, # 正六边形 = ⎔
0xABF4: 0x2B20, # 正五边形 = ⬠
0xABF5: 0x23E2, # 梯形 = ⏢
0xABF6: 0x2663, # 梅花 = ♣
0xABF7: 0x25B1, # 平行四边形 = ▱
0xABF8: 0x25AD, # 矩形 = ▭
0xABF9: 0x25AF, # 矩形 = ▯
0xABFA: 0x2665, # 红桃 = ♥
0xABFB: 0x2666, # 方块 = ♦
0xABFC: 0x25C1, # 三角形(向左) = ◁
0xABFD: 0x25BD, # 三角形(向下) = ▽
0xABFE: 0x25B7 # 三角形(向右) = ▷
})
# Area AC
_update({
0xACA1: 0x25C0, # 实三角形(向左) = ◀
0xACA2: 0x25BC, # 实三角形(向下) = ▼
0xACA3: 0x25B6, # 实三角形(向右) = ▶
0xACA4: 0x25FA, # 直角三角形 = ◺
0xACA5: 0x22BF, # 直角三角形 = ⊿
0xACA6: 0x25B3, # △
0xACA7: 0x27C1, # ⟁
0xACA8: 0x2BCE, # ⯎
0xACA9: 0x2B2F, # ⬯
0xACAA: 0xACAA, # (⬯ + ∥)
0xACAB: 0x2B2E, # ⬮
0xACAC: 0x2279, # 不大于小于 = ≹
0xACAD: 0x1D10B, # 𝄋
0xACAE: 0x2218, # 圈乘 = ∘
0xACAF: 0xACAF, # (vertical ≈)
0xACB2: 0xACB2, # (F-like symbol)
0xACB3: 0x22A6, # ⊦
0xACB4: 0x22A7, # ⊧
0xACB5: 0x22A8, # ⊨
0xACB6: 0x29FA, # 强阳二值 = ⧺
0xACB7: 0x29FB, # 强阳三值 = ⧻
0xACB8: 0xACB8, # 强阳四值(++++)
0xACB9: 0x291A, # ⤚
0xACBA: 0xACBA, # (⤙ + _)
0xACBB: 0xACBB, # (⤚ + _)
0xACBC: 0x2713, # 勾 = ✓
0xACBD: 0x22CE, # ⋎
0xACBE: 0xACBE, # (V + \)
0xACBF: 0xACBF, # (ˇ + | + ꞈ)
0xACC0: 0x224E, # 相当于、等值于 = ≎
0xACC1: 0x224F, # 间差 = ≏
0xACC2: 0x23D3, # ⏓
0xACC3: 0xACC3, # (◡ + _)
0xACC4: 0xACC4, # (◡ + _ + /)
0xACC5: 0x2715, # ✕
0xACC6: 0xACC6, # (✕ + •)
0xACC8: 0xACC8, # (∩ + ˜)
0xACC9: 0xACC9, # (∪ + ˜)
0xACCA: 0xACCA, # (V̰)
0xACCB: 0xACCB, # (V̱)
0xACCC: 0xACCC, # (V̱̰)
0xACCD: 0x2126, # Ω
0xACCE: 0x221D, # 成正比 = ∝
0xACCF: 0x29A0, # 角 = ⦠
0xACD0: 0x2222, # 角 = ∢
0xACD1: 0x2AAC, # 小于等于 = ⪬
0xACD2: 0x2239, # 差 = ∹
0xACD3: 0x223A, # ∺
0xACD4: 0x2135, # ℵ
0xACD5: 0xACD5, # (⊃ + ᐣ)
0xACD6: 0xACD6, # (⊃ + ᐣ + /)
0xACD7: 0x21CC, # ⇌
0xACD8: 0x274B, # ❋
0xACD9: 0x2B01, # ⬁
0xACDA: 0x2B03, # ⬃
0xACDB: 0x2B02, # ⬂
0xACDC: 0x2B00, # ⬀
0xACDD: 0xACDD, # (△ + ▾)
0xACDE: 0xACDE, # (▲ + ▿)
0xACDF: 0xACDF, # (( + —)
0xACE0: 0xACE0, # ([ + —)
0xACE1: 0xACE1, # ([ + —)
0xACE2: 0xACE2, # () + —)
0xACE3: 0xACE3, # (] + —)
0xACE4: 0xACE4, # (] + —)
0xACE5: 0xACE5, # (] + — + ₙ)
0xACE6: 0xACE6, # (] + — + ₘ)
0xACE7: 0xACE7, # (] + — + ₓ)
0xACE8: 0xACE8, # () + — + ₙ)
0xACE9: 0x2233, # 逆时针环积分 = ∳
0xACEA: 0x2232, # 顺时针环积分 = ∲
0xACEB: 0x222C, # 二重积分 = ∬
0xACEC: 0x222F, # 二重环积分 = ∯
0xACED: 0x222D, # 三重积分 = ∭
0xACEE: 0x2230, # 三重环积分 = ∰
0xACEF: 0x0421, # 组合符号 = С
0xACF0: 0x2019, # 所有格符 = ’
0xACF1: 0x0027, # 重音节符号 = '
0xACF2: 0x03A3, # 和(正文态) = Σ
0xACF3: 0x03A0, # 积(正文态) = Π
0xACF4: 0x02C7, # 注音符号 = ˇ
0xACF5: 0x02CB, # 注音符号 = ˋ
0xACF6: 0x02CA, # 注音符号 = ˊ
0xACF7: 0x02D9, # 注音符号 = ˙
0xACF8: 0x29F72, # 𩽲
0xACF9: 0x362D, # 㘭
0xACFA: 0x3A52, # 㩒
0xACFB: 0x3E74, # 㹴
0xACFC: 0x27741, # 𧝁
0xACFD: 0x30FC, # 日文长音记号 = ー
0xACFE: 0x2022 # 注音符号 = •
})
# Area AD
_update({
0xADA1: 0x3280, # 中文阳圈码一 = ㊀
0xADA2: 0x3281, # 中文阳圈码二 = ㊁
0xADA3: 0x3282, # 中文阳圈码三 = ㊂
0xADA4: 0x3283, # 中文阳圈码四 = ㊃
0xADA5: 0x3284, # 中文阳圈码五 = ㊄
0xADA6: 0x3285, # 中文阳圈码六 = ㊅
0xADA7: 0x3286, # 中文阳圈码七 = ㊆
0xADA8: 0x3287, # 中文阳圈码八 = ㊇
0xADA9: 0x3288, # 中文阳圈码九 = ㊈
0xADAA: 0xADAA, # 中文阳圈码一零(◯ + 一〇)
0xADAB: 0xADAB, # 中文阳圈码一一(◯ + 一一)
0xADAC: 0xADAC, # 中文阳圈码一二(◯ + 一二)
0xADAD: 0xADAD, # 中文阳圈码一三(◯ + 一三)
0xADAE: 0xADAE, # 中文阳圈码一四(◯ + 一四)
0xADAF: 0xADAF, # 中文阳圈码一五(◯ + 一五)
0xADB0: 0xADB0, # 中文阳圈码一六(◯ + 一六)
0xADB1: 0xADB1, # 中文阳圈码一七(◯ + 一七)
0xADB2: 0xADB2, # 中文阳圈码一八(◯ + 一八)
0xADB3: 0xADB3, # 中文阳圈码一九(◯ + 一九)
0xADB4: 0xADB4, # 中文阳圈码二零(◯ + 二〇)
0xADB5: 0x24EA, # 数字阳圈码0 = ⓪
0xADB6: 0x2018, # 外文左单引号 = ‘
0xADB7: 0x201C, # 外文左双引号 = “
0xADB8: 0x2019, # 外文右单引号 = ’
0xADB9: 0x201D, # 外文右双引号 = ”
0xADBA: 0x025B, # 国际音标 = ɛ
0xADBB: 0x0251, # 国际音标 = ɑ
0xADBC: 0x0259, # 国际音标 = ə
0xADBD: 0x025A, # 国际音标 = ɚ
0xADBE: 0x028C, # 国际音标 = ʌ
0xADBF: 0x0254, # 国际音标 = ɔ
0xADC0: 0x0283, # 国际音标 = ʃ
0xADC1: 0x02D1, # 国际音标 = ˑ
0xADC2: 0x02D0, # 国际音标 = ː
0xADC3: 0x0292, # 国际音标 = ʒ
0xADC4: 0x0261, # 国际音标 = ɡ
0xADC5: 0x03B8, # 国际音标 = θ
0xADC6: 0x00F0, # 国际音标 = ð
0xADC7: 0x014B, # 国际音标 = ŋ
0xADC8: 0x0264, # 国际音标 = ɤ
0xADC9: 0x0258, # 国际音标 = ɘ
0xADCA: 0x026A, # 国际音标 = ɪ
0xADCB: 0x0268, # 国际音标 = ɨ
0xADCC: 0x027F, # 国际音标 = ɿ
0xADCD: 0x0285, # 国际音标 = ʅ
0xADCE: 0x028A, # 国际音标 = ʊ
0xADCF: 0x00F8, # 国际音标 = ø
0xADD0: 0x0275, # 国际音标 = ɵ
0xADD1: 0x026F, # 国际音标 = ɯ
0xADD2: 0x028F, # 国际音标 = ʏ
0xADD3: 0x0265, # 国际音标 = ɥ
0xADD4: 0x0289, # 国际音标 = ʉ
0xADD5: 0x0278, # 国际音标 = ɸ
0xADD6: 0x0288, # 国际音标 = ʈ
0xADD7: 0x0290, # 国际音标 = ʐ
0xADD8: 0x0256, # 国际音标 = ɖ
0xADD9: 0x0282, # 国际音标 = ʂ
0xADDA: 0x0272, # 国际音标 = ɲ
0xADDB: 0x0271, # 国际音标 = ɱ
0xADDC: 0x03B3, # 国际音标 = γ
0xADDD: 0x0221, # 国际音标 = ȡ
0xADDE: 0x0255, # 国际音标 = ɕ
0xADDF: 0x0235, # 国际音标 = ȵ
0xADE0: 0x0291, # 国际音标 = ʑ
0xADE1: 0x0236, # 国际音标 = ȶ
0xADE2: 0x026C, # 国际音标 = ɬ
0xADE3: 0x028E, # 国际音标 = ʎ
0xADE4: 0x1D84, # 国际音标 = ᶄ
0xADE5: 0xAB53, # 国际音标 = ꭓ
0xADE6: 0x0127, # 国际音标 = ħ
0xADE7: 0x0263, # 国际音标 = ɣ
0xADE8: 0x0281, # 国际音标 = ʁ
0xADE9: 0x0294, # 国际音标 = ʔ
0xADEA: 0x0295, # 国际音标 = ʕ
0xADEB: 0x0262, # 国际音标 = ɢ
0xADEC: 0x0266, # 国际音标 = ɦ
0xADED: 0x4C7D, # 䱽
0xADEE: 0x24B6D, # 𤭭
0xADEF: 0x00B8, # 新蒙文 = ¸
0xADF0: 0x02DB, # 新蒙文 = ˛
0xADF1: 0x04D8, # 新蒙文 = Ә
0xADF2: 0x04BA, # 新蒙文 = Һ
0xADF3: 0x0496, # 新蒙文 = Җ
0xADF4: 0x04A2, # 新蒙文 = Ң
0xADF5: 0x2107B, # 𡁻
0xADF6: 0x2B62C, # 𫘬
0xADF7: 0x04D9, # 新蒙文 = ә
0xADF8: 0x04BB, # 新蒙文 = һ
0xADF9: 0x0497, # 新蒙文 = җ
0xADFA: 0x04A3, # 新蒙文 = ң
0xADFB: 0x40CE, # 䃎
0xADFC: 0x04AF, # 新蒙文 = ү
0xADFD: 0x02CC, # 次重音符号 = ˌ
0xADFE: 0xFF40 # 次重音符号 = `
})
# Area F8
_update({
0xF8A1: 0x5C2A, # 尪
0xF8A2: 0x97E8, # 韨
0xF8A3: 0x5F67, # 彧
0xF8A4: 0x672E, # 朮
0xF8A5: 0x4EB6, # 亶
0xF8A6: 0x53C6, # 叆
0xF8A7: 0x53C7, # 叇
0xF8A8: 0x8BBB, # 讻
0xF8A9: 0x27BAA, # 𧮪
0xF8AA: 0x8BEA, # 诪
0xF8AB: 0x8C09, # 谉
0xF8AC: 0x8C1E, # 谞
0xF8AD: 0x5396, # 厖
0xF8AE: 0x9EE1, # 黡
0xF8AF: 0x533D, # 匽
0xF8B0: 0x5232, # 刲
0xF8B1: 0x6706, # 朆
0xF8B2: 0x50F0, # 僰
0xF8B3: 0x4F3B, # 伻
0xF8B4: 0x20242, # 𠉂
0xF8B5: 0x5092, # 傒
0xF8B6: 0x5072, # 偲
0xF8B7: 0x8129, # 脩
0xF8B8: 0x50DC, # 僜
0xF8B9: 0x90A0, # 邠
0xF8BA: 0x9120, # 鄠
0xF8BB: 0x911C, # 鄜
0xF8BC: 0x52BB, # 劻
0xF8BD: 0x52F7, # 勷
0xF8BE: 0x6C67, # 汧
0xF8BF: 0x6C9A, # 沚
0xF8C0: 0x6C6D, # 汭
0xF8C1: 0x6D34, # 洴
0xF8C2: 0x6D50, # 浐
0xF8C3: 0x6D49, # 浉
0xF8C4: 0x6DA2, # 涢
0xF8C5: 0x6D65, # 浥
0xF8C6: 0x6DF4, # 淴
0xF8C7: 0x6EEA, # 滪
0xF8C8: 0x6E87, # 溇
0xF8C9: 0x6EC9, # 滉
0xF8CA: 0x6FBC, # 澼
0xF8CB: 0x6017, # 怗
0xF8CC: 0x22650, # 𢙐
0xF8CD: 0x6097, # 悗
0xF8CE: 0x60B0, # 悰
0xF8CF: 0x60D3, # 惓
0xF8D0: 0x6153, # 慓
0xF8D1: 0x5BAC, # 宬
0xF8D2: 0x5EBC, # 庼
0xF8D3: 0x95EC, # 闬
0xF8D4: 0x95FF, # 闿
0xF8D5: 0x9607, # 阇
0xF8D6: 0x9613, # 阓
0xF8D7: 0x961B, # 阛
0xF8D8: 0x631C, # 挜
0xF8D9: 0x630C, # 挌
0xF8DA: 0x63AF, # 掯
0xF8DB: 0x6412, # 搒
0xF8DC: 0x63F3, # 揳
0xF8DD: 0x6422, # 搢
0xF8DE: 0x5787, # 垇
0xF8DF: 0x57B5, # 垵
0xF8E0: 0x57BD, # 垽
0xF8E1: 0x57FC, # 埼
0xF8E2: 0x56AD, # 嚭
0xF8E3: 0x26B4C, # 𦭌
0xF8E4: 0x8313, # 茓
0xF8E5: 0x8359, # 荙
0xF8E6: 0x82F3, # 苳
0xF8E7: 0x8399, # 莙
0xF8E8: 0x44D6, # 䓖
0xF8E9: 0x841A, # 萚
0xF8EA: 0x83D1, # 菑
0xF8EB: 0x84C2, # 蓂
0xF8EC: 0x8439, # 萹
0xF8ED: 0x844E, # 葎
0xF8EE: 0x8447, # 葇
0xF8EF: 0x84DA, # 蓚
0xF8F0: 0x26D9F, # 𦶟
0xF8F1: 0x849F, # 蒟
0xF8F2: 0x84BB, # 蒻
0xF8F3: 0x850A, # 蔊
0xF8F4: 0x26ED8, # 𦻘
0xF8F5: 0x85A2, # 薢
0xF8F6: 0x85B8, # 薸
0xF8F7: 0x85E8, # 藨
0xF8F8: 0x8618, # 蘘
0xF8F9: 0x596D, # 奭
0xF8FA: 0x546F, # 呯
0xF8FB: 0x54A5, # 咥
0xF8FC: 0x551D, # 唝
0xF8FD: 0x5536, # 唶
0xF8FE: 0x556F # 啯
})
# Area F9
_update({
0xF9A1: 0x5621, # 嘡
0xF9A2: 0x20E01, # 𠸁
0xF9A3: 0x20F90, # 𠾐
0xF9A4: 0x360E, # 㘎
0xF9A5: 0x56F7, # 囷
0xF9A6: 0x5E21, # 帡
0xF9A7: 0x5E28, # 帨
0xF9A8: 0x5CA8, # 岨
0xF9A9: 0x5CE3, # 峣
0xF9AA: 0x5D5A, # 嵚
0xF9AB: 0x5D4E, # 嵎
0xF9AC: 0x5D56, # 嵖
0xF9AD: 0x5DC2, # 巂
0xF9AE: 0x8852, # 衒
0xF9AF: 0x5FAF, # 徯
0xF9B0: 0x5910, # 夐
0xF9B1: 0x7330, # 猰
0xF9B2: 0x247EF, # 𤟯
0xF9B3: 0x734F, # 獏
0xF9B4: 0x9964, # 饤
0xF9B5: 0x9973, # 饳
0xF9B6: 0x997E, # 饾
0xF9B7: 0x9982, # 馂
0xF9B8: 0x9989, # 馉
0xF9B9: 0x5C43, # 屃
0xF9BA: 0x5F36, # 弶
0xF9BB: 0x5B56, # 孖
0xF9BC: 0x59EE, # 姮
0xF9BD: 0x5AEA, # 嫪
0xF9BE: 0x7ED6, # 绖
0xF9BF: 0x7F0A, # 缊
0xF9C0: 0x7E34, # 縴
0xF9C1: 0x7F1E, # 缞
0xF9C2: 0x26221, # 𦈡
0xF9C3: 0x9A8E, # 骎
0xF9C4: 0x29A02, # 𩨂
0xF9C5: 0x9A95, # 骕
0xF9C6: 0x9AA6, # 骦
0xF9C7: 0x659D, # 斝
0xF9C8: 0x241A2, # 𤆢
0xF9C9: 0x712E, # 焮
0xF9CA: 0x7943, # 祃
0xF9CB: 0x794E, # 祎
0xF9CC: 0x7972, # 祲
0xF9CD: 0x7395, # 玕
0xF9CE: 0x73A0, # 玠
0xF9CF: 0x7399, # 玙
0xF9D0: 0x73B1, # 玱
0xF9D1: 0x73F0, # 珰
0xF9D2: 0x740E, # 琎
0xF9D3: 0x742F, # 琯
0xF9D4: 0x7432, # 琲
0xF9D5: 0x67EE, # 柮
0xF9D6: 0x6812, # 栒
0xF9D7: 0x3B74, # 㭴
0xF9D8: 0x6872, # 桲
0xF9D9: 0x68BC, # 梼
0xF9DA: 0x68B9, # 梹
0xF9DB: 0x68C1, # 棁
0xF9DC: 0x696F, # 楯
0xF9DD: 0x69A0, # 榠
0xF9DE: 0x69BE, # 榾
0xF9DF: 0x69E5, # 槥
0xF9E0: 0x6A9E, # 檞
0xF9E1: 0x69DC, # 槜
0xF9E2: 0x6B95, # 殕
0xF9E3: 0x80FE, # 胾
0xF9E4: 0x89F1, # 觱
0xF9E5: 0x74FB, # 瓻
0xF9E6: 0x7503, # 甃
0xF9E7: 0x80D4, # 胔
0xF9E8: 0x22F7E, # 𢽾
0xF9E9: 0x668D, # 暍
0xF9EA: 0x9F12, # 鼒
0xF9EB: 0x6F26, # 漦
0xF9EC: 0x8D51, # 赑
0xF9ED: 0x8D52, # 赒
0xF9EE: 0x8D57, # 赗
0xF9EF: 0x7277, # 牷
0xF9F0: 0x7297, # 犗
0xF9F1: 0x23C5D, # 𣱝
0xF9F2: 0x8090, # 肐
0xF9F3: 0x43F2, # 䏲
0xF9F4: 0x6718, # 朘
0xF9F5: 0x8158, # 腘
0xF9F6: 0x81D1, # 臑
0xF9F7: 0x7241, # 牁
0xF9F8: 0x7242, # 牂
0xF9F9: 0x7A85, # 窅
0xF9FA: 0x7A8E, # 窎
0xF9FB: 0x7ABE, # 窾
0xF9FC: 0x75A2, # 疢
0xF9FD: 0x75AD, # 疭
0xF9FE: 0x75CE # 痎
})
# Area FA
_update({
0xFAA1: 0x3FA6, # 㾦
0xFAA2: 0x7604, # 瘄
0xFAA3: 0x7606, # 瘆
0xFAA4: 0x7608, # 瘈
0xFAA5: 0x24ECA, # 𤻊
0xFAA6: 0x88C8, # 裈
0xFAA7: 0x7806, # 砆
0xFAA8: 0x7822, # 砢
0xFAA9: 0x7841, # 硁
0xFAAA: 0x7859, # 硙
0xFAAB: 0x785A, # 硚
0xFAAC: 0x7875, # 硵
0xFAAD: 0x7894, # 碔
0xFAAE: 0x40DA, # 䃚
0xFAAF: 0x790C, # 礌
0xFAB0: 0x771C, # 眜
0xFAB1: 0x251A7, # 𥆧
0xFAB2: 0x7786, # 瞆
0xFAB3: 0x778B, # 瞋
0xFAB4: 0x7564, # 畤
0xFAB5: 0x756C, # 畬
0xFAB6: 0x756F, # 畯
0xFAB7: 0x76C9, # 盉
0xFAB8: 0x76DD, # 盝
0xFAB9: 0x28C3E, # 𨰾
0xFABA: 0x497A, # 䥺
0xFABB: 0x94D3, # 铓
0xFABC: 0x94E6, # 铦
0xFABD: 0x9575, # 镵
0xFABE: 0x9520, # 锠
0xFABF: 0x9527, # 锧
0xFAC0: 0x28C4F, # 𨱏
0xFAC1: 0x9543, # 镃
0xFAC2: 0x953D, # 锽
0xFAC3: 0x28C4E, # 𨱎
0xFAC4: 0x28C54, # 𨱔
0xFAC5: 0x28C53, # 𨱓
0xFAC6: 0x9574, # 镴
0xFAC7: 0x79FE, # 秾
0xFAC8: 0x7A16, # 稖
0xFAC9: 0x415F, # 䅟
0xFACA: 0x7A5E, # 穞
0xFACB: 0x9E30, # 鸰
0xFACC: 0x9E34, # 鸴
0xFACD: 0x9E27, # 鸧
0xFACE: 0x9E2E, # 鸮
0xFACF: 0x9E52, # 鹒
0xFAD0: 0x9E53, # 鹓
0xFAD1: 0x9E59, # 鹙
0xFAD2: 0x9E56, # 鹖
0xFAD3: 0x9E61, # 鹡
0xFAD4: 0x9E6F, # 鹯
0xFAD5: 0x77DE, # 矞
0xFAD6: 0x76B6, # 皶
0xFAD7: 0x7F91, # 羑
0xFAD8: 0x7F93, # 羓
0xFAD9: 0x26393, # 𦎓
0xFADA: 0x7CA6, # 粦
0xFADB: 0x43AC, # 䎬
0xFADC: 0x8030, # 耰
0xFADD: 0x8064, # 聤
0xFADE: 0x8985, # 覅
0xFADF: 0x9892, # 颒
0xFAE0: 0x98A3, # 颣
0xFAE1: 0x8683, # 蚃
0xFAE2: 0x86B2, # 蚲
0xFAE3: 0x45AC, # 䖬
0xFAE4: 0x8705, # 蜅
0xFAE5: 0x8730, # 蜰
0xFAE6: 0x45EA, # 䗪
0xFAE7: 0x8758, # 蝘
0xFAE8: 0x7F4D, # 罍
0xFAE9: 0x7B4A, # 筊
0xFAEA: 0x41F2, # 䇲
0xFAEB: 0x7BF0, # 篰
0xFAEC: 0x7C09, # 簉
0xFAED: 0x7BEF, # 篯
0xFAEE: 0x7BF2, # 篲
0xFAEF: 0x7C20, # 簠
0xFAF0: 0x26A2D, # 𦨭
0xFAF1: 0x8C68, # 豨
0xFAF2: 0x8C6D, # 豭
0xFAF3: 0x8DF6, # 跶
0xFAF4: 0x8E04, # 踄
0xFAF5: 0x8E26, # 踦
0xFAF6: 0x8E16, # 踖
0xFAF7: 0x8E27, # 踧
0xFAF8: 0x8E53, # 蹓
0xFAF9: 0x8E50, # 蹐
0xFAFA: 0x8C90, # 貐
0xFAFB: 0x9702, # 霂
0xFAFC: 0x9F81, # 龁
0xFAFD: 0x9F82, # 龂
0xFAFE: 0x9C7D # 鱽
})
# Area FB
_update({
0xFBA1: 0x9C8A, # 鲊
0xFBA2: 0x9C80, # 鲀
0xFBA3: 0x9C8F, # 鲏
0xFBA4: 0x4C9F, # 䲟
0xFBA5: 0x9C99, # 鲙
0xFBA6: 0x9C97, # 鲗
0xFBA7: 0x29F7C, # 𩽼
0xFBA8: 0x9C96, # 鲖
0xFBA9: 0x29F7E, # 𩽾
0xFBAA: 0x29F83, # 𩾃
0xFBAB: 0x29F87, # 𩾇
0xFBAC: 0x9CC1, # 鳁
0xFBAD: 0x9CD1, # 鳑
0xFBAE: 0x9CDB, # 鳛
0xFBAF: 0x9CD2, # 鳒
0xFBB0: 0x29F8C, # 𩾌
0xFBB1: 0x9CE3, # 鳣
0xFBB2: 0x977A, # 靺
0xFBB3: 0x97AE, # 鞮
0xFBB4: 0x97A8, # 鞨
0xFBB5: 0x9B4C, # 魌
0xFBB6: 0x9B10, # 鬐
0xFBB7: 0x9B18, # 鬘
0xFBB8: 0x9E80, # 麀
0xFBB9: 0x9E95, # 麕
0xFBBA: 0x9E91, # 麑
})
"B库符号(部分非组合用字符)"
symbolsB = UnicodeMap()
symbolsB.update({
0x8940: 0x1E37, # 国际音标 = ḷ
0x8941: 0x1E43, # 国际音标 = ṃ
0x8942: 0x1E47, # 国际音标 = ṇ
0x8943: 0x015E, # IPA = Ş
0x8944: 0x015F, # IPA = ş
0x8945: 0x0162, # IPA = Ţ
0x8946: 0x0163, # IPA = ţ
0x94C0: 0x2654, # chess (white square), white king = ♔
0x94C1: 0x2655, # chess (white square), white queen = ♕
0x94C2: 0x2656, # chess (white square), white rook = ♖
0x94C3: 0x2658, # chess (white square), white knight = ♘
0x94C4: 0x2657, # chess (white square), white bishop = ♗
0x94C5: 0x2659, # chess (white square), white pawn = ♙
0x94C6: 0x265A, # chess (white square), black king = ♚
0x94C7: 0x265B, # chess (white square), black queen = ♛
0x94C8: 0x265C, # chess (white square), black rook = ♜
0x94C9: 0x265E, # chess (white square), black knight = ♞
0x94CA: 0x265D, # chess (white square), black bishop = ♝
0x94CB: 0x265F, # chess (white square), black pawn = ♟
0x94EC: 0x2660, # card suits (bridge) - spades = ♠
0x94ED: 0x2665, # card suits (bridge) - hearts = ♥
0x94EE: 0x2666, # card suits (bridge) - diamonds = ♦
0x94EF: 0x2663, # card suits (bridge) - clubs = ♣
0x95F1: 0x1FA67, # xiangqi, black general = 🩧
0x95F2: 0x1FA64, # xiangqi, red chariot = 🩤
0x95F3: 0x1FA63, # xiangqi, red horse = 🩣
0x95F4: 0x1FA65, # xiangqi, red cannon = 🩥
0x95F5: 0x1FA66, # xiangqi, red soldier = 🩦
0x95F6: 0x1FA62, # xiangqi, red elephant = 🩢
0x95F7: 0x1FA61, # xiangqi, red advisor = 🩡
0x95F8: 0x1FA60, # xiangqi, red general = 🩠
0x95F9: 0x1FA6B, # xiangqi, black chariot = 🩫
0x95FA: 0x1FA6A, # xiangqi, black horse = 🩪
0x95FB: 0x1FA6C, # xiangqi, black cannon = 🩬
0x95FC: 0x1FA6D, # xiangqi, black soldier = 🩭
0x95FD: 0x1FA68, # xiangqi, black advisor = 🩨
0x95FE: 0x1FA69, # xiangqi, black elephant = 🩩
0x968F: 0x1D11E, # other symbols: musical G clef = 𝄞
0x97A0: 0x4DC0, # Yijing hexagram symbol = ䷀
0x97A1: 0x4DC1, # Yijing hexagram symbol = ䷁
0x97A2: 0x4DC2, # Yijing hexagram symbol = ䷂
0x97A3: 0x4DC3, # Yijing hexagram symbol = ䷃
0x97A4: 0x4DC4, # Yijing hexagram symbol = ䷄
0x97A5: 0x4DC5, # Yijing hexagram symbol = ䷅
0x97A6: 0x4DC6, # Yijing hexagram symbol = ䷆
0x97A7: 0x4DC7, # Yijing hexagram symbol = ䷇
0x97A8: 0x4DC8, # Yijing hexagram symbol = ䷈
0x97A9: 0x4DC9, # Yijing hexagram symbol = ䷉
0x97AA: 0x4DCA, # Yijing hexagram symbol = ䷊
0x97AB: 0x4DCB, # Yijing hexagram symbol = ䷋
0x97AC: 0x4DCC, # Yijing hexagram symbol = ䷌
0x97AD: 0x4DCD, # Yijing hexagram symbol = ䷍
0x97AE: 0x4DCE, # Yijing hexagram symbol = ䷎
0x97AF: 0x4DCF, # Yijing hexagram symbol = ䷏
0x97B0: 0x4DD0, # Yijing hexagram symbol = ䷐
0x97B1: 0x4DD1, # Yijing hexagram symbol = ䷑
0x97B2: 0x4DD2, # Yijing hexagram symbol = ䷒
0x97B3: 0x4DD3, # Yijing hexagram symbol = ䷓
0x97B4: 0x4DD4, # Yijing hexagram symbol = ䷔
0x97B5: 0x4DD5, # Yijing hexagram symbol = ䷕
0x97B6: 0x4DD6, # Yijing hexagram symbol = ䷖
0x97B7: 0x4DD7, # Yijing hexagram symbol = ䷗
0x97B8: 0x4DD8, # Yijing hexagram symbol = ䷘
0x97B9: 0x4DD9, # Yijing hexagram symbol = ䷙
0x97BA: 0x4DDA, # Yijing hexagram symbol = ䷚
0x97BB: 0x4DDB, # Yijing hexagram symbol = ䷛
0x97BC: 0x4DDC, # Yijing hexagram symbol = ䷜
0x97BD: 0x4DDD, # Yijing hexagram symbol = ䷝
0x97BE: 0x4DDE, # Yijing hexagram symbol = ䷞
0x97BF: 0x4DDF, # Yijing hexagram symbol = ䷟
0x97C0: 0x4DE0, # Yijing hexagram symbol = ䷠
0x97C1: 0x4DE1, # Yijing hexagram symbol = ䷡
0x97C2: 0x4DE2, # Yijing hexagram symbol = ䷢
0x97C3: 0x4DE3, # Yijing hexagram symbol = ䷣
0x97C4: 0x4DE4, # Yijing hexagram symbol = ䷤
0x97C5: 0x4DE5, # Yijing hexagram symbol = ䷥
0x97C6: 0x4DE6, # Yijing hexagram symbol = ䷦
0x97C7: 0x4DE7, # Yijing hexagram symbol = ䷧
0x97C8: 0x4DE8, # Yijing hexagram symbol = ䷨
0x97C9: 0x4DE9, # Yijing hexagram symbol = ䷩
0x97CA: 0x4DEA, # Yijing hexagram symbol = ䷪
0x97CB: 0x4DEB, # Yijing hexagram symbol = ䷫
0x97CC: 0x4DEC, # Yijing hexagram symbol = ䷬
0x97CD: 0x4DED, # Yijing hexagram symbol = ䷭
0x97CE: 0x4DEE, # Yijing hexagram symbol = ䷮
0x97CF: 0x4DEF, # Yijing hexagram symbol = ䷯
0x97D0: 0x4DF0, # Yijing hexagram symbol = ䷰
0x97D1: 0x4DF1, # Yijing hexagram symbol = ䷱
0x97D2: 0x4DF2, # Yijing hexagram symbol = ䷲
0x97D3: 0x4DF3, # Yijing hexagram symbol = ䷳
0x97D4: 0x4DF4, # Yijing hexagram symbol = ䷴
0x97D5: 0x4DF5, # Yijing hexagram symbol = ䷵
0x97D6: 0x4DF6, # Yijing hexagram symbol = ䷶
0x97D7: 0x4DF7, # Yijing hexagram symbol = ䷷
0x97D8: 0x4DF8, # Yijing hexagram symbol = ䷸
0x97D9: 0x4DF9, # Yijing hexagram symbol = ䷹
0x97DA: 0x4DFA, # Yijing hexagram symbol = ䷺
0x97DB: 0x4DFB, # Yijing hexagram symbol = ䷻
0x97DC: 0x4DFC, # Yijing hexagram symbol = ䷼
0x97DD: 0x4DFD, # Yijing hexagram symbol = ䷽
0x97DE: 0x4DFE, # Yijing hexagram symbol = ䷾
0x97DF: 0x4DFF, # Yijing hexagram symbol = ䷿
0x97E0: 0x2630, # trigram symbol = ☰
0x97E1: 0x2637, # trigram symbol = ☷
0x97E2: 0x2633, # trigram symbol = ☳
0x97E3: 0x2634, # trigram symbol = ☴
0x97E4: 0x2635, # trigram symbol = ☵
0x97E5: 0x2632, # trigram symbol = ☲
0x97E6: 0x2636, # trigram symbol = ☶
0x97E7: 0x2631, # trigram symbol = ☱
0x97EF: 0x2A0D, # principal value integral = ⨍
0x97F0: 0x0274, # IPA = ɴ
0x97F1: 0x0280, # IPA = ʀ
0x97F2: 0x97F2, # IPA (ɔ̃); no single Unicode codepoint, maps to itself
0x97F3: 0x97F3, # IPA (ɛ̃); no single Unicode codepoint, maps to itself
0xA080: 0x00B7, # Western interpunct = ·
0xA08E: 0x2039, # Western left single guillemet = ‹
0xA08F: 0x203A, # Western right single guillemet = ›
0xA090: 0x00AB, # Western left double guillemet = «
0xA091: 0x00BB, # Western right double guillemet = »
0xBD8A: 0x2201, # complement = ∁
0xBD8B: 0x2115, # set symbol N = ℕ
0xBD8C: 0x2124, # set symbol Z = ℤ
0xBD8D: 0x211A, # set symbol Q = ℚ
0xBD8E: 0x211D, # set symbol R = ℝ
0xBD8F: 0x2102, # set symbol C = ℂ
0xBD90: 0x00AC, # negation sign = ¬
0xBD93: 0xBD93, # not an element of (∈ with \); unmapped, kept as-is
0xBD94: 0xBD94, # not an element of (∈ with |); unmapped, kept as-is
0xBD95: 0x220B, # contains as member = ∋
0xBD96: 0x220C, # does not contain as member = ∌
0xBD97: 0xBD97, # does not contain (∋ with |); unmapped, kept as-is
0xBD98: 0xBD98, # does not contain (∌ with \); unmapped, kept as-is
0xBD99: 0x22FD, # does not contain (overbar variant) = ⋽
0xBD9A: 0xBD9A, # not equal (= with \); unmapped, kept as-is
0xBD9B: 0x1D463 # mathematical italic small v = 𝑣
})
```
#### File: FBDParser/patterns/_global.py
```python
import re
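# _R resolves '{_r[name]}' references by rewriting every named group
# '(?P<name>' in the referenced pattern into a non-capturing '(?:', so a
# sub-pattern can be embedded in a larger one without duplicate group names;
# _f formats a pattern template against the module globals plus any overrides.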
class _R:
def __init__(self, kwargs):
self.kwargs = kwargs
def __getitem__(self, key):
return re.sub(r'\(\?P\<.*?\>', '(?:', self.kwargs[key])
def _f(pat, **vars):
args = dict(globals(), **vars)
args['_r'] = _R(args)
return pat.format(**args)
"=======================================公用参数========================================="
# 字号
size = r'''(?:
(?P<p1>[0124567]["”]?|3|10["”]?|11|63|72|84|96)| # 常用字号
(?P<p2>\d+\.(?:\d\d?)?)| # 磅字号
(?P<p3>\d{1,4}j) # 级字号
)'''
# 双向字号
bisize = _f(r'''(?:
(?P<v>{_r[size]}) # 纵向字号
(?:,(?P<h>{_r[size]}))? # 横向字号
)''')
# 行距、字距
length = _f(r'''(?:
(?P<p1> # 以字号为单位
(?:(?P<size>{_r[size]})\:)? # 单位字号
(?:\d+(?:\*\d+(?:/\d+)?)?|\*\d+(?:/\d+)?)
)|
(?P<p2>\d+(?:\.\d+)?mm)| # 以毫米为单位
(?P<p3>\d+(?:\.\d+)?p)| # 以磅为单位
(?P<p4>\d+x) # 以线为单位
)''')
# 空行参数
lines = _f(r'''(?:
(?P<p1>\d+)| # 整数行
(?P<p2>\d*\+{_r[length]})| # 附加距离
(?P<p3>\d*\*\d+(?:/\d+)?) # 行占比
)''')
# 起点
anchor = _f(r'''(?:
(?P<qd> # 起点
\((?:-?{_r[lines]})?,-?{_r[length]}\)| # 绝对位置
# 相对当前栏(页)的位置
# K表示要排到下一栏(页)的初始位置
,K?(?:[ZY]|S|[ZY][SX])|,X|,K
)?
(?P<pf>,PZ|,PY|,BP)? # 排法
(?P<dy>,DY)? # 在分栏或对照时,内容可跨栏
)''')
# 字体名
fontname = r'[A-Z][A-Z1-9]*'
# 字体集
fontset = _f(r'''
(?P<zh> # 字号
{_r[bisize]})
(?P<ht>{_r[fontname]}) # 汉字字体
(?:&(?P<wt>{_r[fontname]}))? # 外文字体
(?:&(?P<st>{_r[fontname]}))? # 数字字体
(?:《H(?P<hw>.*?)》)? # 汉字外挂字体名
(?:《W(?P<ww>.*?)》)? # 外文外挂字体名
''')
# 颜色
color = r'''@(?P<ys>%?(?: # CMYK值/百分比
\d{1,3},\d{1,3},\d{1,3},\d{1,3}|
\(\d{1,3},\d{1,3},\d{1,3},\d{1,3}\))
)'''
# 花边线
lace = r'H(?:0\d\d|10\d|11[0-7])'
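# A minimal usage sketch (an assumption, not from the original source: these
# verbose patterns appear meant to be compiled with re.VERBOSE / re.X, since
# they contain insignificant whitespace and inline comments):
#
#   size_re = re.compile(_f(r'^{_r[size]}$'), re.X)
#   assert size_re.match('7"') and size_re.match('10.5') and size_re.match('128j')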
``` |
{
"source": "jonix6/minepdf",
"score": 2
} |
#### File: minepdf/minepdf/cidsystem.py
```python
import re
from collections import OrderedDict
import struct
import os
import decoder748
REG_EXP = re.compile(r'^\s*<([0-9a-f]+)>\s+<([0-9a-f]+)>\s+(\d+)$', re.M)
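# matches one '<start_hex> <end_hex> unicode_decimal' range line per hit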
class CMap:
MAP_STRING = ''
def __init__(self):
self.codePoints = set()
self.cid2unicode = {}
self._feed()
def _feed(self):
for (s, e, code) in re.findall(REG_EXP, self.MAP_STRING):
s = int(s, 16)
e = int(e, 16)
self.codePoints.add(s)
self.cid2unicode[s] = int(code)
def to_unicode(self, cid):
# codePoints is a set, so it must be sorted before scanning;
# find the greatest range start <= cid, then offset into that range
point = 0
for next_point in sorted(self.codePoints):
if cid < next_point:
break
point = next_point
code = self.cid2unicode.get(point, point)
return chr(code + cid - point)
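# Module-level variant (wrapped by to_unicode_wrapper below): honors the
# map's per-font 'diff' overrides, then decodes multi-byte code points
# through gb18030.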
def to_unicode(klass, cid):
if cid in klass.diff:
return klass.diff[cid]
point = 0
for next_point in sorted(klass.cid2unicode.keys()):
if cid < next_point:
break
point = next_point
e = cid - point
code = klass.cid2unicode[point] + e
if code < 0x100:
c = chr(code)
elif code < 0x10000:
c = struct.pack('>H', code).decode('gb18030')
else:
c = struct.pack('>L', code).decode('gb18030')
return c
def to_unicode_wrapper(klass):
def func(cid):
return to_unicode(klass, cid)
return func
class UnicodeMap:
@property
def DESC(self):
return './cidtounicode'
def __init__(self, cmap=None):
# avoid the shared-mutable-default-argument pitfall
self.cid2unicode = {}
self.diff = cmap if cmap is not None else {}
def get(self, cid):
if cid in self.diff:
return self.diff[cid]
return chr(cid)
class ADOBE_GB1(UnicodeMap):
FILE_NAME = 'Adobe-GB1.cidToUnicode'
def getCMap(cmapType, cmap=None):
cmap = cmap if cmap is not None else {}
if cmapType.startswith('Founder-') and cmapType.endswith('748'):
decoder = decoder748.encoding(cmapType)
for cid in cmap:
cmap[cid] = decoder.decode(cmap[cid].encode('gb18030'))
elif cmapType == 'Adobe-GB1':
cmap = ADOBE_GB1(cmap=cmap)
return cmap
```
#### File: minepdf/minepdf/colorspace.py
```python
from pdfminer.psparser import LIT, PSLiteral, PSStackParser, PSKeyword, PSEOF, keyword_name
from pdfminer.pdftypes import PDFObjRef, resolve1, dict_value, stream_value, list_value, PDFStream
from PIL import ImageCms
from io import BytesIO
import numpy as np
from itertools import product
class colorSpaces:
@property
def defaults(self):
default_values = [
(GrayColorSpace, LIT('DeviceGray'), LIT('G')),
(RGBColorSpace, LIT('DeviceRGB'), LIT('RGB')),
(CMYKColorSpace, LIT('DeviceCMYK'), LIT('CMYK')),
(CalGrayColorSpace, LIT('CalGray')),
(CalRGBColorSpace, LIT('CalRGB')),
(LabColorSpace, LIT('Lab')),
(ICCBasedColorSpace, LIT('ICCBased')),
(IndexedColorSpace, LIT('Indexed')),
(SeparationColorSpace, LIT('Separation')),
# (DeviceNColorSpace, LIT('DeviceN')),
(PatternColorSpace, LIT('Pattern')),
(NColorSpace, LIT('DeviceN')),
]
refs = {}
for tpl in default_values:
for i, x in enumerate(tpl):
if i > 0:
refs[x] = tpl[0]
return refs
def parse(self, obj, args=[]):
if isinstance(obj, PDFObjRef):
obj = resolve1(obj)
if isinstance(obj, PSLiteral):
cs = self.defaults.get(obj)
if not cs:
return None
# raise TypeError('unknown color space: %s' % obj.name)
return cs(*args)
if isinstance(obj, list):
return self.parse(obj[0], args=obj[1:])
class ColorSpace:
overprintMask = 0x0f
# identity conversion used as the default for every transform; a plain
# lambda stored on the class would receive self as its first argument
# when called as a method, so it is declared as a staticmethod
@staticmethod
def pipe(*val):
return val if len(val) > 1 else val[0]
getGray = getRGB = getCMYK = pipe
mapGray = mapRGB = mapCMYK = pipe
class GrayColorSpace(ColorSpace):
mode = 'L'
ncomps = 1
def getRGB(self, gray):
# [gray] · [1, 1, 1]
r = g = b = gray
return r, g, b
def getCMYK(self, gray):
# [gray] · [0, 0, 0, 1]
c = m = y = 0
k = gray
return c, m, y, k
class CalGrayColorSpace(GrayColorSpace):
whiteX = whiteY = whiteZ = 1
blackX = blackY = blackZ = 0
gamma = 1
def __init__(self, obj):
obj = resolve1(obj)
params = dict_value(obj)
self.whiteX, self.whiteY, self.whiteZ = params['WhitePoint']
self.blackX, self.blackY, self.blackZ = params['BlackPoint']
self.gamma = params['Gamma']
class RGBColorSpace(ColorSpace):
mode = 'RGB'
ncomps = 3
def getGray(self, r, g, b):
return 0.299 * r + 0.587 * g + 0.114 * b
def getCMYK(self, r, g, b):
c = 1 - r
m = 1 - g
y = 1 - b
k = min(c, m, y)
return c - k, m - k, y - k, k
def mapGray(self, arr):
return self.getGray(arr[..., 0], arr[..., 1], arr[..., 2])
def mapCMYK(self, arr):
k = arr.max(-1)
out = np.empty_like(arr)
out[..., 0] = k - arr[..., 0]
out[..., 1] = k - arr[..., 1]
out[..., 2] = k - arr[..., 2]
k = k[..., np.newaxis]
return np.concatenate((out, 255 - k), axis=-1)
class CalRGBColorSpace(RGBColorSpace):
matrix = [
1, 0, 0,
0, 1, 0,
0, 0, 1
]
def __init__(self, obj):
obj = resolve1(obj)
params = dict_value(obj)
self.whiteX, self.whiteY, self.whiteZ = params.get(
'WhitePoint', (1, 1, 1))
self.blackX, self.blackY, self.blackZ = params.get(
'BlackPoint', (0, 0, 0))
self.gammaR, self.gammaG, self.gammaB = params.get('Gamma', (1, 1, 1))
self.matrix = params.get('Matrix', self.matrix)
class CMYKColorSpace(ColorSpace):
mode = 'CMYK'
ncomps = 4
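# RGB contribution of each of the 16 corner points of CMYK space; the row
# order matches product([c1, c], [m1, m], [y1, y], [k1, k]) in getRGB below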
factors = [
[1, 1, 1],
[0.1373, 0.1216, 0.1255],
[1, 0.9490, 0],
[0.1098, 0.1020, 0],
[0.9255, 0, 0.5490],
[0.1412, 0, 0],
[0.9294, 0.1098, 0.1412],
[0.1333, 0, 0],
[0, 0.6784, 0.9373],
[0, 0.0588, 0.1412],
[0, 0.6510, 0.3137],
[0, 0.0745, 0],
[0.1804, 0.1922, 0.5725],
[0, 0, 0.0078],
[0.2118, 0.2119, 0.2235],
[0, 0, 0]
]
def getGray(self, c, m, y, k):
return 1 - k - 0.3 * c - 0.59 * m - 0.11 * y
def getRGB(self, c, m, y, k, r=0, g=0, b=0):
c1, m1, y1, k1 = 1-c, 1-m, 1-y, 1-k
for i, (b0, b1, b2, b3) in enumerate(product([c1, c], [m1, m], [y1, y], [k1, k])):
x = b0 * b1 * b2 * b3
r += self.factors[i][0] * x
g += self.factors[i][1] * x
b += self.factors[i][2] * x
return r, g, b
def mapGray(self, arr):
return 255 - arr[..., 3] - 0.3 * arr[..., 0] - 0.59 * arr[..., 1] - 0.11 * arr[..., 2]
def mapRGB(self, arr):
arr = arr.astype('float') / 255
out = np.empty_like(arr[..., :-1], dtype='float')
self.getRGB(*(arr[..., i] for i in range(4)),
*(out[..., i] for i in range(3)))
return (out * 255).astype('uint8')
xyzrgb = [
[3.240449, -1.537136, -0.498531],
[-0.969265, 1.876011, 0.041556],
[0.055643, -0.204026, 1.057229]
]
class LabColorSpace(ColorSpace):
mode = 'LAB'
ncomps = 3
def __init__(self, obj):
obj = resolve1(obj)
params = dict_value(obj)
self.whiteX, self.whiteY, self.whiteZ = params.get(
'WhitePoint', (1, 1, 1))
self.blackX, self.blackY, self.blackZ = params.get(
'BlackPoint', (0, 0, 0))
self.aMin, self.bMin, self.aMax, self.bMax = params.get(
'Range', (-100, -100, 100, 100))
self.kr = 1 / (
xyzrgb[0][0] * self.whiteX +
xyzrgb[0][1] * self.whiteY +
xyzrgb[0][2] * self.whiteZ
)
self.kg = 1 / (
xyzrgb[1][0] * self.whiteX +
xyzrgb[1][1] * self.whiteY +
xyzrgb[1][2] * self.whiteZ
)
self.kb = 1 / (
xyzrgb[2][0] * self.whiteX +
xyzrgb[2][1] * self.whiteY +
xyzrgb[2][2] * self.whiteZ
)
def getGray(self, l, a, b):
r, g, b = self.getRGB(l, a, b)
return 0.299 * r + 0.587 * g + 0.114 * b + 0.5
def getRGB(self, l, a, b):
def lab2xyz(t): return t ** 3 if (t >= 6 /
29) else (108 / 841 * (t - 4 / 29))
# convert L*a*b* to CIE 1931 XYZ color space
t1 = (l + 16) / 116
t2 = t1 + a / 500
X = lab2xyz(t2)
X *= self.whiteX
Y = lab2xyz(t1)
Y *= self.whiteY
t2 = t1 - b / 200
Z = lab2xyz(t2)
Z *= self.whiteZ
# convert XYZ to RGB, including gamut mapping and gamma correction
r = xyzrgb[0][0] * X + xyzrgb[0][1] * Y + xyzrgb[0][2] * Z
g = xyzrgb[1][0] * X + xyzrgb[1][1] * Y + xyzrgb[1][2] * Z
b = xyzrgb[2][0] * X + xyzrgb[2][1] * Y + xyzrgb[2][2] * Z
return r ** 0.5, g ** 0.5, b ** 0.5
def getCMYK(self, l, a, b):
r, g, b = self.getRGB(l, a, b)
c = 1 - r
m = 1 - g
y = 1 - b
k = min(c, m, y)
return c - k, m - k, y - k, k
class ICCBasedColorSpace(ColorSpace):
@property
def defaults(self):
return {
'L': GrayColorSpace,
'RGB': RGBColorSpace,
'CMYK': CMYKColorSpace,
'LAB': LabColorSpace
}
mode = 'RGB'
def __init__(self, obj):
obj = resolve1(obj)
fp = BytesIO(obj.get_data())
self.profile = ImageCms.ImageCmsProfile(fp)
fp.close()
self.mode = self.profile.profile.color_space
if self.mode == 'LAB':
alt = resolve1(obj['Alternate'])
if isinstance(alt, list):
alt = alt[1]
self.base = self.defaults[self.mode](alt)
else:
self.base = self.defaults[self.mode]()
self.ncomps = len(self.mode)
def getGray(self, *val):
return self.base.getGray(*val)
def getRGB(self, *val):
return self.base.getRGB(*val)
def getCMYK(self, *val):
return self.base.getCMYK(*val)
class IndexedColorSpace(ColorSpace):
mode = 'P'
basemode = 'RGB'
palette = list(map(lambda i: (i, i, i), range(256)))
ncomps = 1
def __init__(self, base, hival, obj):
cs = colorSpaces()
self.base = cs.parse(resolve1(base))
self.hival = int(resolve1(hival))
obj = resolve1(obj)
data = b''
if isinstance(obj, bytes):
data = obj
elif isinstance(obj, PDFStream):
data = obj.get_data()
if data:
n = self.base.ncomps
self.palette = [[data[i * n + j] for j in range(n)] for i in range(len(data) // n)]
def lookup(self, index):
i = max(0, min(index, len(self.palette) - 1))
return self.palette[i]
def getGray(self, index):
return self.base.getGray(*self.lookup(index))
def getRGB(self, index):
return self.base.getRGB(*self.lookup(index))
def getCMYK(self, index):
return self.base.getCMYK(*self.lookup(index))
def mapPixels(self, arr):
palette = np.array(self.palette, dtype='uint8')
return palette[arr]
def mapGray(self, arr):
return self.base.mapGray(arr)
def mapRGB(self, arr):
return self.base.mapRGB(arr)
def mapCMYK(self, arr):
return self.base.mapCMYK(arr)
class functionParser:
def _min(self, x, num):
if isinstance(x, (int, float)):
return min(x, num)
x[x >= num] = num
return x
def _max(self, x, num):
if isinstance(x, (int, float)):
return max(x, num)
x[x < num] = num
return x
class SampledFunctionParser(functionParser):
def __init__(self, spec, domain):
self.domain = domain
self.frange = list_value(spec['Range'])
self.nins = len(self.domain) >> 1
self.nouts = len(self.frange) >> 1
self.sizes = list_value(spec['Size'])[:self.nins]
self.bits = int(spec['BitsPerSample'])
if 'Encode' in spec:
self.encode = list_value(spec['Encode'])
else:
self.encode = [0] * (self.nins << 1)
self.encode[1::2] = [size-1 for size in self.sizes]
self.decode = list_value(
spec['Decode']) if 'Decode' in spec else self.frange[:]
# domain = [0 1]
# range = [0 1 0 1 0 1 0 1]
# bits = 8
# sizes = [1024]
# encode = [0 1023]
# decode = [0 1 0 1 0 1 0 1]
def interpolate(self, x, xmin, xmax, ymin, ymax):
return (ymax - ymin) / (xmax-xmin) * (x-xmin) + ymin
def parse(self, *args):
e = []
for i in range(self.nins):
x = self._min(
self._max(args[i], self.domain[i*2]), self.domain[i*2+1])
x = self.interpolate(
x, self.domain[i*2], self.domain[i*2+1], self.encode[i*2], self.encode[i*2+1])
e.append(self._min(self._max(x, 0), self.sizes[i]-1))
return e
def SampledFunction(spec, domain):
parser = SampledFunctionParser(spec, domain)
return parser.parse
class ExponentialFunctionParser(functionParser):
def __init__(self, spec, domain):
self.c0, self.c1 = [0], [1]
if spec.get('C0'):
self.c0 = [float(x) for x in list_value(spec['C0'])]
if spec.get('C1'):
self.c1 = [float(x) for x in list_value(spec['C1'])]
self.n = spec['N']
self.frange = None
if spec.get('Range'):
self.frange = list_value(spec.get('Range'))
self.domain = domain
def parse(self, ipt):
ipt /= 255
ipt = self._min(self._max(ipt, self.domain[0]), self.domain[1])
opt = []
for i in range(len(self.c0)):
x = self.c0[i] + pow(ipt, self.n) * (self.c1[i] - self.c0[i])
if self.frange:
x = self._min(self._max(x, self.frange[0]), self.frange[1])
opt.append(x * 255)
return opt
def ExponentialFunction(spec, domain):
parser = ExponentialFunctionParser(spec, domain)
return parser.parse
def StitchingFunction(spec, domain):
pass
class PSFunctionParser(PSStackParser):
def __init__(self, fp):
super().__init__(fp)
self.run()
def run(self):
try:
self.nextobject()
except PSEOF:
pass
_, self.argstack = self.curstack.pop()
self.reset()
def parse(self, *args):
argstack = list(args) + self.argstack
self.curstack = []
while argstack:
obj = argstack.pop(0)
if isinstance(obj, PSKeyword):
name = keyword_name(obj)
if not isinstance(name, str):
name = name.decode()
result = getattr(self, 'do_'+name)()
if result is not None:
if isinstance(result, (list, tuple)):
self.curstack += list(result)
else:
self.curstack.append(result)
else:
self.curstack.append(obj)
return self.curstack
def do_keyword(self, pos, token):
self.push((pos, token))
def do_roll(self):
n, j = self.pop(2)
vals = self.pop(n)
j %= n
if not j:
return vals
return (vals*2)[n-j:n*2-j]
def do_dup(self):
x = self.pop(1)
return x + x
def do_exch(self):
a, b = self.pop(2)
return b, a
def do_sub(self):
a, b = self.pop(2)
if isinstance(b, (int, float)):
return b - a
b[b < a] = 0
b[b >= a] -= a
return b
def do_pop(self):
self.pop(1)
def do_index(self):
i = self.pop(1)[0]
return self.curstack[-i-1]
def do_cvr(self):
num = self.pop(1)[0]
return float(num)
def do_mul(self):
a, b = self.pop(2)
return a * b
def PostScriptFunction(spec, domain):
parser = PSFunctionParser(BytesIO(spec.get_data()))
return parser.parse
def func_parse(spec):
func_type = int(spec.get('FunctionType'))
domain = list_value(spec.get('Domain'))
func_refs = {
0: SampledFunction,
2: ExponentialFunction,
3: StitchingFunction,
4: PostScriptFunction
}
func_builder = func_refs[func_type]
return func_builder(spec, domain)
class SeparationColorSpace(ColorSpace):
mode = 'P'
def __init__(self, alt, base, func, *args):
cs = colorSpaces()
self.base = cs.parse(resolve1(base))
spec = resolve1(func)
self.ncomps = len(spec['Domain']) >> 1
self.func = func_parse(spec)
def transform(self, *val):
transformed = self.func(*val)
new_val = []
for i in range(self.base.ncomps):
new_val.append(transformed[i])
return new_val
def mapPixels(self, arr):
if not self.func:
return arr
if len(arr.shape) == 2:
arr = arr[..., np.newaxis]
w, h, d = arr.shape
arr = arr.astype('float')
transformed = self.transform(*[arr[..., i] for i in range(d)])
result = None
for layer in transformed:
if isinstance(layer, (int, float)):
layer = np.ones((w, h), dtype='float') * layer
layer = layer.astype('uint8')
if result is None:
result = layer
else:
result = np.dstack([result, layer])
return result
def getGray(self, *val):
val = self.transform(*val)
return self.base.getGray(*val)
def getRGB(self, *val):
val = self.transform(*val)
return self.base.getRGB(*val)
def getCMYK(self, *val):
val = self.transform(*val)
return self.base.getCMYK(*val)
def mapGray(self, arr):
return self.base.mapGray(arr)
def mapRGB(self, arr):
return self.base.mapRGB(arr)
def mapCMYK(self, arr):
return self.base.mapCMYK(arr)
class NColorSpace(SeparationColorSpace):
mode = 'P'
def __init__(self, names, alt, func, *attrs):
self.names = list_value(names)
self.base = colorSpaces().parse(resolve1(alt))
spec = resolve1(func)
self.ncomps = len(spec['Domain']) >> 1
self.func = func_parse(spec)
class PatternColorSpace(ColorSpace):
under = None
mode = 'P'
ncomps = 1
def __init__(self, *args):
if args:
cs = colorSpaces()
self.under = cs.parse(resolve1(args[0]))
defaults = colorSpaces().defaults
parse = colorSpaces().parse
```
#### File: minepdf/minepdf/converter.py
```python
from pdfminer.pdfparser import PDFParser
from pdfminer.pdfdocument import PDFDocument
from pdfminer.pdfpage import PDFPage
from pdfminer.pdfdevice import PDFDevice
from pdfminer.pdfinterp import PDFResourceManager, PDFPageInterpreter, PDFTextState, PDFGraphicState
from pdfminer.pdftypes import list_value, dict_value, stream_value, PDFStream
from pdfminer.psparser import LIT, PSLiteral
from pdfminer.pdftypes import PDFObjRef, resolve1
from pdfminer.utils import mult_matrix
from pdftext import TextAnalyzer, textSpanBox
import pdffonts
import colorspace
def literal(name): return LIT(
name) if not isinstance(name, PSLiteral) else name
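# render_type builds a decorator that turns a Device callback into a no-op
# whenever its content type ('text', 'image' or 'path') appears in
# self.filtered; get_default wraps resource getters so parsed resources are
# cached on the manager and looked up by literal id on later calls.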
def render_type(ftype):
def render_function(func):
def render_arguments(self, *args, **kwargs):
if ftype in self.filtered:
return
return func(self, *args, **kwargs)
return render_arguments
return render_function
def get_default(res_type):
def binding(func):
def get_arguments(self, objid, obj=None):
res_list = getattr(self, res_type+'s', None)
if res_list is None:
return
if objid is not None:
objid = literal(objid)
if objid in res_list:
return res_list[objid]
elif obj is None:
return
func(self, objid, obj=obj)
if objid is not None:
return res_list.get(objid)
return get_arguments
return binding
class Paint:
def __init__(self, cs, value):
self.cs = cs
self.value = value
def draw(self):
return self.cs.getRGB(*self.value)
class TextState(PDFTextState):
def __init__(self):
super().__init__()
self.fill = None
self.extState = {}
def copy(self):
obj = self.__class__()
obj.font = self.font
obj.fontsize = self.fontsize
obj.charspace = self.charspace
obj.wordspace = self.wordspace
obj.scaling = self.scaling
obj.leading = self.leading
obj.render = self.render
obj.rise = self.rise
obj.matrix = self.matrix
obj.linematrix = self.linematrix
obj.fill = self.fill
obj.extState = self.extState
return obj
def __setattr__(self, key, value):
if key in ['charspace', 'wordspace']:
value *= getattr(self, 'scaling', 100) * 0.01
return object.__setattr__(self, key, value)
class GraphicState(PDFGraphicState):
def __init__(self):
super().__init__()
self.stroke = self.fill = None
self.extState = {}
def copy(self):
obj = self.__class__()
obj.linewidth = self.linewidth
obj.linecap = self.linecap
obj.linejoin = self.linejoin
obj.miterlimit = self.miterlimit
obj.dash = self.dash
obj.intent = self.intent
obj.flatness = self.flatness
obj.stroke = self.stroke
obj.fill = self.fill
obj.extState = self.extState
return obj
class Device(PDFDevice):
def __init__(self, filtered=None, laparams=None, check_visible=True):
super().__init__(None)
self.filtered = filtered or []
self.check_visible = check_visible
self.analyzer = TextAnalyzer(**(laparams or {}))
self.pageno = 1
self.reset()
self.viewBox = [0, 0, 0, 0]
def reset(self):
self.images = {}
self.text_layer = []
self.layers = {}
self.layer_stack = []
def begin_page(self, page, ctm):
self.reset()
self.layers[LIT('Page')] = (page.cropbox, ctm)
self.layer_stack = [LIT('Page')]
self.viewBox = page.cropbox
self.ymax = page.mediabox[3] - page.mediabox[1]
def is_visible(self, span, bbox):
boxset = set(map(lambda p: (int(p[0]), int(p[1])), span.bbox))
if len(boxset) < len(span.bbox):
return False
xmin, ymin, xmax, ymax = bbox
return all(xmin < x < xmax and ymin < y < ymax for x, y in boxset)
def get_current_layer(self):
i = -1
depth = 0
while True:
layerName = self.layer_stack[i]
if layerName == 'end':
depth += 1
else:
depth -= 1
if depth < 0:
break
i -= 1
return layerName, self.layers[layerName]
def end_page(self, page):
self.text_layer = filter(lambda x: not self.check_visible
or self.is_visible(x, self.viewBox), self.text_layer)
lines = self.analyzer.group_lines(self.text_layer)
paras = self.analyzer.group_paras(lines)
self.text_layer = paras
self.pageno += 1
def begin_figure(self, name, bbox, matrix):
x, y, w, h = bbox
self.layers[name] = ([x, y, x+w, y+h], matrix)
self.layer_stack.append(name)
def end_figure(self, name):
self.layer_stack.append('end')
@render_type('path')
def paint_path(self, graphicstate, stroke, fill, evenodd, path):
# path handling suspended
return path
@render_type('image')
def render_image(self, name, stream, anchored=False, textstate=None):
bbox, matrix = self.get_current_layer()[1]
self.images.setdefault(stream.objid, (name, stream, bbox, matrix))
@render_type('text')
def render_string(self, textstate, seq, *args):
layerName = self.get_current_layer()[0]
x, y = textstate.linematrix
a, b, c, d, e, f = mult_matrix(textstate.matrix, self.ctm)
matrix = a, b, c, d, e, self.ymax - f
box = textSpanBox((x, y), seq, textstate, layerName=layerName, matrix=matrix)
# check if text is visible
if not textstate.extState.get('OP', False) or not textstate.extState.get('OPM', 0):
self.text_layer.append(box)
elif textstate.extState.get('OPM', 1) and any(textstate.fill.value):
self.text_layer.append(box)
textstate.linematrix = box.originbox[2]
class ResourceManager(PDFResourceManager):
def __init__(self):
self.fonts = {}
self.colorspaces = colorspace.defaults.copy()
self.xobjects = {}
self.cache = {}
self.stream_objects = []
def clear(self):
for res in self.fonts:
stream_to_close = getattr(res, 'embedFont', None)
stream_to_close and stream_to_close.close()
self.fonts.clear()
self.colorspaces.clear()
self.xobjects.clear()
def render_resource(self, res_type, res_obj):
get_function = getattr(self, 'get_' + res_type.lower(), None)
return get_function and get_function(None, obj=res_obj)
@get_default('font')
def get_font(self, objid, obj=None):
for (fontid, spec) in dict_value(obj).items():
spec = dict_value(spec)
spec, fontType, embedFont, opentype = pdffonts.getType(spec)
if fontType:
font = fontType(spec, embedFont=embedFont and self.xobjects.get(
embedFont.objid, embedFont), opentype=opentype)
if embedFont:
objid = literal(embedFont.objid)
if objid not in self.xobjects:
self.xobjects[objid] = font.embedFont
self.fonts[literal(fontid)] = font
@get_default('colorspace')
def get_colorspace(self, objid, obj=None):
for (csid, spec) in dict_value(obj).items():
cs = colorspace.parse(spec)
if cs:
self.colorspaces[literal(csid)] = cs
def get_procset(self, objid, obj=None):
# procset handling suspended
pass
@get_default('xobject')
def get_xobject(self, objid, obj=None):
for (xobjid, xobjstrm) in dict_value(obj).items():
self.xobjects[literal(xobjid)] = xobjstrm
class Interpreter(PDFPageInterpreter):
def __init__(self, device):
self.rsrcmgr = ResourceManager()
self.device = device
# custom logging here
def log(self, message):
pass
def dup(self):
return self.__class__(self.device)
def close(self):
self.rsrcmgr.clear()
def init_resources(self, resources):
self.resources = resources
if resources:
for (k, v) in dict_value(resources).items():
self.debug and self.log('Resource: %r: %r' % (k, v))
self.rsrcmgr.render_resource(k, v)
def init_state(self, ctm):
self.gstack = []
self.ctm = ctm
self.device.set_ctm(self.ctm)
self.textstate = TextState()
self.graphicstate = GraphicState()
self.curpath = []
self.argstack = []
self.scs = self.ncs = colorspace.CMYKColorSpace()
def do_CS(self, name):
self.scs = self.rsrcmgr.get_colorspace(literal(name))
def do_cs(self, name):
self.ncs = self.rsrcmgr.get_colorspace(literal(name))
def do_SCN(self):
n = len(self.scs.mode)
pattern = self.argstack[-n:]
self.graphicstate.stroke = Paint(self.scs, pattern)
self.argstack = self.argstack[:-n]
def do_scn(self):
n = len(self.ncs.mode)
pattern = self.argstack[-n:]
self.graphicstate.fill = self.textstate.fill = Paint(self.ncs, pattern)
self.argstack = self.argstack[:-n]
def do_G(self, gray):
cs = colorspace.GrayColorSpace()
self.graphicstate.stroke = Paint(cs, gray)
def do_g(self, gray):
cs = colorspace.GrayColorSpace()
self.graphicstate.fill = self.textstate.fill = Paint(cs, gray)
def do_RG(self, r, g, b):
cs = colorspace.RGBColorSpace()
self.graphicstate.stroke = Paint(cs, (r, g, b))
def do_rg(self, r, g, b):
cs = colorspace.RGBColorSpace()
self.graphicstate.fill = self.textstate.fill = Paint(cs, (r, g, b))
def do_K(self, c, m, y, k):
cs = colorspace.CMYKColorSpace()
self.graphicstate.stroke = Paint(cs, (c, m, y, k))
def do_k(self, c, m, y, k):
cs = colorspace.CMYKColorSpace()
self.graphicstate.fill = self.textstate.fill = Paint(cs, (c, m, y, k))
def do_Tf(self, fontid, fontsize):
self.textstate.font = self.rsrcmgr.get_font(literal(fontid))
self.textstate.fontsize = fontsize
def do_Do(self, xobjid):
xobj = self.rsrcmgr.get_xobject(literal(xobjid))
if not xobj:
return
self.debug and self.log('Processing xobj: %r' % xobj)
xobj = stream_value(xobj)
subtype = xobj.get('Subtype')
if subtype is LIT('Form') and 'BBox' in xobj:
interpreter = self.dup()
bbox = list_value(xobj['BBox'])
matrix = list_value(xobj.get('Matrix', (1, 0, 0, 1, 0, 0)))
# According to PDF reference 1.7 section 4.9.1, XObjects in
# earlier PDFs (prior to v1.2) use the page's Resources entry
# instead of having their own Resources entry.
resources = dict_value(xobj.get('Resources')
) or self.resources.copy()
self.device.begin_figure(xobjid, bbox, matrix)
interpreter.render_contents(
resources, [xobj], ctm=mult_matrix(matrix, self.ctm))
self.device.end_figure(xobjid)
elif subtype is LIT('Image') and 'Width' in xobj and 'Height' in xobj:
self.device.render_image(xobjid, xobj, anchored=True)
else:
# unsupported xobject type.
pass
def do_EI(self, obj):
if 'W' in obj and 'H' in obj:
# keyword must match render_image's signature (textstate, not state)
self.device.render_image(
str(id(obj)), obj, anchored=False, textstate=self.textstate)
def do_gs(self, name):
if isinstance(name, PSLiteral):
name = name.name
gstate = self.resources['ExtGState'].get(name)
if gstate and not self.textstate.extState:
gstate = resolve1(gstate)
self.textstate.extState = gstate
def do_q(self):
self.gstack.append(self.get_current_state())
def do_Q(self):
self.gstack and self.set_current_state(self.gstack.pop())
# def do_Td(self, tx, ty):
# x, y = self.textstate.linematrix
# # print((x,y), (tx,ty))
# (a, b, c, d, e, f) = self.textstate.matrix
# print((x,y), (tx,ty), (tx*a+ty*c+e, tx*b+ty*d+f))
# self.textstate.matrix = (a, b, c, d, tx*a+ty*c+e, tx*b+ty*d+f)
# self.textstate.linematrix = (0, 0)
```
#### File: minepdf/minepdf/decoder748.py
```python
import struct
def tohalf(code):
ffe0 = [0xa2, 0xa3, 0xac, 0xaf, 0xa6, 0xa5, 0x20a9]
if code <= 0xff00:
return code
if 0xff01 <= code <= 0xff5e:
code -= 0xfee0
elif 0xff5f <= code <= 0xff60:
code = 0x2985 + code - 0xff5f
elif 0xffe0 <= code <= 0xffe6:
code = ffe0[code - 0xffe0]
return code
class DECODER:
@property
def glyphlist(self):
data = open('./cidToUnicode/private.dat', 'rb')
e000 = []
while 1:
chunk = data.read(2)
if not chunk:
break
code, = struct.unpack('>H', chunk)
e000.append(code)
data.close()
return e000
def valid_unicode(self, code):
if 0xe000 <= code <= 0xe814:
code = self.glyphlist[code - 0xe000]
return chr(code)
def decode(self, bytes):
return bytes.decode('gb18030')
class DECODER_PK748_E(DECODER):
def decode(self, bytes):
code, = struct.unpack('>H', bytes)
if code < 0x1000:
return chr(code)
hb, lb = struct.unpack('>2B', struct.pack('>H', code))
if not 0xa0 <= hb <= 0xad:
# outside the Founder private ranges: plain gb18030, normalised the
# same way as the fallthrough below (so decode always returns str)
return self.valid_unicode(tohalf(ord(struct.pack('>2B', hb, lb).decode('gb18030'))))
if hb == 0xa0 and 0x80 <= lb <= 0xfe:
if lb < 0xfe:
hb += 3
else:
hb, lb = 0xa1, 0xab
elif hb == 0xa2 and 0x41 <= lb <= 0x7f:
lb += 0x1f
elif hb == 0xa6 and 0x40 <= lb <= 0x5f:
lb += 0x1f
code = ord(struct.pack('>2B', hb, lb).decode('gb18030'))
return self.valid_unicode(tohalf(code))
def encoding(enc_type):
assert enc_type.startswith('Founder-')
enc_type = enc_type[8:].lower()
if enc_type == 'pkue1':
return DECODER_PK748_E()
return DECODER()
```
#### File: minepdf/minepdf/__init__.py
```python
from pdfminer.pdfparser import PDFParser
from pdfminer.pdfdocument import PDFDocument
from pdfminer.pdfpage import PDFPage
from converter import Device, Interpreter
def page_range_match(s, maxsize=10000):
def get_range(x):
if x.isdigit():
return {min(int(x), maxsize)}
start, _, end = x.partition('-')
if start.strip().isdigit() and end.strip().isdigit():
return set(range(min(int(start.strip()), maxsize), min(int(end.strip()) + 1, maxsize)))
return set()
result = set()
for x in s.strip().split(','):
result |= get_range(x.strip())
return result
def render_pages(ipt, page_range='*', laparams=None, filtered=[], password=b'', check_visible=True, maxsize=10000):
page_range = page_range.strip()
parser = PDFParser(ipt)
doc = PDFDocument(parser, password=password, fallback=False)
renderer = Device(filtered=filtered, laparams=laparams,
check_visible=check_visible)
factory = Interpreter(renderer)
wildcard = page_range == '*'
pages = not wildcard and page_range_match(page_range)
endpage = max(pages or [0])
if wildcard or endpage:
for i, page in enumerate(PDFPage.create_pages(doc)):
i += 1
if not wildcard and i not in pages:
continue
factory.process_page(page)
yield i, renderer, factory
if not wildcard and i == endpage:
break
factory.close()
text_sort_key = {
'left': (lambda p: p.bbox[0]),
'center': (lambda p: (p.bbox[0][1], p.bbox[0][0])),
'right': (lambda p: (p.bbox[1][1], p.bbox[1][0]))
}
def render_text(ipt, page_range='*', laparams=None, box_align='center', password=b'', check_visible=True, maxsize=10000):
for pageno, renderer, _ in render_pages(
ipt, page_range=page_range, laparams=laparams,
filtered=['image', 'path'], password=password,
check_visible=check_visible, maxsize=maxsize):
texts = []
for para in sorted(renderer.text_layer, key=text_sort_key[box_align]):
texts.append([line.get_text() for line in para])
yield pageno, texts
def render_image(ipt, page_range='*', password=b'', maxsize=10000):
for pageno, renderer, _ in render_pages(
ipt, filtered=['text', 'path'], page_range=page_range,
password=password, maxsize=maxsize):
for objid, (name, stream, bbox, matrix) in renderer.images.items():
yield pageno, objid, name, stream, bbox, matrix
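# A minimal usage sketch (hypothetical file name, not part of the original
# module):
#
#   with open('sample.pdf', 'rb') as fp:
#       for pageno, paras in render_text(fp, page_range='1-3'):
#           for para in paras:
#               print(pageno, para)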
``` |
{
"source": "jonizen/BookingBot",
"score": 3
} |
#### File: BookingBot/pageobjects/find_flight_page.py
```python
from selenium.webdriver.common.by import By
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.action_chains import ActionChains
class FindFlightPage:
def __init__(self, driver):
self.driver = driver
self.driver.instance.save_screenshot('flightpage.png')
self.load_flight_page = WebDriverWait(self.driver.instance, 30).until(
EC.visibility_of_element_located((
By.CSS_SELECTOR, "button.btn.btn-lg.btn-block.btn-primary.forward-navigation.ng-binding")))
def forward_button(self):
assert self.load_flight_page.is_displayed()
def click_forward_button(self):
self.driver.instance.find_element_by_css_selector(
"button.btn.btn-lg.btn-block.btn-primary.forward-navigation.ng-binding").click()
``` |
{
"source": "jonizhong/jonizhong.github.io",
"score": 3
} |
#### File: jonizhong.github.io/codes/ContinuousActorCritic_2d_v2_commented.py
```python
import random
import numpy
import scipy
import pylab
import Image
import math
# used by exporttiles()
# inserts into the file, after the first three bytes (the PGM magic), a
# comment line which looks e.g. like this: # highS: 0.099849 lowS: -0.099849
def exportinfo (filename, highS, lowS):
f = open(filename, 'rb')
content = f.read()
f.close()
f = open(filename, 'wb')
charcount = 0
for char in content:
f.write(char)
if charcount == 2:
f.write('# highS: %.6f lowS: %.6f\n' % (highS, lowS))
charcount += 1
f.close()
def exporttiles (X, x, y, a, b, frame, filename):
xy, ab = numpy.shape(X)
if (xy != x*y) or (ab != a*b):
print 'exporttiles: size error'
Y = numpy.zeros((frame + x*(a+frame), frame + y*(b+frame)))
image_id = 0
for xx in range(x):
for yy in range(y):
if image_id >= xy:
break
tile = numpy.reshape (X[image_id], (a, b))
beginA, beginB = frame + xx*(a+frame), frame + yy*(b+frame)
Y[beginA : beginA+a, beginB : beginB+b] = tile
image_id += 1
im = Image.new ("L", (frame + y*(b+frame), frame + x*(a+frame)))
im.info = 'comment here does not work'
im.putdata (Y.reshape((frame + x*(a+frame)) * (frame + y*(b+frame))), offset=-Y.min()*255.0/(Y.max()-Y.min()), scale=255.0/(Y.max()-Y.min()) )
im.save(filename, cmap=pylab.cm.jet) # seems to ignore the colormap
exportinfo (filename, numpy.max(X), numpy.min(X))
class world_model_RL:
def __init__(self, size_a, size_b):
# init input position
self.sel_a = random.uniform (0, size_a)
self.sel_b = random.uniform (0, size_b)
self.size_a = size_a
self.size_b = size_b
self.states = self.update_activation()
def newinit(self):
self.sel_a = random.uniform (0, self.size_a)
self.sel_b = random.uniform (0, self.size_b)
self.states = self.update_activation()
def update_activation(self):
states = numpy.zeros((self.size_a*self.size_b))
var = 1.5
for a in range(0, self.size_a):
for b in range(0, self.size_b):
distance =(a+0.5-self.sel_a)**2+(b+0.5-self.sel_b)**2
states[a * self.size_b + b] = math.exp(-distance/(2*var**2))
states /= numpy.sum(states)
return states
def act(self, act): #act is CONTINUOUS from 0 .. 2*PI
# position world reaction
self.sel_a += math.sin(act)
self.sel_b += math.cos(act)
# position boundary conditions
if self.sel_a < 0.0:
self.sel_a = 0.0
elif self.sel_a > self.size_a - 1.0:
self.sel_a = self.size_a - 1.0
if self.sel_b < 0.0:
self.sel_b = 0.0
elif self.sel_b > self.size_b - 1.0:
self.sel_b = self.size_b - 1.0
self.states = self.update_activation()
def reward(self):
# reward of 1.0 inside the 1x1 goal square centred at (5, 5), else 0.0
if 4.5 <= self.sel_a <= 5.5 and 4.5 <= self.sel_b <= 5.5:
return 1.0
else:
return 0.0
def sensor(self):
return numpy.reshape(self.states, (size_map))
def rand_winner (self, h, sigma):
rand = random.normalvariate(h, sigma)
if rand < 0.0:
rand += 2.0 * math.pi
elif rand >= 2.0 * math.pi:
rand -= 2.0 * math.pi
return rand
def process_boundary(self, w_mot, I):
sum_a = numpy.dot(numpy.sin(w_mot),I)
sum_b = numpy.dot(numpy.cos(w_mot),I)
angle = math.atan2(sum_a, sum_b)
if angle < 0 :
return angle + 2*math.pi
else:
return angle
size_a, size_b = 10, 10
size_map = (size_a) * (size_b)
size_mot = 1
w_mot = numpy.random.uniform(0, 2.0*math.pi, (size_mot, size_map))
w_cri = numpy.random.uniform(0.0, 0.1, (size_map))
world = world_model_RL(size_a, size_b)
sigma = 2*0.314
eps = 0.1
gamma = 0.7
eta = 0.7
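# TD(0) actor-critic (summary of the loop below): the critic learns
# V(s) ~ w_cri . I(s) with
#   delta = target - V(s),   w_cri += eps * delta * I(s),
# where target = r at the goal and gamma * V(s') otherwise; the actor's
# preferred angle stored in w_mot is pulled toward the executed action
# only when the predicted value improves (val_tic > val).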
for iter in range (10000):
world.newinit()
I = world.sensor()
h2 = world.process_boundary(w_mot,I)
act = world.rand_winner (h2, sigma)
val = numpy.dot (w_cri, I) # value
r = 0
duration = 0
while r == 0 and duration < 1000:
duration += 1
world.act(act) # do selected action
r = world.reward() # read reward
I_tic = world.sensor() # read new state
h2 = world.process_boundary(w_mot,I_tic)
act_tic = world.rand_winner (h2, sigma) # choose next action
val_tic = numpy.dot(w_cri, I_tic)
if r == 1.0:
# taking the raw reward as the terminal target (rather than
# r + gamma * val_tic) is cleaner, and the critic weights converge
target = r
print 'reward achieved!'
print 'duration: ', duration
else:
target = gamma * val_tic
delta = target - val # TD prediction error
w_cri += eps * delta * I
w_cri = numpy.clip(w_cri, 0.0, numpy.inf)
if val_tic > val:
sum_a = (math.sin(act)*eta*I)-numpy.sin(w_mot)*eta*I
sum_b = (math.cos(act)*eta*I)-numpy.cos(w_mot)*eta*I
sum_a = numpy.reshape(sum_a,(1,size_a*size_b))
sum_b = numpy.reshape(sum_b,(1,size_a*size_b))
w_mot_a = numpy.sin(w_mot) + sum_a
w_mot_b = numpy.cos(w_mot) + sum_b
w_mot = numpy.arctan2(w_mot_a, w_mot_b)
for i in range(numpy.shape(w_mot)[1]):
if w_mot[0,i] < 0 :
w_mot[0,i] += 2.0*math.pi
# this form of the update rule is more straightforward and closer to the original formulation
I[0:size_map] = I_tic[0:size_map]
val = val_tic
act = act_tic
exporttiles (numpy.reshape(I,(1,size_a * size_b)), 1, 1, size_a, size_b, 1, "/tmp/coco/obs_I_0.pgm")
exporttiles (w_mot, 1, size_mot, size_a, size_b, 1, "/tmp/coco/obs_v_0_0.pgm")
exporttiles (numpy.reshape (w_cri, (1,size_a * size_b)), 1, 1, size_a, size_b, 1, "/tmp/coco/obs_w_1_1.pgm")
print iter, duration, ' w_mot=%.2f..%.2f' % (numpy.min(w_mot), numpy.max(w_mot)), ' w_cri=%.2f..%.2f' % (numpy.min(w_cri), numpy.max(w_cri))
```
#### File: jonizhong.github.io/codes/simple-sumup-v094-nornn-2d-separate-stable.py
```python
# -------------------------------------------------------------------
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# -------------------------------------------------------------------
from pylab import *
import numpy
import random
import KTimage as KT
import math
class world_model_RL:
def __init__(self, reward_x, reward_y):
# init input position
self.reward_x = reward_x
self.reward_y = reward_y
self.start_x = 0.0
self.start_y = 0.1
self.sel_x = self.start_x
self.sel_y = self.start_y
self.StartPosCount = 10
def newinit(self):
if self.start_x < 0.9:
self.start_x += 1.0/self.StartPosCount
else:
self.start_x = 0.1
if self.start_y < 0.9:
self.start_y += 1.0/self.StartPosCount
else:
self.start_y = 0.1
self.sel_x = self.start_x
self.sel_y = self.start_y
def act(self, act): #act is [x,y], normalised
# position world reaction
self.sel_x += act[0]*0.06
self.sel_y += act[1]*0.06
# position boundary conditions
if self.sel_x < 0.0 or self.sel_x > 1.0:
self.sel_x -= act[0]*0.06
self.sel_y -= act[1]*0.06
if self.sel_y < 0.0 or self.sel_y > 1.0:
self.sel_x -= act[0]*0.06
self.sel_y -= act[1]*0.06
def reward(self): # reward area is 0.2*0.2 area
if self.sel_x >= self.reward_x - 0.1 and self.sel_x <= self.reward_x + 0.1 and self.sel_y >= self.reward_y - 0.1 and self.sel_y <= self.reward_y + 0.1:
return 1.0
else:
return 0.0
def sense(self):
return self.sel_x, self.sel_y
def rand_winner (self, h, sigma):
rand = random.normalvariate(h, sigma)
if rand < 0.0:
rand += 2.0 * math.pi
elif rand >= 2.0 * math.pi:
rand -= 2.0 * math.pi
return rand
class RNN:
""" A Multi-Layer RNN"""
def __init__(self, Perception, outtype='linear'):
""" Constructor """
# Set up network size
self.__perception = Perception
self.eta = 0.10 # learning rate
self.sigma = 0.2*math.pi # sigma for random action selection.
self.beta = 1.0
self.beta1 = 1.0
self.sm_beta = 90.0
self.eps = 0.10
self.nin = 2
self.nout = 2
self.ncritic = 1
self.nhidden = 5 # hidden layer size (hand-tuned; TODO: choose systematically)
print 'number of hidden units', self.nhidden
self.outtype = outtype
# Initialise network
self.weights11 = (1*numpy.random.rand(self.nhidden,self.nin+1)-0.5)*2/sqrt(self.nin+1) #weights between input and hidden1 (action)
self.weights12 = (1*numpy.random.rand(self.nhidden,self.nin+1)-0.5)*2/sqrt(self.nin+1) #weights between input and hidden2 (critic)
self.weights2 = (1*numpy.random.rand(self.nout,self.nhidden+1)-0.5)*2/sqrt(self.nhidden+1) #weights between hidden and output
self.weights4 = (1*numpy.random.rand(self.ncritic,self.nhidden+1)-0.5)*2/sqrt(self.nhidden+1) # weights between critic and hidden
def train(self,iteration):
gamma = 0.9
""" Train the thing """
# Add the inputs that match the bias node
self.maxIteration = iteration
self.error_action_sav = numpy.zeros((2,self.maxIteration/10))
self.error_val_sav = numpy.zeros((self.maxIteration/10))
self.error_total_action_sav = numpy.zeros((2,self.maxIteration/10))
self.error_total_val_sav = numpy.zeros((self.maxIteration/10))
self.average_duration_sav = numpy.zeros((self.maxIteration))
for iteration in range(0,self.maxIteration):
error = 0.0
error_val = 0.0
total_duration = 0
updateTimes = 0
self.visualisation()
for StartPos in range(0, self.__perception.StartPosCount**2):
self.__perception.newinit()
reward = 0.0
duration = 0
self.hidden = numpy.zeros((self.nhidden))
landmark_info = numpy.array(self.__perception.sense()) # 2 continuous inputs
inputs_bias = numpy.concatenate((landmark_info,-1*numpy.ones((1))),axis=0)
# ------- sigmoid function for hidden layer 1 -------
self.hidden1_y = dot(self.weights11,inputs_bias)
self.hidden1 = 1.0 / (1.0 + numpy.exp(-self.beta1*self.hidden1_y))
self.hiddenplusone1 = numpy.concatenate((self.hidden1,-1*numpy.ones((1))),axis=0) # add a bias unit
# ------- end of sigmoid function for hidden layer 1 -------
h_out = self.mlpfwd()
h = self.normalised(h_out)
h_angle = math.atan2(h[1], h[0])
action_angle = self.__perception.rand_winner(h_angle,self.sigma)
action = numpy.zeros((2))
action[0] = math.cos(action_angle)
action[1] = math.sin(action_angle)
action = self.normalised(action)
# ------- sigmoid function for hidden layer 2 -------
self.hidden2_y = dot(self.weights12,inputs_bias)
self.hidden2 = 1.0 / (1.0 + numpy.exp(-self.beta*self.hidden2_y))
self.hiddenplusone2 = numpy.concatenate((self.hidden2,-1*numpy.ones((1))),axis=0) # add a bias unit
# ------- end of sigmoid function for hidden layer 2 -------
val = self.valfwd()
r = self.__perception.reward() # read reward
while (r != 1.0) and duration < 1000:
duration += 1
total_duration += 1
updatew11 = numpy.zeros((numpy.shape(self.weights11)))
updatew12 = numpy.zeros((numpy.shape(self.weights12)))
updatew2 = numpy.zeros((numpy.shape(self.weights2)))
updatew4 = numpy.zeros((numpy.shape(self.weights4)))
self.__perception.act(action)
landmark_info_tic = numpy.array(self.__perception.sense()) # 2 continuous inputs
r = self.__perception.reward() # read reward
inputs_bias_tic = numpy.concatenate((landmark_info_tic,-1*numpy.ones((1))),axis=0)
KT.exporttiles(inputs_bias_tic, self.nin+1, 1, basedir+"obs_S_0.pgm")
# ------- sigmoid function for hidden layer 1 -------
self.hidden1_y = dot(self.weights11,inputs_bias_tic)
self.hidden1 = 1.0 / (1.0 + numpy.exp(-self.beta1*self.hidden1_y))
self.hiddenplusone1 = numpy.concatenate((self.hidden1,-1*numpy.ones((1))),axis=0) # add a bias unit
# ------- end of sigmoid function for hidden layer 1 -------
h_out = self.mlpfwd()
h = self.normalised(h_out)
h_angle = math.atan2(h[1], h[0])
action_tic_angle = self.__perception.rand_winner(h_angle,self.sigma)
action_tic = numpy.zeros((2))
action_tic[0] = math.cos(action_tic_angle)
action_tic[1] = math.sin(action_tic_angle)
action_tic = self.normalised(action_tic)
# ------- sigmoid function for hidden layer 2 -------
self.hidden2_y = dot(self.weights12,inputs_bias_tic)
self.hidden2 = 1.0 / (1.0 + numpy.exp(-self.beta*self.hidden2_y))
self.hiddenplusone2 = numpy.concatenate((self.hidden2,-1*numpy.ones((1))),axis=0) # add a bias unit
# ------- end of sigmoid function for hidden layer 2 -------
if self.__perception.sel_x > 0.1 and self.__perception.sel_y > 0.1 and self.__perception.sel_x < 0.3 and self.__perception.sel_y < 0.3:
KT.exporttiles(self.hidden1, 1, self.nhidden, basedir+"obs_S_1.pgm")
KT.exporttiles(self.hidden2, 1, self.nhidden, basedir+"obs_S_2.pgm")
if self.__perception.sel_x > 0.6 and self.__perception.sel_y > 0.6 and self.__perception.sel_x < 0.7 and self.__perception.sel_y < 0.7:
KT.exporttiles(self.hidden1, 1, self.nhidden, basedir+"obs_A_1.pgm")
KT.exporttiles(self.hidden2, 1, self.nhidden, basedir+"obs_A_2.pgm")
val_tic = self.valfwd()
# ----- training step: critic update, then conditional actor update -----#
if r == 1.0: # reward achieved
target = r
else: # because critic weights now converge.
target = gamma * val_tic # gamma = 0.9
# prediction error;
deltao = (target-val)
error_val += abs(deltao)
updatew4 = self.eps * (outer(deltao,self.hiddenplusone2))
deltah0 = self.hiddenplusone2*(1-self.hiddenplusone2)*(dot(transpose(self.weights4),deltao))
updatew12 = self.eta * (outer(deltah0[:-1],inputs_bias_tic))
self.weights12 += updatew12
self.weights4 += updatew4
if gamma * val_tic > val or r == 1.0:
updateTimes += 1
error += abs(action - h)
deltao2 = (action-h) / numpy.linalg.norm(action-h)
deltah0 = self.hiddenplusone1 * (1-self.hiddenplusone1) * dot(transpose(self.weights2),deltao2)
updatew11 = self.eta*(outer(deltah0[:-1],inputs_bias_tic))
self.weights11 += updatew11
updatew2 = self.eta * (outer(deltao2, self.hiddenplusone1))
self.weights2 += updatew2
##------------- end of the actor update applied only when the critic value improves -----------##
landmark_info = landmark_info_tic
action = action_tic
val = val_tic
if (iteration%1 == 0):
print "iteration:", iteration
print "Error in val:", error_val, "average per move:", error_val/float(total_duration+1)
print "Error in action:", error, "average per move:", error/float(updateTimes+1)
print "Total duration:", total_duration
print "Average duration", total_duration / (self.__perception.StartPosCount**2)
print "Update Times:", updateTimes
self.average_duration_sav[iteration] = total_duration / (self.__perception.StartPosCount**2)
def ploterror(self):
t = range(0, self.maxIteration)
plot(t, self.average_duration_sav)
show()
t = range(0, self.maxIteration, 10)
plot(t, self.average_duration_sav)
show()
plot(t, self.error_action_sav[0,:])
plot(t, self.error_action_sav[1,:])
show()
plot(t, self.error_val_sav)
show()
plot(t, self.error_total_action_sav[0,:])
plot(t, self.error_total_action_sav[1,:])
show()
plot(t, self.error_total_val_sav)
show()
def visualisation(self):
KT.exporttiles(self.weights11[:,0:-1], self.nhidden, self.nin, basedir+"obs_V_1_0.pgm")
KT.exporttiles(self.weights12[:,0:-1], self.nhidden, self.nin, basedir+"obs_W_1_0.pgm")
KT.exporttiles(self.weights2[:,0:-1], self.nhidden, self.nout, basedir+"obs_V_2_1.pgm")
KT.exporttiles(self.weights4[:,0:-1], self.nhidden, self.ncritic, basedir+"obs_W_2_1.pgm")
print 'visualisation updated!!'
def mlpfwd(self):
""" Run the network forward """
outputs = dot(self.weights2,self.hiddenplusone1)
return outputs
def valfwd(self):
""" Run the network forward """
outputs = dot(self.weights4,self.hiddenplusone2)
return outputs
def normalised(self, h):
outputs = h / numpy.linalg.norm(h)
return outputs
def test_trajectory(self):
Count = 100.0 # we need higher resolution for plotting the action
self.action_sav = numpy.zeros((Count, Count))
for x in range(int(Count)):
for y in range(int(Count)):
landmark_info = numpy.array([(x+1)/Count, (y+1)/Count]) # 2 continuous inputs
inputs_bias = numpy.concatenate((landmark_info,-1*numpy.ones((1))),axis=0)
# ------- sigmoid function for hidden layer 1 -------
self.hidden1_y = dot(self.weights11,inputs_bias)
self.hidden1 = 1.0 / (1.0 + numpy.exp(-self.beta1*self.hidden1_y))
self.hiddenplusone1 = numpy.concatenate((self.hidden1,-1*numpy.ones((1))),axis=0) # add a bias unit
# ------- end of sigmoid function for hidden layer 1 -------
h_out = self.mlpfwd()
h = self.normalised(h_out)
h_angle = math.atan2(h[1], h[0])
self.action_sav[x,y] = h_angle
basedir = "/tmp/coco/"
if __name__ == "__main__":
Perception = world_model_RL(0.55, 0.85)
Prediction = RNN(Perception, 'linear')
Prediction.train(1500)
Prediction.test_trajectory()
pcolor(Prediction.action_sav, cmap=cm.RdBu, vmax=abs(Prediction.action_sav).max(), vmin=-abs(Prediction.action_sav).max())
colorbar()
show()
#Prediction.predict()
``` |
{
"source": "jon-jacky/PyModel",
"score": 3
} |
#### File: samples/Socket/stepper_util.py
```python
import socket # Python standard library socket module, OR a simulator
# Default configuration, may rebind below
port = 8080
line_length = 80 # length limit for received messages
def listen():
"""
Initial setup, runs once
"""
global listener
# Server's listener socket
listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# get the receive buffer size to print below, just FYI
rcvbuf = listener.getsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF)
# Ensure we release listener socket immediately when program exits,
# to avoid socket.error: [Errno 48] Address already in use
listener.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
# Listen, prepare for senders to connect
listener.bind(('localhost', port))
listener.listen(1)
print '\nServer listens on localhost port %s with RCVBUF size %s' % (port, rcvbuf)
# Define function for sender connect - also used in stepper reset()
def connect():
global sender, receiver, msg, n, bufsize
sender = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# get and print send buffer size, just FYI
sndbuf = sender.getsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF)
print 'Sender creates socket with SNDBUF size %s' % (sndbuf)
sender.connect(('localhost', port))
print 'Sender connects to localhost port %s' % port
receiver, addr = listener.accept()
print 'Server accepts connection from ', addr
# State needed to remember _call args for __return
msg = ''
n = 0
bufsize = 0
def close():
global sender, receiver
sender.close()
receiver.close()
def reset():
close()
connect()
# Start listening and connect the first time
# Runs when stepper imports stepper_util
listen()
connect()
``` |
{
"source": "jonjacobs24/bcs-cnn",
"score": 2
} |
#### File: bcs-cnn/helper_tools/dicom_tools.py
```python
import os # referenced by the "os.PathLike[AnyStr]" annotation below
from typing import AnyStr, BinaryIO, Dict, List, NamedTuple, Optional, Union
import numpy as np
import pandas as pd
import pydicom as dicom
from skimage.exposure import rescale_intensity
def dcmread_image(
fp: Union[str, "os.PathLike[AnyStr]", BinaryIO],
view: str,
index: Optional[np.uint] = None,
) -> np.ndarray:
"""Read pixel array from DBT DICOM file"""
ds = dicom.dcmread(fp)
ds.decompress(handler_name="pylibjpeg")
pixel_array = ds.pixel_array
view_laterality = view[0].upper()
image_laterality = _get_image_laterality(pixel_array[index or 0])
if index is not None:
pixel_array = pixel_array[index]
if not image_laterality == view_laterality:
pixel_array = np.flip(pixel_array, axis=(-1, -2))
window_center = _get_window_center(ds)
window_width = _get_window_width(ds)
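# window/level: the displayed range is [center - width/2, center + width/2],
# rescaled to the full dtype range below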
low = (2 * window_center - window_width) / 2
high = (2 * window_center + window_width) / 2
pixel_array = rescale_intensity(
pixel_array, in_range=(low, high), out_range="dtype"
)
return pixel_array
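# A minimal usage sketch (hypothetical path, not part of the original module;
# DBT view names are e.g. 'lcc', 'rmlo'):
#
#   one_slice = dcmread_image('/data/volume.dcm', view='lcc', index=30)
#   full_volume = dcmread_image('/data/volume.dcm', view='lcc')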
def read_boxes(
boxes_fp: pd._typing.FilePathOrBuffer, filepaths_fp: pd._typing.FilePathOrBuffer
) -> pd.DataFrame:
"""Read pandas DataFrame with bounding boxes joined with file paths"""
df_boxes = pd.read_csv(boxes_fp)
df_filepaths = pd.read_csv(filepaths_fp)
primary_key = ("PatientID", "StudyUID", "View")
if not all([key in df_boxes.columns for key in primary_key]):
raise AssertionError(
f"Not all primary key columns {primary_key} are present in bounding boxes columns {df_boxes.columns}"
)
if not all([key in df_filepaths.columns for key in primary_key]):
raise AssertionError(
f"Not all primary key columns {primary_key} are present in file paths columns {df_filepaths.columns}"
)
return pd.merge(df_boxes, df_filepaths, on=primary_key)
def draw_box(
image: np.ndarray,
x: int,
y: int,
width: int,
height: int,
color: Optional[Union[int, tuple]] = None,
lw=4,
):
"""Draw bounding box on the image"""
x = min(max(x, 0), image.shape[1] - 1)
y = min(max(y, 0), image.shape[0] - 1)
if color is None:
color = np.max(image)
if len(image.shape) > 2 and not hasattr(color, "__len__"):
color = (color,) + (0,) * (image.shape[-1] - 1)
image[y : y + lw, x : x + width] = color
image[y + height - lw : y + height, x : x + width] = color
image[y : y + height, x : x + lw] = color
image[y : y + height, x + width - lw : x + width] = color
return image
def evaluate(
labels_fp: pd._typing.FilePathOrBuffer,
boxes_fp: pd._typing.FilePathOrBuffer,
predictions_fp: pd._typing.FilePathOrBuffer,
) -> Dict[str, float]:
"""Evaluate predictions"""
df_labels = pd.read_csv(labels_fp)
df_boxes = pd.read_csv(boxes_fp, dtype={"VolumeSlices": float})
df_pred = pd.read_csv(predictions_fp, dtype={"Score": float})
df_labels = df_labels.reset_index().set_index(["StudyUID", "View"]).sort_index()
df_boxes = df_boxes.reset_index().set_index(["StudyUID", "View"]).sort_index()
df_pred = df_pred.reset_index().set_index(["StudyUID", "View"]).sort_index()
df_pred["TP"] = 0
df_pred["GTID"] = -1
thresholds = [df_pred["Score"].max() + 1.0]
# find true positive predictions and assign detected ground truth box ID
for box_pred in df_pred.itertuples():
if box_pred.Index not in df_boxes.index:
continue
df_boxes_view = df_boxes.loc[[box_pred.Index]]
view_slice_offset = df_boxes.loc[[box_pred.Index], "VolumeSlices"].iloc[0] / 4
tp_boxes = [
b
for b in df_boxes_view.itertuples()
if _is_tp(box_pred, b, slice_offset=view_slice_offset)
]
if len(tp_boxes) > 1:
# find the nearest GT box
tp_distances = [_distance(box_pred, b) for b in tp_boxes]
tp_boxes = [tp_boxes[np.argmin(tp_distances)]]
if len(tp_boxes) > 0:
tp_i = tp_boxes[0].index
df_pred.loc[df_pred["index"] == box_pred.index, ("TP", "GTID")] = (1, tp_i)
thresholds.append(box_pred.Score)
thresholds.append(df_pred["Score"].min() - 1.0)
# compute sensitivity at 2 FPs/volume on all cases
evaluation_fps_all = (2.0,)
tpr_all = _froc(
df_pred=df_pred,
thresholds=thresholds,
n_volumes=len(df_labels),
n_boxes=len(df_boxes),
evaluation_fps=evaluation_fps_all,
)
result = {f"sensitivity_at_2_fps_all": tpr_all[0]}
# compute mean sensitivity at 1, 2, 3, 4 FPs/volume on positive cases
df_pred = df_pred[df_pred.index.isin(df_boxes.index)]
df_labels = df_labels[df_labels.index.isin(df_boxes.index)]
evaluation_fps_positive = (1.0, 2.0, 3.0, 4.0)
tpr_positive = _froc(
df_pred=df_pred,
thresholds=thresholds,
n_volumes=len(df_labels),
n_boxes=len(df_boxes),
evaluation_fps=evaluation_fps_positive,
)
result.update(
dict(
(f"sensitivity_at_{int(x)}_fps_positive", y)
for x, y in zip(evaluation_fps_positive, tpr_positive)
)
)
result.update({"mean_sensitivity_positive": np.mean(tpr_positive)})
return result
def _froc(
df_pred: pd.DataFrame,
thresholds: List[float],
n_volumes: int,
n_boxes: int,
evaluation_fps: tuple,
) -> List[float]:
tpr = []
fps = []
for th in sorted(thresholds, reverse=True):
df_th = df_pred.loc[df_pred["Score"] >= th]
df_th_unique_tp = df_th.reset_index().drop_duplicates(
subset=["StudyUID", "View", "TP", "GTID"]
)
n_tps_th = float(sum(df_th_unique_tp["TP"]))
tpr_th = n_tps_th / n_boxes
n_fps_th = float(len(df_th[df_th["TP"] == 0]))
fps_th = n_fps_th / n_volumes
tpr.append(tpr_th)
fps.append(fps_th)
if fps_th > max(evaluation_fps):
break
return [np.interp(x, fps, tpr) for x in evaluation_fps]
def _is_tp(
box_pred: NamedTuple, box_true: NamedTuple, slice_offset: int, min_dist: int = 100
) -> bool:
pred_y = box_pred.Y + box_pred.Height / 2
pred_x = box_pred.X + box_pred.Width / 2
pred_z = box_pred.Z + box_pred.Depth / 2
true_y = box_true.Y + box_true.Height / 2
true_x = box_true.X + box_true.Width / 2
true_z = box_true.Slice
# 2D distance between true and predicted center points
dist = np.linalg.norm((pred_x - true_x, pred_y - true_y))
# compute radius based on true box size
dist_threshold = np.sqrt(box_true.Width ** 2 + box_true.Height ** 2) / 2.0
dist_threshold = max(dist_threshold, min_dist)
slice_diff = np.abs(pred_z - true_z)
# TP if predicted center within radius and slice within slice offset
return dist <= dist_threshold and slice_diff <= slice_offset
def _distance(box_pred: NamedTuple, box_true: NamedTuple) -> float:
pred_y = box_pred.Y + box_pred.Height / 2
pred_x = box_pred.X + box_pred.Width / 2
pred_z = box_pred.Z + box_pred.Depth / 2
true_y = box_true.Y + box_true.Height / 2
true_x = box_true.X + box_true.Width / 2
true_z = box_true.Slice
return np.linalg.norm((pred_x - true_x, pred_y - true_y, pred_z - true_z))
def _get_dicom_laterality(ds: dicom.dataset.FileDataset) -> str:
"""Unreliable - DICOM laterality is incorrect for some cases"""
return ds[0x5200, 0x9229][0][0x0020, 0x9071][0][0x0020, 0x9072].value
def _get_image_laterality(pixel_array: np.ndarray) -> str:
left_edge = np.sum(pixel_array[:, 0]) # sum of left edge pixels
right_edge = np.sum(pixel_array[:, -1]) # sum of right edge pixels
return "R" if left_edge < right_edge else "L"
def _get_window_center(ds: dicom.dataset.FileDataset) -> np.float32:
return np.float32(ds[0x5200, 0x9229][0][0x0028, 0x9132][0][0x0028, 0x1050].value)
def _get_window_width(ds: dicom.dataset.FileDataset) -> np.float32:
return np.float32(ds[0x5200, 0x9229][0][0x0028, 0x9132][0][0x0028, 0x1051].value)
``` |
{
"source": "JonJagger/two-day-courses",
"score": 4
} |
#### File: pf/yatzy/yatzy.py
```python
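# Note: each scoring function below uses a different Python idiom
# (plain loop, generator expression, list.count, dict tally,
# collections.Counter, reversed/sorted scan, set comparison, shared
# helper); the file reads as a tour of styles.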
def score_chance(dice):
return sum(dice)
def score_yatzy(dice):
return 50 if dice[0] == dice[1] == dice[2] == dice[3] == dice[4] else 0
def score_ones(dice):
s = 0
for i in dice:
if i == 1:
s += 1
return s
def score_twos(dice):
return sum(d for d in dice if d == 2)
def score_threes(dice):
return dice.count(3) * 3
def score_fours(dice):
tally = {1: 0, 2: 0, 3: 0, 4: 0, 5: 0, 6: 0}
for d in dice:
tally[d] += d
return tally[4]
def score_fives(dice):
import collections
c = collections.Counter(dice)
return c[5] * 5
def score_sixes(dice):
s = 0
for x in reversed(sorted(dice)):
if x == 6:
s += x
else:
break
return s
def score_pair(dice):
    # ">= 2" rather than "== 2": a pair also exists inside three or more
    # of a kind (e.g. [6, 6, 6, 5, 4] scores 12 for the pair of sixes).
    for x in [6, 5, 4, 3, 2, 1]:
        if dice.count(x) >= 2:
            return 2*x
    return 0
def score_two_pairs(dice):
    from collections import Counter
    # ">= 2" so that a pair inside three-of-a-kind still counts,
    # e.g. [3, 3, 3, 2, 2] scores 3+3+2+2 = 10.
    pairs = [x for x, y in Counter(dice).items() if y >= 2]
    return pairs[0]*2 + pairs[1]*2 if len(pairs) == 2 else 0
def score_small_straight(dice):
return 15 if set(dice) == {1, 2, 3, 4, 5} else 0
def score_large_straight(dice):
if len(set(dice)) == 5 and min(dice) == 2:
return 20
return 0
def _score_n_of_a_kind(dice, n):
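    # Note: the exact-count match (== n, not >= n) keeps score_full_house from
    # counting the same dice as both the pair and the triple; it also means a
    # five-of-a-kind does not score as four-of-a-kind.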
for f in [6, 5, 4, 3, 2, 1]:
if dice.count(f) == n:
return f*n
return 0
def score_three_of_a_kind(dice):
return _score_n_of_a_kind(dice, 3)
def score_four_of_a_kind(dice):
return _score_n_of_a_kind(dice, 4)
def score_full_house(dice):
score_two_of_a_kind = lambda dice: _score_n_of_a_kind(dice, 2)
parts = score_two_of_a_kind(dice), score_three_of_a_kind(dice)
return sum(parts) if all(parts) else 0
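def _example_scores():
    # Illustrative checks (editor addition, not part of the original course
    # material): hypothetical rolls and the scores the functions above
    # should return.
    assert score_chance([1, 2, 3, 4, 5]) == 15
    assert score_yatzy([4, 4, 4, 4, 4]) == 50
    assert score_pair([3, 3, 5, 5, 6]) == 10        # highest pair wins: 5+5
    assert score_two_pairs([1, 1, 2, 2, 3]) == 6
    assert score_full_house([2, 2, 3, 3, 3]) == 13  # 2+2 pair, 3+3+3 triple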
``` |
{
"source": "JonJala/ESPS_sim",
"score": 3
} |
#### File: JonJala/ESPS_sim/between_family_ea_simulation.py
```python
import numpy as np
import pandas as pd
from scipy.stats import norm
import argparse
def calc_between_family_values(n, no_embryos, hsquared_bfsnp, eur_bf_rsquared):
"""
Purpose: to get the ghat_i and y_i for each family pair, where i={1,...,no_embryos}.
Arguments:
n: number of comparisons to make
no_embryos: number of embryos for each comparison
hsquared_bfsnp: naive "SNP heritability" (for between family comparisons)
eur_bf_rsquared: R^2 (ancestry-specific)
Returns:
{'ghat':df_ghat, 'educ':df_y}.
Each dataframe has size (n x no_embryos) and holds polygenic scores and phenotype values (unscaled), respectively.
"""
df_g = pd.DataFrame()
df_ghat = pd.DataFrame()
df_y = pd.DataFrame()
    # generate n values of ghat_i and y_i for each i. Note: g is used to calculate ghat and y.
for i in range(no_embryos):
df_g['g_{}'.format(i)] = np.random.normal(loc=0, scale=hsquared_bfsnp**0.5, size=int(n))
df_ghat['ghat_{}'.format(i)] = np.random.normal(loc=df_g['g_{}'.format(i)], \
scale=(hsquared_bfsnp**2/eur_bf_rsquared - hsquared_bfsnp)**0.5, size=int(n))
df_y['y_{}'.format(i)] = np.random.normal(loc=df_g['g_{}'.format(i)], \
scale=(1-hsquared_bfsnp)**0.5, size=int(n))
return {'ghat':df_ghat, 'y':df_y}
def get_random_index(no_embryos, no_observations):
"""
Purpose: to generate a list of random integers
Arguments:
no_embryos: max value of random integers generated, i.e. random integers will be generated in the range [0, no_embryos)
no_observations: number of random integers to return
Returns:
        random integers from the “discrete uniform” distribution of size no_observations and in the range of [0, no_embryos)
"""
return np.random.randint(no_embryos, size=int(no_observations))
def get_max_pgs_index(df_ghat):
"""
Purpose: to identify the column that holds the max polygenic score for each row
Arguments:
df_ghat: dataframe of polygenic scores, where each row is a new parent pair.
Returns:
series of indices of the highest polygenic score per parent pair
"""
return df_ghat.idxmax(axis=1).map(lambda x: int(x.lstrip('ghat_')))
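def _example_max_pgs_index():
    # Illustrative sketch (editor addition): two parent pairs, three embryos.
    # Hypothetical scores; returns pd.Series([1, 0]) because embryo 1 has the
    # highest PGS for the first pair and embryo 0 for the second.
    df_ghat = pd.DataFrame({'ghat_0': [0.1, 0.9],
                            'ghat_1': [0.5, 0.2],
                            'ghat_2': [0.3, 0.4]})
    return get_max_pgs_index(df_ghat)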
def select_embryos_by_index(df, index_of_embryos):
"""
Purpose: to select values of a dataframe using a valid set of indices
Arguments:
df: dataframe from which you want to select values
index_of_embryos: indices you are using to select values from df
Returns:
dataframe of selected values
"""
df_values = pd.DataFrame()
relevant_values = []
count = 0
# get relevant phenotype values (and ghat) based on index
for row in df.itertuples(index=False):
relevant_values.append(row[index_of_embryos[count]])
count += 1
df_values['selected_values'] = relevant_values
return df_values
def calc_phenotype_diffs(df_selected_embryos, df_random_embryos, sd_pheno):
"""
Purpose: to calculate the difference in educational attainment of a randomly selected
embryo vs. an embryo selected by highest PRS for educational attainment.
Arguments:
df_selected_embryos: dataframe of embryos selected by highest PRS
df_random_embryos: dataframe of randomly selected embryos
sd_pheno: standard deviation of education in a population
Returns:
dataframe of difference in education level (measured in years) between randomly selected embryos
and those selected on highest PRS
"""
return (df_selected_embryos - df_random_embryos)*sd_pheno
def process_arguments():
"""
Parses command line arguments.
Args:
-----
None
Returns:
--------
parser: :class: argparse.Namespace
arguments passed in from command line
"""
parser = argparse.ArgumentParser()
parser.add_argument("--n", default=1e6, type=float, \
help="Number of parent pairs to simulate. Defaults to 1e6.")
parser.add_argument("--embryos", default=10, type=int, \
help="Number of embryos from which to choose. Defaults to 10.")
parser.add_argument("--hsquared_bf", default=0.2, type=float, \
help="Naive heritability, or between-family heritability. Defaults to 0.2.")
parser.add_argument("--eur_bf_rsquared", default=0.1, type=float, \
help="Naive R^2, or between-family Rsquared for EUR ancestry. Defaults to 0.1.")
parser.add_argument("--sd_pheno", default=3.2, type=float, \
help="Standard deviation of phenotype of interest. Defaults to 3.2 for years of education.")
parser.add_argument("--scale_AMR", default=1.6, type=float, \
help="Factor to convert EUR R2 values to AMR. Defaults to 1.6.")
parser.add_argument("--scale_EAS", default=2.0, type=float, \
help="Factor to convert EUR R2 values to EAS. Defaults to 2.0.")
parser.add_argument("--scale_AFR", default=4.5, type=float, \
help="Factor to convert EUR R2 values to AFR. Defaults to 4.5.")
return parser.parse_args()
def main():
# import arguments
args = process_arguments()
# dictionary of rsquared (between family) values
DICT_RSQUARED_BF = {'EUR':args.eur_bf_rsquared,
'AMR':args.eur_bf_rsquared/args.scale_AMR,
'EAS':args.eur_bf_rsquared/args.scale_EAS,
'AFR':args.eur_bf_rsquared/args.scale_AFR}
for ancestry in DICT_RSQUARED_BF:
# calculate values using ancestry-specific rsquared
values = calc_between_family_values(args.n, args.embryos, args.hsquared_bf, DICT_RSQUARED_BF.get(ancestry))
# generate indices
rand_index = get_random_index(args.embryos, args.n)
max_index = get_max_pgs_index(values['ghat'])
# get max pheno values
max_y = select_embryos_by_index(values['y'], max_index)
# get random pheno values
rand_y = select_embryos_by_index(values['y'], rand_index)
# calculate difference
diffs_y = calc_phenotype_diffs(max_y, rand_y, args.sd_pheno)
mean = diffs_y['selected_values'].mean()
interval = diffs_y['selected_values'].std() * 1.96
print('For ' + str(ancestry) + ' ancestry, the between-family 95-percent prediction interval ' + \
'for the phenotype of interest is ' + '%.2f' % mean + ' +/- ' +'%.2f' % interval + '.')
if __name__ == "__main__":
main()
```
#### File: JonJala/ESPS_sim/within_family_simulation.py
```python
import numpy as np
import pandas as pd
from scipy.stats import norm
from between_family_ea_simulation import (
get_random_index,
get_max_pgs_index,
select_embryos_by_index,
calc_phenotype_diffs
)
import argparse
def calc_within_family_values(n, num_embryos, heritability, correlation_mz, rsquared):
"""
Purpose: To get the ghat_i and y_i for each family pair, where i={1,...,num_embryos}.
Arguments:
n: integer number of parent pairs
num_embryos: integer number of embryos for each parent pair
heritability: heritability of clinical trait
correlation_mz: twin correlation of clinical trait
rsquared: Ancestry-specific R^2 value for PGS prediction of trait
Returns:
{'pgs':df_pgs, 'liability':df_liability}.
Each dataframe has size (n x num_embryos) and holds polygenic scores and phenotype liability values, respectively.
"""
df_a = pd.DataFrame()
df_pgs = pd.DataFrame()
df_liability = pd.DataFrame()
# calculate average parent additive component
a_mean = np.random.normal(loc=0, scale=(heritability/2)**0.5, size=int(n))
# calculate shared environmental component (constant within-family)
c = np.random.normal(loc=0, scale=(correlation_mz - heritability)**0.5, size=int(n))
# calculate liability and PGS for clinical trait, for each embryo
for i in range(num_embryos):
# generate individual embryo additive components
df_a['a_{}'.format(i)] = np.random.normal(loc=a_mean, scale=(heritability/2)**0.5, size=int(n))
# calculate PGS
df_pgs['ghat_{}'.format(i)] = np.random.normal(loc=df_a['a_{}'.format(i)], \
scale=(heritability**2/rsquared - heritability)**0.5, size=int(n))
# calculate phenotypic liability
df_liability['liability_{}'.format(i)] = np.random.normal(loc=(df_a['a_{}'.format(i)] + c), \
scale=(1 - correlation_mz)**0.5, size=int(n))
return {'pgs':df_pgs, 'liability':df_liability}
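def _example_liability_variance():
    # Illustrative check (editor addition): under the model above, the
    # liability variance decomposes as h/2 (parental mean) + h/2 (segregation)
    # + (r_mz - h) (shared environment) + (1 - r_mz) (residual) = 1.
    # Parameter values are hypothetical; each liability column's sample
    # variance should be close to 1.
    vals = calc_within_family_values(n=100000, num_embryos=2,
                                     heritability=0.5, correlation_mz=0.7,
                                     rsquared=0.1)
    return vals['liability'].var()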
def process_arguments():
"""
Parses command line arguments.
Args:
-----
None
Returns:
--------
parser: :class: argparse.Namespace
arguments passed in from command line
"""
parser = argparse.ArgumentParser()
parser.add_argument('--n', default=1000000, type=int,
help='Number of parent pairs to simulate. Defaults to 1e6.')
parser.add_argument('--embryos', default=10, type=int,
help='Number of embryos from which to choose. Defaults to 10.')
parser.add_argument('--ancestry', required=True, type=str,
help='Ancestry of interest.')
parser.add_argument('--heritability', required=True, type=float, nargs='+',
help='List of heritabilities on the liability scale for conditions. \
Index of heritabilities must match index of rsquared, correlation_mz, and prevalence.')
parser.add_argument('--rsquared', required=True, type=float, nargs='+',
help='List of r2 for conditions. Index of r2 must match index of \
heritability, correlation_mz, and prevalence.')
parser.add_argument('--correlation_mz', required=True, type=float, nargs='+',
help='List of monozygotic twin correlations on the liability scale for conditions. \
Index of correlations must match index of rsquared, heritabilities, and prevalence.')
parser.add_argument('--prevalence', required=True, type=float, nargs='+',
help='List of prevalences for conditions. \
Index of prevalences must match index of rsquared, heritabilities, and correlations.')
parser.add_argument('--condition', required=True, type=str, nargs='+',
help='Name of conditions. \
Index must match index of rsquared, heritabilities, and correlations.')
return parser.parse_args()
if __name__ == "__main__":
# import arguments
args = process_arguments()
N = args.n
NUM_EMBRYOS = args.embryos
HERITABILITY = args.heritability # list
RSQUARED = args.rsquared # list
CORR_MZ = args.correlation_mz # list
PREVALENCE = args.prevalence # list
CONDITION = args.condition # list
# check lengths
    assert len(HERITABILITY) == len(RSQUARED), 'heritability and rsquared lists must be the same length!'
    assert len(HERITABILITY) == len(CORR_MZ), 'heritability and correlation_mz lists must be the same length!'
    assert len(HERITABILITY) == len(PREVALENCE), 'heritability and prevalence lists must be the same length!'
    assert len(HERITABILITY) == len(CONDITION), 'heritability and condition lists must be the same length!'
# print intro
print('This is a simulation for within-family selection of embryos.')
print('This analysis is for parents of ' + args.ancestry + ' ancestry who are choosing from ' + str(NUM_EMBRYOS) + ' embryos.')
    #### begin simulation
for i in range(len(args.condition)):
# calculate values
values = calc_within_family_values(n=N, num_embryos=NUM_EMBRYOS,
heritability=HERITABILITY[i], correlation_mz=CORR_MZ[i], rsquared=RSQUARED[i])
# generate indices
rand_index = get_random_index(NUM_EMBRYOS, N)
max_index = get_max_pgs_index(values['pgs'])
# get max/random liability values
max_liability = select_embryos_by_index(values['liability'], max_index)
rand_liability = select_embryos_by_index(values['liability'], rand_index)
# get fraction of individuals who will have disease (convert liability to binary trait)
max_frac = (max_liability <= norm.ppf(PREVALENCE[i])).astype(int).mean()
rand_frac = (rand_liability <= norm.ppf(PREVALENCE[i])).astype(int).mean()
# print summary
print('For ' + CONDITION[i] + ', the within-family prevalence is ' + str(rand_frac['selected_values']) + \
' in random embryos and ' + str(max_frac['selected_values']) + ' in selected embryos.')
``` |
{
"source": "JonJala/mama",
"score": 3
} |
#### File: JonJala/mama/core_mama.py
```python
import gc
from typing import Tuple
import numpy as np
# Functions ##################################
#################################
def create_omega_matrix(ldscores: np.ndarray, reg_ldscore_coefs: np.ndarray) -> np.ndarray:
"""
Creates the omega matrix for each SNP. Assumes the PxP submatrices in the ldscores and the
PxP matrix of LD regression coefficients have the same ordering of corresponding ancestries.
:param ldscores: (Mx)PxP symmetric matrices containing LD scores (PxP per SNP)
:param reg_ldscore_coefs: PxP symmetric matrix containing LD score regression coefficients
:return: The Omega matrices as indicated in the MAMA paper (PxP per SNP) = (Mx)PxP
"""
# Multiply PxP slices of LD scores with the regression coefficients component-wise
return reg_ldscore_coefs * ldscores
#################################
def tweak_omega(omega_slice: np.ndarray) -> np.ndarray:
"""
Tweaks the off-diagonal elements of a non positive semi-definite omega matrix to make it
positive semi-definite. This assumes that necessary checks are done ahead of time to ensure
this method will converge (e.g. all diagonal elements must be positive)
:param omega_slice: PxP symmetric Omega matrix
:return np.ndarray: A modified omega that is now positive semi-definite
"""
# First get the component-wise square root of the diagonal
omega_diag = np.diag(omega_slice).copy()
omega_sqrt_diag = np.sqrt(omega_diag)
# Clamp off diagonal elements to values based on product of the corresponding diagonal entries
omega_slice = np.minimum(np.outer(omega_sqrt_diag, omega_sqrt_diag), omega_slice)
# Then, scale down off-diagonal elements until positive semi-definite
d_indices = np.diag_indices_from(omega_slice)
while np.any(np.linalg.eigvalsh(omega_slice) < 0.0):
omega_slice *= 0.99
omega_slice[d_indices] = omega_diag
return omega_slice
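def _example_tweak_omega():
    # Illustrative sketch (editor addition): a 2x2 "omega" whose off-diagonal
    # element (1.5) exceeds the geometric mean of the diagonal entries, so the
    # matrix is not PSD (eigenvalues 1 +/- 1.5). tweak_omega clamps the
    # off-diagonal to 1.0, which is already PSD here.
    bad_omega = np.array([[1.0, 1.5],
                          [1.5, 1.0]])
    fixed = tweak_omega(bad_omega)
    return np.all(np.linalg.eigvalsh(fixed) >= 0.0)  # True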
#################################
def qc_omega(omega: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
"""
Runs checks over the omega matrices for positive-semi-definiteness. Tweaks omega where possible
to correct for non-positive-semi-definiteness and returns an array of length M
(where M = number of SNPs) along the SNP axis (the first dimension of the MxPxP omega)
where True indicates positive semi-definiteness and False indicates
non-positive semi-definiteness
:param omega: MxPxP matrix for Omega values
:return: Tuple containing:
1) Array of length M where True indicates positive semi-definiteness and False
indicates non-positive semi-definiteness
2) Array of length M where True indicates the omega was tweaked to make it
positive semi-definite (False otherwise)
"""
# Create result vectors of length M, all values defaulting to False
M = omega.shape[0]
pos_semi_def_indices = np.full(M, False)
tweaked_omega_indices = np.full(M, False)
    # Iterate over the M PxP matrices of omega
for i in range(M):
omega_slice = omega[i, :, :]
# Check for positive semi-definiteness (if PSD, set to True and move on)
if np.all(np.linalg.eigvalsh(omega_slice) >= 0.0):
pos_semi_def_indices[i] = True
continue
# If diagonal entries aren't positive, move on
if np.any(np.diag(omega_slice) <= 0.0):
continue
        # We can try to tweak this slice of omega to become positive semi-definite
omega[i, :, :] = tweak_omega(omega_slice)
pos_semi_def_indices[i] = True
tweaked_omega_indices[i] = True
return pos_semi_def_indices, tweaked_omega_indices
#################################
def create_sigma_matrix(sumstat_ses, reg_se2_coefs, reg_const_coefs):
"""
Creates the sigma matrix for each SNP. Assumes the PxP submatrices in the ldscores and the
PxP matrix of LD regression coefficients have the same ordering of corresponding ancestries.
    :param sumstat_ses: Standard errors for the SNPs for each population (M x P matrix)
:param reg_se2_coefs: PxP symmetric matrix containing SE^2 regression coefficients
:param reg_const_coefs: PxP symmetric matrix containing Constant term regression coefficients
:return: The Sigma matrices as indicated in the MAMA paper (PxP per SNP) = (Mx)PxP
"""
# Get values for M and P (used to keep track of slices / indices / broadcasting)
M = sumstat_ses.shape[0]
P = sumstat_ses.shape[1]
# Create an MxPxP matrix with each PxP slice initially equal to reg_const_coefs
result_matrix = np.full(shape=(M, P, P), fill_value=reg_const_coefs)
# Create an M X P matrix, whose rows of length P will need to be added to the diagonals
# of the PxP slices in the final result
se_diags_as_matrix = sumstat_ses * sumstat_ses * np.diag(reg_se2_coefs)
# Broadcast-add the rows of the SE term matrix to the diagonals of slices of the result matrix
d_indices = np.arange(P)
result_matrix[:, d_indices, d_indices] += se_diags_as_matrix
return result_matrix
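def _example_create_sigma():
    # Illustrative sketch (editor addition): M=2 SNPs, P=2 populations, with
    # hypothetical standard errors and coefficient matrices. Each PxP slice
    # of the result equals reg_const_coefs with se^2 * diag(reg_se2_coefs)
    # added to its diagonal, e.g. sigma[0, 0, 0] == 0.5 + (0.1**2) * 1.0.
    ses = np.array([[0.1, 0.2],
                    [0.3, 0.4]])
    se2_coefs = np.array([[1.0, 0.0],
                          [0.0, 2.0]])
    const_coefs = np.array([[0.5, 0.1],
                            [0.1, 0.5]])
    return create_sigma_matrix(ses, se2_coefs, const_coefs)  # shape (2, 2, 2)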
#################################
def qc_sigma(sigma: np.ndarray) -> np.ndarray:
"""
Runs checks over the sigma matrices for positive-definiteness. Returns an array of length M
(where M = number of SNPs) along the SNP axis (the first dimension of the MxPxP sigma)
where True indicates positive definiteness and False indicates non-positive definiteness
:param sigma: MxPxP matrix for Sigma values
:return np.ndarray: Array of length M where True indicates positive definiteness and False
indicates non-positive definiteness
"""
# Create result vector of length M, all values defaulting to False
M = sigma.shape[0]
result = np.full(M, False)
# Iterate over the M PxP matrices of sigma
for i in range(M):
sigma_slice = sigma[i, :, :]
try:
np.linalg.cholesky(sigma_slice)
result[i] = True
except np.linalg.LinAlgError:
# If not positive definite, then the Cholesky decomposition raises a LinAlgError
pass
return result
#################################
def run_mama_method(betas, omega, sigma):
"""
Runs the core MAMA method to combine results and generate final, combined summary statistics
:param betas: MxP matrix of beta values (M = # of SNPs, P = # of ancestries)
:param omega: MxPxP matrix of omega values (M = # of SNPs, P = # of ancestries)
:param sigma: MxPxP matrix of sigma values (M = # of SNPs, P = # of ancestries)
:return: Tuple containing:
1) Result ndarray of betas (MxP) where M = SNPs and P = populations
2) Result ndarray of beta standard errors (MxP) where M = SNPs and P = populations
"""
# Get values for M and P (used to keep track of slices / indices / broadcasting)
M, P, *extra_dimensions = omega.shape # pylint: disable=unused-variable
# Create a 3D matrix, M rows of Px1 column vectors with shape (M, P, 1)
d_indices = np.arange(P)
omega_diag = omega[:, d_indices, d_indices][:, :, np.newaxis]
omega_pp_scaled = np.divide(omega, omega_diag) # Slice rows are Omega'_pjj / omega_pp,j
# Produce center matrix in steps (product of omega terms, add omega and sigma, then invert)
center_matrix_inv = -omega_pp_scaled[:, :, :, np.newaxis] * omega[:, :, np.newaxis, :]
center_matrix_inv += omega[:, np.newaxis, :, :] + sigma[:, np.newaxis, :, :] # Broadcast add
center_matrix = np.linalg.inv(center_matrix_inv) # Inverts each slice separately
del center_matrix_inv # Clean up the inverse matrix to free space
gc.collect()
# Calculate (Omega'_p,j/omega_pp,j) * center_matrix
left_product = np.matmul(omega_pp_scaled[:, :, np.newaxis, :], center_matrix)
del center_matrix # Clean up the center matrix to free space
gc.collect()
# Calculate denominator (M x P x 1 x 1)
denom = np.matmul(left_product,
np.transpose(omega_pp_scaled[:, :, np.newaxis, :], (0, 1, 3, 2)))
denom_recip = np.reciprocal(denom)
denom_recip_view = denom_recip.view()
denom_recip_view.shape = (M, P)
# Calculate numerator (M x P x 1 x 1))
left_product_view = left_product.view().reshape(M, P, P)
numer = np.matmul(left_product_view, betas[:, :, np.newaxis])
numer_view = numer.view().reshape(M, P)
# Calculate result betas and standard errors
new_betas = denom_recip_view * numer_view
new_beta_ses = np.sqrt(denom_recip_view)
return new_betas, new_beta_ses
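def _example_run_mama_method():
    # Illustrative sketch (editor addition): a tiny M=1, P=2 call with
    # identity-like omega/sigma slices, just to show the expected shapes.
    # All inputs are hypothetical.
    betas = np.array([[0.1, 0.2]])
    omega = np.eye(2)[np.newaxis, :, :]
    sigma = 0.5 * np.eye(2)[np.newaxis, :, :]
    new_betas, new_ses = run_mama_method(betas, omega, sigma)
    return new_betas.shape, new_ses.shape  # both (1, 2)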
```
#### File: mama/legacy/mama_ldcalc.py
```python
from __future__ import division
from __future__ import absolute_import
from builtins import range
from builtins import object
import numpy as np
import bitarray as ba
import logging
def block_width(args,geno_array,array_snps):
x = np.array((args.ld_wind_snps, args.ld_wind_kb, args.ld_wind_cm), dtype=bool)
if np.sum(x) != 1:
raise ValueError('Must specify exactly one --ld-wind option')
if args.ld_wind_snps:
max_dist = args.ld_wind_snps
coords = np.array(list(range(geno_array.m)))
elif args.ld_wind_kb:
max_dist = args.ld_wind_kb*1000
coords = np.array(array_snps.df['BP'])[geno_array.kept_snps]
elif args.ld_wind_cm:
max_dist = args.ld_wind_cm
coords = np.array(array_snps.df['CM'])[geno_array.kept_snps]
return max_dist, coords
def getBlockLefts(coords, max_dist):
'''
    Converts coordinates + max block length to a list of coordinates of the leftmost
SNPs to be included in blocks.
Parameters
----------
coords : array
Array of coordinates. Must be sorted.
max_dist : float
Maximum distance between SNPs included in the same window.
Returns
-------
block_left : 1D np.ndarray with same length as coords
        block_left[j] := min{k | dist(j, k) <= max_dist} (the loop below excludes with a strict ">", so equality is included).
'''
M = len(coords)
j = 0
block_left = np.zeros(M)
for i in range(M):
while j < M and abs(coords[j] - coords[i]) > max_dist:
j += 1
block_left[i] = j
return block_left
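def _example_block_lefts():
    # Illustrative check (editor addition) with hypothetical coordinates.
    # Distances exactly equal to max_dist are included (the loop above
    # excludes with a strict ">").
    coords = np.array([1, 4, 6, 7, 9])
    return getBlockLefts(coords, max_dist=3)  # array([0., 0., 1., 1., 2.])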
def getBlockM(coords, max_dist):
'''
Converts coordinates + max block length to total number of SNPs tagged by the index j
Parameters
----------
coords : array
Array of coordinates. Must be sorted.
max_dist : float
Maximum distance between SNPs included in the same window.
Returns
-------
block_size : 1D np.ndarray with same length as coords
block_size[j] := max{l | l > j, dist(j,l) < max_dist} - min{k | k < j, dist(k,j) < max_dist}.
'''
obj_mat = np.tile(coords.reshape(-1,1), coords.shape[0])
thres_mat_l = (obj_mat-float(max_dist)).T
thres_mat_h = (obj_mat+float(max_dist)).T
block_mat = np.logical_and(obj_mat>thres_mat_l, obj_mat<thres_mat_h)
block_size = np.sum(block_mat, axis = 1)
return block_size
def l2_unbiased(x, n):
sq = np.square(x)
return sq - (1-sq) / (n-2)
def std_geno_old(geno, ances_index):
ances_geno = geno[ances_index,:]
avg = np.mean(ances_geno, axis=0)
sd = np.std(ances_geno, axis=0)
return (ances_geno-avg) / sd
def std_geno(geno, ances_index):
# Modified version of standardization that drops the SNP with zero sd.
ances_geno = geno[ances_index,:]
avg = np.mean(ances_geno, axis=0)
sd = np.std(ances_geno, axis=0)
out = np.zeros_like(ances_geno)
out[:,np.where(sd!=0)[0]]=(ances_geno[:,np.where(sd!=0)[0]] - avg[np.where(sd!=0)[0]])/sd[np.where(sd!=0)[0]]
return out
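def _example_std_geno():
    # Illustrative check (editor addition): the second SNP below is
    # monomorphic (sd == 0) in the selected individuals, so its column is
    # left all-zero instead of turning into NaNs as std_geno_old would.
    geno = np.array([[0., 1.],
                     [2., 1.],
                     [1., 1.]])
    return std_geno(geno, ances_index=np.array([0, 1, 2]))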
def scale_trans(A, B, ances_index, exp):
"""
Apply the pq-exp scaling.
0: standardized genotype scale (default)
1: per-allele scale
exp: specified scaling factor on the exponent
"""
A_geno = A[ances_index,:]
B_geno = B[ances_index,:]
A_avg = np.mean(A_geno, axis=0)
B_avg = np.mean(B_geno, axis=0)
A_std = np.std(A_geno, axis=0)
B_std = np.std(B_geno, axis=0)
A_factor = np.power(A_std**2, -0.5)
B_factor = np.power(B_std**2, (exp/2)-0.5)
assert len(A_std.shape) == len(B_std.shape)
A_homo = np.sum(A_std==0)
B_homo = np.sum(B_std==0)
A_new = (A_geno - A_avg) * A_factor
B_new = (B_geno - B_avg) * B_factor
return (A_new, B_new)
class __GenotypeArrayInMemory__(object):
'''
Parent class for various classes containing interfaces for files with genotype
matrices, e.g., plink .bed files, etc
'''
def __init__(self, fname, n, snp_list, keep_snps=None, keep_indivs=None, mafMin=None):
self.m = len(snp_list.IDList)
self.n = n
self.keep_snps = keep_snps
self.keep_indivs = keep_indivs
self.df = np.array(snp_list.df[['CHR', 'SNP', 'BP', 'CM']])
self.colnames = ['CHR', 'SNP', 'BP', 'CM']
self.mafMin = mafMin if mafMin is not None else 0
self._currentSNP = 0
(self.nru, self.geno) = self.__read__(fname, self.m, n)
# filter individuals
if keep_indivs is not None:
keep_indivs = np.array(keep_indivs, dtype='int')
if np.any(keep_indivs > self.n):
raise ValueError('keep_indivs indices out of bounds')
(self.geno, self.m, self.n) = self.__filter_indivs__(self.geno, keep_indivs, self.m,
self.n)
if self.n > 0:
logging.info('After filtering, {n} individuals remain'.format(n=self.n))
else:
raise ValueError('After filtering, no individuals remain')
# filter SNPs
if keep_snps is not None:
keep_snps = np.array(keep_snps, dtype='int')
if np.any(keep_snps > self.m): # if keep_snps is None, this returns False
raise ValueError('keep_snps indices out of bounds')
(self.geno, self.m, self.n, self.kept_snps, self.freq) = self.__filter_snps_maf__(
self.geno, self.m, self.n, self.mafMin, keep_snps)
if self.m > 0:
logging.info('After filtering, {m} SNPs remain'.format(m=self.m))
else:
raise ValueError('After filtering, no SNPs remain')
self.df = self.df[self.kept_snps, :]
self.maf = np.minimum(self.freq, np.ones(self.m)-self.freq)
self.sqrtpq = np.sqrt(self.freq*(np.ones(self.m)-self.freq))
self.df = np.c_[self.df, self.maf]
self.colnames.append('MAF')
def __read__(self, fname, m, n):
raise NotImplementedError
    def __filter_indivs__(self, geno, keep_indivs, m, n):
        raise NotImplementedError
    def __filter_maf_(self, geno, m, n, maf):
        raise NotImplementedError
def ldScoreVarBlocks(self, block_left, c, annot=None):
'''Computes an unbiased estimate of L2(j) for j=1,..,M.'''
func = lambda x: self.__l2_unbiased__(x, self.n)
snp_getter = self.nextSNPs
return self.__corSumVarBlocks__(block_left, c, func, snp_getter, annot)
def ldScoreBlockJackknife(self, block_left, c, annot=None, jN=10):
func = lambda x: np.square(x)
snp_getter = self.nextSNPs
return self.__corSumBlockJackknife__(block_left, c, func, snp_getter, annot, jN)
def __l2_unbiased__(self, x, n):
denom = n-2 if n > 2 else n # allow n<2 for testing purposes
sq = np.square(x)
return sq - (1-sq) / denom
# general methods for calculating sums of Pearson correlation coefficients
def __corSumVarBlocks__(self, block_left, c, func, snp_getter, annot=None):
'''
Parameters
----------
block_left : np.ndarray with shape (M, )
block_left[i] = index of leftmost SNP included in LD Score of SNP i.
if c > 1, then only entries that are multiples of c are examined, and it is
assumed that block_left[a*c+i] = block_left[a*c], except at
the beginning of the chromosome where the 0th SNP is included in the window.
c : int
Chunk size.
func : function
Function to be applied to the genotype correlation matrix. Before dotting with
annot. Examples: for biased L2, np.square. For biased L4,
lambda x: np.square(np.square(x)). For L1, lambda x: x.
snp_getter : function(int)
The method to be used to get the next SNPs (normalized genotypes? Normalized
genotypes with the minor allele as reference allele? etc)
annot: numpy array with shape (m,n_a)
SNP annotations.
Returns
-------
cor_sum : np.ndarray with shape (M, num_annots)
Estimates.
'''
m, n = self.m, self.n
block_sizes = np.array(np.arange(m) - block_left)
block_sizes = np.ceil(block_sizes / c)*c
if annot is None:
annot = np.ones((m, 1))
else:
annot_m = annot.shape[0]
if annot_m != self.m:
raise ValueError('Incorrect number of SNPs in annot')
n_a = annot.shape[1] # number of annotations
cor_sum = np.zeros((m, n_a))
# b = index of first SNP for which SNP 0 is not included in LD Score
b = np.nonzero(block_left > 0)
if np.any(b):
b = b[0][0]
else:
b = m
b = int(np.ceil(b/c)*c) # round up to a multiple of c
if b > m:
c = 1
b = m
l_A = 0 # l_A := index of leftmost SNP in matrix A
A = snp_getter(b)
rfuncAB = np.zeros((b, c))
rfuncBB = np.zeros((c, c))
# chunk inside of block
for l_B in range(0, b, c): # l_B := index of leftmost SNP in matrix B
B = A[:, l_B:l_B+c]
np.dot(A.T, B / n, out=rfuncAB)
rfuncAB = func(rfuncAB)
cor_sum[l_A:l_A+b, :] += np.dot(rfuncAB, annot[l_B:l_B+c, :])
# chunk to right of block
b0 = b
md = int(c*np.floor(m/c))
end = md + 1 if md != m else md
for l_B in range(b0, end, c):
# check if the annot matrix is all zeros for this block + chunk
# this happens w/ sparse categories (i.e., pathways)
# update the block
old_b = b
b = int(block_sizes[l_B])
if l_B > b0 and b > 0:
# block_size can't increase more than c
# block_size can't be less than c unless it is zero
# both of these things make sense
A = np.hstack((A[:, old_b-b+c:old_b], B))
l_A += old_b-b+c
elif l_B == b0 and b > 0:
A = A[:, b0-b:b0]
l_A = b0-b
elif b == 0: # no SNPs to left in window, e.g., after a sequence gap
A = np.array(()).reshape((n, 0))
l_A = l_B
if l_B == md:
c = m - md
rfuncAB = np.zeros((b, c))
rfuncBB = np.zeros((c, c))
if b != old_b:
rfuncAB = np.zeros((b, c))
B = snp_getter(c)
p1 = np.all(annot[l_A:l_A+b, :] == 0)
p2 = np.all(annot[l_B:l_B+c, :] == 0)
if p1 and p2:
continue
np.dot(A.T, B / n, out=rfuncAB)
rfuncAB = func(rfuncAB)
cor_sum[l_A:l_A+b, :] += np.dot(rfuncAB, annot[l_B:l_B+c, :])
cor_sum[l_B:l_B+c, :] += np.dot(annot[l_A:l_A+b, :].T, rfuncAB).T
np.dot(B.T, B / n, out=rfuncBB)
rfuncBB = func(rfuncBB)
cor_sum[l_B:l_B+c, :] += np.dot(rfuncBB, annot[l_B:l_B+c, :])
return cor_sum
class PlinkBEDFile(__GenotypeArrayInMemory__):
'''
Interface for Plink .bed format
'''
def __init__(self, fname, n, snp_list, keep_snps=None, keep_indivs=None, mafMin=None):
self._bedcode = {
2: ba.bitarray('11'),
9: ba.bitarray('10'),
1: ba.bitarray('01'),
0: ba.bitarray('00')
}
__GenotypeArrayInMemory__.__init__(self, fname, n, snp_list, keep_snps=keep_snps,
keep_indivs=keep_indivs, mafMin=mafMin)
def __read__(self, fname, m, n):
if not fname.endswith('.bed'):
raise ValueError('.bed filename must end in .bed')
fh = open(fname, 'rb')
magicNumber = ba.bitarray(endian="little")
magicNumber.fromfile(fh, 2)
bedMode = ba.bitarray(endian="little")
bedMode.fromfile(fh, 1)
e = (4 - n % 4) if n % 4 != 0 else 0
nru = n + e
self.nru = nru
# check magic number
if magicNumber != ba.bitarray('0011011011011000'):
raise IOError("Magic number from Plink .bed file not recognized")
if bedMode != ba.bitarray('10000000'):
raise IOError("Plink .bed file must be in default SNP-major mode")
# check file length
self.geno = ba.bitarray(endian="little")
self.geno.fromfile(fh)
self.__test_length__(self.geno, self.m, self.nru)
return (self.nru, self.geno)
def __test_length__(self, geno, m, nru):
exp_len = 2*m*nru
real_len = len(geno)
if real_len != exp_len:
s = "Plink .bed file has {n1} bits, expected {n2}"
raise IOError(s.format(n1=real_len, n2=exp_len))
def __filter_indivs__(self, geno, keep_indivs, m, n):
n_new = len(keep_indivs)
e = (4 - n_new % 4) if n_new % 4 != 0 else 0
nru_new = n_new + e
nru = self.nru
z = ba.bitarray(m*2*nru_new, endian="little")
z.setall(0)
for e, i in enumerate(keep_indivs):
z[2*e::2*nru_new] = geno[2*i::2*nru]
z[2*e+1::2*nru_new] = geno[2*i+1::2*nru]
self.nru = nru_new
return (z, m, n_new)
def __filter_snps_maf__(self, geno, m, n, mafMin, keep_snps):
'''
Credit to <NAME> and the Plink2 developers for this algorithm
Modified from plink_filter.c
https://github.com/chrchang/plink-ng/blob/master/plink_filter.c
Genotypes are read forwards (since we are cheating and using endian="little")
A := (genotype) & 1010...
B := (genotype) & 0101...
C := (A >> 1) & B
Then
a := A.count() = missing ct + hom major ct
b := B.count() = het ct + hom major ct
c := C.count() = hom major ct
Which implies that
missing ct = a - c
# of indivs with nonmissing genotype = n - a + c
major allele ct = b + c
major allele frequency = (b+c)/(2*(n-a+c))
het ct + missing ct = a + b - 2*c
Why does bitarray not have >> ????
'''
nru = self.nru
m_poly = 0
y = ba.bitarray()
if keep_snps is None:
keep_snps = list(range(m))
kept_snps = []
freq = []
        # troubleshooting for unexpected kept_snps
shoot = []
shoot_major_ct = []
shoot_nomiss = []
shoot_freq = []
shoot_het_miss_ct = []
for e, j in enumerate(keep_snps):
z = geno[2*nru*j:2*nru*(j+1)]
A = z[0::2]
a = A.count()
B = z[1::2]
b = B.count()
c = (A & B).count()
major_ct = b + c # number of copies of the major allele
n_nomiss = n - a + c # number of individuals with nonmissing genotypes
f = major_ct / (2*n_nomiss) if n_nomiss > 0 else 0
het_miss_ct = a+b-2*c # remove SNPs that are only either het or missing
if np.minimum(f, 1-f) > mafMin and het_miss_ct < n:
#if het_miss_ct < n:
freq.append(f)
y += z
m_poly += 1
kept_snps.append(j)
else:
shoot.append(j)
shoot_major_ct.append(major_ct)
shoot_nomiss.append(n_nomiss)
shoot_freq.append(f)
shoot_het_miss_ct.append(het_miss_ct)
return (y, m_poly, n, kept_snps, freq)
# return (y, m_poly, n, kept_snps, freq, shoot)
def nextSNPs(self, b, minorRef=None):
'''
        Unpacks the binary array of genotypes and returns an n x b matrix of floats for
        the next b SNPs, where n := number of samples.
Parameters
----------
b : int
Number of SNPs to return.
minorRef: bool, default None
Should we flip reference alleles so that the minor allele is the reference?
(This is useful for computing l1 w.r.t. minor allele).
Returns
-------
        X : np.array with dtype float64 with shape (n, b), where n := number of samples
            Matrix of genotypes with missing values imputed to the SNP mean. In this
            legacy version the mean-centering / variance normalization is left
            disabled (see the note in the loop below), so minorRef currently has
            no effect on the returned values.
'''
try:
b = int(b)
if b <= 0:
raise ValueError("b must be > 0")
except TypeError:
raise TypeError("b must be an integer")
if self._currentSNP + b > self.m:
s = '{b} SNPs requested, {k} SNPs remain'
raise ValueError(s.format(b=b, k=(self.m-self._currentSNP)))
c = self._currentSNP
n = self.n
nru = self.nru
        geno_slice = self.geno[2*c*nru:2*(c+b)*nru]  # renamed to avoid shadowing the builtin "slice"
        X = np.array(geno_slice.decode(self._bedcode), dtype="float64").reshape((b, nru)).T
X = X[0:n, :]
Y = np.zeros(X.shape)
for j in range(0, b):
newsnp = X[:, j]
ii = newsnp != 9
avg = np.mean(newsnp[ii])
newsnp[np.logical_not(ii)] = avg
denom = np.std(newsnp)
if denom == 0:
denom = 1
if minorRef is not None and self.freq[self._currentSNP + j] > 0.5:
denom = denom*-1
            # NOTE: normalization is left disabled here (the standardized assignment
            # was commented out in the original); per-ancestry standardization
            # appears to be handled by std_geno() instead.
            Y[:, j] = newsnp
self._currentSNP += b
return Y
def block_left_to_right(block_left):
'''
Converts block lefts to block rights.
Parameters
----------
block_left : array
Array of block lefts.
Returns
-------
block_right : 1D np.ndarray with same length as block_left
block_right[j] := max {k | block_left[k] <= j}
'''
M = len(block_left)
j = 0
block_right = np.zeros(M)
for i in range(M):
while j < M and block_left[j] <= i:
j += 1
block_right[i] = j
return block_right
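def _example_block_right():
    # Illustrative check (editor addition): block_right[j] is one past the
    # index of the last SNP whose window still includes SNP j. The input is
    # the hypothetical output of _example_block_lefts in mama_ldcalc.py.
    return block_left_to_right(np.array([0, 0, 1, 1, 2]))  # [2., 4., 5., 5., 5.]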
```
#### File: mama/legacy/mama_parse.py
```python
from __future__ import division
from __future__ import absolute_import
from builtins import object
import numpy as np
import pandas as pd
import os
import logging
def get_compression(fh):
'''Which sort of compression should we use with read_csv?'''
if fh.endswith('gz'):
compression = 'gzip'
elif fh.endswith('bz2'):
compression = 'bz2'
else:
compression = None
return compression
def __filter__(fname, noun, verb, merge_obj):
merged_list = None
if fname:
f = lambda x,n: x.format(noun=noun, verb=verb, fname=fname, num=n)
x = FilterFile(fname)
c = 'Read list of {num} {noun} to {verb} from {fname}'
logging.info(f(c, len(x.IDList)))
merged_list = merge_obj.loj(x.IDList)
len_merged_list = len(merged_list)
if len_merged_list == 0:
error_msg = 'No {noun} retained for analysis'
raise ValueError(f(error_msg, 0))
#else:
#c = 'After merging, {num} {noun} remain'
#logging.info(f(c, len_merged_list))
return merged_list
def __ID_List_Factory__(colnames, keepcol, fname_end, header=None, usecols=None):
class IDContainer(object):
def __init__(self, fname):
self.__usecols__ = usecols
self.__colnames__ = colnames
self.__keepcol__ = keepcol
self.__fname_end__ = fname_end
self.__header__ = header
self.__read__(fname)
self.n = len(self.df)
def __read__(self, fname):
end = self.__fname_end__
if end and not fname.endswith(end):
                raise ValueError('filename must end in {f}'.format(f=end))
comp = get_compression(fname)
self.df = pd.read_csv(fname, header=self.__header__, usecols=self.__usecols__,
delim_whitespace=True, compression=comp)
if self.__colnames__:
self.df.columns = self.__colnames__
if self.__keepcol__ is not None:
self.IDList = self.df.iloc[:, [self.__keepcol__]].astype('object')
def loj(self, externalDf):
            '''Returns indices of those elements of self.IDList that appear in externalDf.'''
r = externalDf.columns[0]
l = self.IDList.columns[0]
merge_df = externalDf.iloc[:, [0]]
merge_df['keep'] = True
z = pd.merge(self.IDList, merge_df, how='left', left_on=l, right_on=r,
sort=False)
ii = z['keep'] == True
return np.nonzero(list(ii))[0]
return IDContainer
PlinkBIMFile = __ID_List_Factory__(['CHR', 'SNP', 'CM', 'BP', 'A1', 'A2'], 1, '.bim', usecols=[0, 1, 2, 3, 4, 5])
PlinkFAMFile = __ID_List_Factory__(['IID'], 0, '.fam', usecols=[1])
FilterFile = __ID_List_Factory__(['ID'], 0, None, usecols=[0])
AnnotFile = __ID_List_Factory__(None, 2, None, header=0, usecols=None)
ThinAnnotFile = __ID_List_Factory__(None, None, None, header=0, usecols=None)
```
#### File: JonJala/mama/mama.py
```python
import argparse as argp
import contextlib
import glob
import io
import itertools
import logging
import os
import re
import sys
from typing import Any, Callable, Dict, List, Tuple
import numpy as np
import pandas as pd
from mama_pipeline import (MAMA_REQ_STD_COLS, MAMA_RE_EXPR_MAP, MAMA_STD_FILTERS,
DEFAULT_MAF_MIN, DEFAULT_MAF_MAX, FREQ_FILTER, CHR_FILTER,
SNP_PALIN_FILT, DEFAULT_CHR_LIST, mama_pipeline, PopulationId,
write_sumstats_to_file)
from reg_mama import (MAMA_REG_OPT_ALL_FREE, MAMA_REG_OPT_ALL_ZERO, MAMA_REG_OPT_OFFDIAG_ZERO,
MAMA_REG_OPT_IDENT, MAMA_REG_OPT_SET_CORR)
from util.df import determine_column_mapping, Filter
from util.sumstats import SNP_COL, create_chr_filter, create_freq_filter
# Constants / Parameters / Types #############
ParserFunc = Callable[[str], argp.ArgumentParser]
# Software version
__version__ = '1.0.0'
# Email addresses to use in header banner to denote contacts
SOFTWARE_CORRESPONDENCE_EMAIL1 = "<EMAIL>"
SOFTWARE_CORRESPONDENCE_EMAIL2 = "<EMAIL>"
OTHER_CORRESPONDENCE_EMAIL = "<EMAIL>"
# The default short file prefix to use for output and logs
DEFAULT_SHORT_PREFIX = "mama"
# Constants used for labeling output files
RESULTS_SUFFIX = ".res"
HARMONIZED_SUFFIX = ".hrm"
LD_COEF_SUFFIX = "reg.cf"
# Separator used to pass in triples of summary stats file, ancestry, and phenotype
# Note: Do not make this whitespace! (it will negatively affect parsing)
INPUT_TRIPLE_SEP = ","
# Dictionary keys for internal usage
OUT_DIR = "output_directory"
OUT_PREFIX = "output_prefix"
ANCESTRIES = "ancestries"
RE_MAP = "re_map"
COL_MAP = "col_map"
FILTER_MAP = "filter_map"
SUMSTATS_MAP = "sumstats_map"
REG_LD_COEF_OPT = "regression_ld_option"
REG_SE2_COEF_OPT = "regression_se2_option"
REG_INT_COEF_OPT = "regression_intercept_option"
REG_LD_COEF_SCALE_COEF = "regression_ld_scale_factor"
HARM_FILENAME_FSTR = "harmonized_sumstats_filename_format_str"
REG_FILENAME_FSTR = "regression_coef_filename_format_str"
# Correlation scaling factor min and max
CORR_MIN_SCALING = -1.0
CORR_MAX_SCALING = 1.0
# Derived Constants###########################
# Dictionaries that create and map argparse flags to the corresponding column affected
MAMA_RE_REPLACE_FLAGS = {col : "replace-%s-col-match" % col.lower()
for col in MAMA_RE_EXPR_MAP}
MAMA_RE_ADD_FLAGS = {col : "add-%s-col-match" % col.lower()
for col in MAMA_RE_EXPR_MAP}
# Default prefix to use for output when not specified
DEFAULT_FULL_OUT_PREFIX = "%s/%s" % (os.getcwd(), DEFAULT_SHORT_PREFIX)
# Logging banner to use at the top of the log file
HEADER = """
<><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><>
<>
<> MAMA: Multi-Ancestry Meta-Analysis
<> Version: %s
<> (C) 2020 Social Science Genetic Association Consortium (SSGAC)
<> MIT License
<>
<><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><>
<> Software-related correspondence: %s or %s
<> All other correspondence: %s
<><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><><>
""" % (__version__, SOFTWARE_CORRESPONDENCE_EMAIL1, SOFTWARE_CORRESPONDENCE_EMAIL2,
OTHER_CORRESPONDENCE_EMAIL)
# Functions ##################################
#################################
def numpy_err_handler(err: str, flag: bytes):
"""
Function that numpy should call when an error occurs. This is used to ensure that any errors
are also logged, as opposed to just going to stderr and not being collected in the log
:param err: String describing the error
:param flag: A byte describing the error (see numpy.seterrcall() docs)
"""
logging.error("Received Numpy error: %s (%s)", err, flag)
#################################
def reg_ex(s_input: str) -> str:
"""
Used for parsing some inputs to this program, namely regular expressions given as input.
Whitespace is removed, but no case-changing occurs.
:param s_input: String passed in by argparse
:return str: The regular expression
"""
stripped_regex = s_input.strip()
try:
re.compile(stripped_regex)
except re.error as exc:
raise RuntimeError("Invalid regular expression \"%s\" supplied: %s" %
(stripped_regex, exc)) from exc
return stripped_regex
def input_file(s_input: str) -> str:
"""
Used for parsing some inputs to this program, namely filenames given as input.
Whitespace is removed, but no case-changing occurs. Existence of the file is verified.
:param s_input: String passed in by argparse
:return str: The filename
"""
stripped_file = s_input.strip()
if not os.path.exists(stripped_file):
raise FileNotFoundError("The input file [%s] does not exist." % stripped_file)
return os.path.abspath(stripped_file)
def output_prefix(s_input: str) -> str:
"""
Used for parsing some inputs to this program, namely full file prefixes used for output.
Whitespace is removed, but no case-changing occurs.
:param s_input: String passed in by argparse
:return str: The prefix
"""
stripped_p = s_input.strip()
# Validate existence of output directory (and that no conflicts exist)
if os.path.exists(stripped_p):
raise RuntimeError("The designated output prefix \"%s\" conflicts with an existing "
"file or directory" % stripped_p)
s_dir = os.path.dirname(stripped_p)
if not os.path.exists(s_dir):
raise FileNotFoundError("The designated output directory [%s] does not exist." % s_dir)
return stripped_p
def ss_input_tuple(s_input: str) -> Tuple[str, str, str]:
"""
Used for parsing some inputs to this program, namely the triples used to identify summary
statistics files, ancestries, and phenotypes. Whitespace is removed, but no case-changing
occurs.
:param s_input: String passed in by argparse
:return: Tuple (all strings) containing:
1) summary statistics file path
2) ancestry
3) phenotype
"""
try:
ss_file, ancestry, phenotype = map(lambda x: x.strip(), s_input.split(INPUT_TRIPLE_SEP))
except Exception as exc:
raise RuntimeError("Error parsing %s into GWAS file, ancestry, and phenotype" %
s_input) from exc
return input_file(ss_file), ancestry.strip(), phenotype.strip()
def input_np_matrix(s_input: str) -> np.ndarray:
"""
Used for parsing some inputs to this program, namely Numpy ndarrays (such as regression
coefficient matrices).
:param s_input: String passed in by argparse
:return: ndarray containing the matrix in the file indicated
"""
filename = input_file(s_input)
return np.fromfile(filename, sep='\t')
def glob_path(s_input: str) -> List[str]:
"""
Used for parsing some inputs to this program, namely glob paths (see Python glob module docs).
:param s_input: String passed in by argparse
:return: List of file paths
"""
file_path_list = glob.glob(s_input)
if not file_path_list:
raise RuntimeError("Glob string \"%s\" matches with no files." % s_input)
return [os.path.abspath(f) for f in file_path_list]
def corr_coef(s_input: str) -> float:
"""
Used for parsing some inputs to this program, namely input correlation coefficients
:param s_input: String passed in by argparse
:return: Float value of correlation coefficient
"""
c = float(s_input)
if c < CORR_MIN_SCALING or c > CORR_MAX_SCALING:
raise ValueError("Value given for correlation coefficient (%s) must be between "
"%s and %s." % (s_input, CORR_MIN_SCALING, CORR_MAX_SCALING))
return c
#################################
def get_mama_parser(progname: str) -> argp.ArgumentParser:
"""
Return a parser configured for this command line utility
:param prog: Value to pass to ArgumentParser for prog (should generally be sys.argv[0])
:return: argparse ArgumentParser
"""
    # Create the initially blank parser
parser = argp.ArgumentParser(prog=progname)
# Now, add argument groups and options:
# Input Options
in_opt = parser.add_argument_group(title="Main Input Specifications")
in_opt.add_argument("--sumstats", type=ss_input_tuple, nargs="+", required=True,
metavar="FILE%sANCESTRY%sPHENOTYPE" % (INPUT_TRIPLE_SEP, INPUT_TRIPLE_SEP),
help="List of triples F%sA%sP where F is path to a summary statistics "
"file, A is the name of an ancestry, and P is the name of a "
"phenotype. The ancestry is used for column lookup in the "
"LD Score file (columns are expected to be of the form ANC1_ANC2, "
"where ANC1 and ANC2 are ancestries. The ancestry and phenotype "
"for a given summary statistics file are used in combination as a "
"unique identifier. Currently, these are all case sensitive." %
(INPUT_TRIPLE_SEP, INPUT_TRIPLE_SEP))
in_opt.add_argument("--ld-scores", type=glob_path, required=True, metavar="GLOB_PATH",
help="Path to LD scores file(s). See python glob module for documentation "
"on the string to be provided here (full path with support for \"*\", "
"\"?\", and \"[]\"). This string should be encased in quotes. "
"Note: File columns are assumed to be of the form "
"ANC1_ANC2, where ANC1 and ANC2 are ancestries. Matching is case "
"sensitive, so these should match exactly to the ancestries passed "
"in via the --sumstats flag.")
in_opt.add_argument("--snp-list", type=input_file, required=False, metavar="FILE",
help="Path to optional SNP list file (one rsID per line). "
"If specified, this list will be used to restrict the final list "
"of SNPs reported (anything outside of this list will be dropped)")
# Output Options
out_opt = parser.add_argument_group(title="Output Specifications")
out_opt.add_argument("--out", metavar="FILE_PREFIX", type=output_prefix,
default=DEFAULT_FULL_OUT_PREFIX,
help="Full prefix of output files (logs, sumstats results, etc.). If not "
"set, [current working directory]/%s = \"%s\" will be used. "
"Note: The containing directory specified must already exist." %
(DEFAULT_SHORT_PREFIX, DEFAULT_FULL_OUT_PREFIX))
out_opt.add_argument("--out-reg-coef", action="store_true",
help="If specified, MAMA will output the LD regression coefficients "
"to disk. This is useful for reference, but also in the case "
"where it is desired to edit the matrices and then pass back into "
"MAMA with the --reg-*-coef options below to enable more complex "
"constraints than are allowed for in the existing precanned "
"options. The mechanism used is Numpy's tofile() method with a "
"tab separator (\\t) specified, which produces ASCII files with "
"the elements of the matrices listed in row-major order.")
out_opt.add_argument("--out-harmonized", action="store_true",
help="If specified, MAMA will output harmonized summary statistics "
"to disk. This can be useful for reference and (potentially) "
"debugging / troubleshooting. This will take place after "
"harmonizing all input GWAS files with each other and the LD scores.")
# General Options
gen_opt = parser.add_argument_group(title="General Options")
gen_opt.add_argument("--use-standardized-units", default=False, action="store_true",
help="This option should be specified to cause the processing done in "
"MAMA to be done in standardized units. Inputs and outputs are "
"always in allele count, but internal processing can be done in "
"standardized units by selecting this option (units will be "
"converted to standard units before processing, and then back to "
"allele count before final results are reported)")
gen_opt.add_argument("--input-sep", default=None, type=str,
help="This option is what is passed via the \"sep\" argument in Pandas' "
"read_csv() function when reading in summary statistics and LD score "
"files. This defaults to None, meaning the delimiter "
"will be inferred and the Python parsing engine will be used, which "
"yields maximum flexibility, but slower performance. Specifying a "
"value for this flag will cause MAMA to try to use the C parsing "
"engine and can significantly speed up execution, but all input file "
"reads will share this parameter, so it must work for all inputs. "
"See Pandas' read_csv \"sep\" parameter documentation for more "
"details and information."
)
# Logging options (subgroup)
log_opt = gen_opt.add_mutually_exclusive_group()
log_opt.add_argument("--quiet", action="store_true",
help="This option will cause the program to limit logging and terminal "
"output to warnings and errors, reducing output compared to "
"the default/standard logging mode. It is mutually "
"exclusive with the --verbose/--debug option.")
log_opt.add_argument("--verbose", action="store_true",
help="This option will greatly increase the logging and terminal output "
"of the program compared to the default/standard logging mode. "
"This is useful for debugging and greater visibility into the "
"processing that is occurring. It is mutually exclusive with the "
"--quiet option.")
# Regression Options
reg_opt = parser.add_argument_group(title="Regression Specifications",
description="Optional regression inputs / constraints")
# LD score coefficient options (subgroup)
reg_ld_opt = reg_opt.add_mutually_exclusive_group()
reg_ld_opt.add_argument("--reg-ld-coef", type=input_np_matrix, metavar="FILE",
help="Optional argument indicating the ASCII file containing the "
"regression coefficients for the LD scores. If this is "
"specified, this will override calculation of LD score "
"coefficients. The mechanism used is Numpy's fromfile() method "
"with a tab separator (\\t) specified. For a CxC matrix, the "
"file is C^2 numbers in row-major order separated by tabs. "
"This is mutually exclusive with other --reg-ld-* options")
reg_ld_opt.add_argument("--reg-ld-set-corr", type=corr_coef, metavar="CORR_COEF",
help="Optional argument indicating that off-diagonal elements in the "
"LD score coefficients matrix should be set to be equal to the "
"square root of the product of the associated diagonal entries "
"multiplied by the given scaling factor in range [%s, %s]. "
"This is mutually exclusive with other --reg-ld-* options" %
(CORR_MIN_SCALING, CORR_MAX_SCALING))
# SE^2 coefficient options (subgroup)
reg_se2_opt = reg_opt.add_mutually_exclusive_group()
reg_se2_opt.add_argument("--reg-se2-coef", type=input_np_matrix, metavar="FILE",
help="Optional argument indicating the file containing the regression "
"coefficients for SE^2. If this is specified, this will "
"override calculation of SE^2 coefficients. "
"This is mutually exclusive with other --reg-se2-* options")
reg_se2_opt.add_argument("--reg-se2-zero", action="store_true",
help="Optional argument indicating that the SE^2 coefficients matrix "
"should be set to be all zeroes. "
"This is mutually exclusive with other --reg-se2-* options")
reg_se2_opt.add_argument("--reg-se2-ident", action="store_true",
help="Optional argument indicating that the SE^2 coefficients matrix "
"should be set to be the identity matrix. "
"This is mutually exclusive with other --reg-se2-* options")
reg_se2_opt.add_argument("--reg-se2-diag", action="store_true",
help="Optional argument indicating that the SE^2 coefficients matrix "
"should have off-diagonal elements set to zero. "
"This is mutually exclusive with other --reg-se2-* options")
# Intercept coefficient options (subgroup)
reg_int_opt = reg_opt.add_mutually_exclusive_group()
reg_int_opt.add_argument("--reg-int-coef", type=input_np_matrix, metavar="FILE",
help="Optional argument indicating the file containing the regression "
"coefficients for the intercept. If this is specified, this "
"will override calculation of intercept coefficients. "
"This is mutually exclusive with other --reg-int-* options")
reg_int_opt.add_argument("--reg-int-zero", action="store_true",
help="Optional argument indicating that the intercept coefficients "
"matrix should be set to be all zeroes. "
"This is mutually exclusive with other --reg-int-* options")
reg_int_opt.add_argument("--reg-int-diag", action="store_true",
help="Optional argument indicating that the intercept coefficients "
"matrix should have off-diagonal elements set to zero. "
"This is mutually exclusive with other --reg-int-* options")
# Summary Statistics Filtering Options
ss_filt_opt = parser.add_argument_group(title="Summary Statistics Filtering Options",
description="Options for filtering/processing "
"summary stats")
ss_filt_opt.add_argument("--freq-bounds", nargs=2, metavar=("MIN", "MAX"), type=float,
help="This option adjusts the filtering of summary statistics. "
"Specify minimum frequency first, then maximum. "
"Defaults to minimum of %s and maximum of %s." %
(DEFAULT_MAF_MIN, DEFAULT_MAF_MAX))
ss_filt_opt.add_argument("--allowed-chr-values", type=str.upper, nargs="+",
help="This option allows specification of allowed values for the "
"chromosome field in summary statistics. Case is converted to "
"upper here and in the resulting data. Default is %s." %
DEFAULT_CHR_LIST)
ss_filt_opt.add_argument("--allow-palindromic-snps", action="store_true",
help="This option removes the filter that drops SNPs whose major "
"and minor alleles form a base pair (e.g. Major allele = \'G\' "
"and Minor allele = \'C\')")
# Summary Statistics Column Options
ss_col_opt = parser.add_argument_group(title="Summary Statistics Column Options",
description="Options for parsing summary stats columns")
for col in MAMA_RE_EXPR_MAP:
col_opt_group = ss_col_opt.add_mutually_exclusive_group()
col_opt_group.add_argument("--" + MAMA_RE_ADD_FLAGS[col], metavar="REGEX", type=reg_ex,
help="This option adds to the default (case-insensitive) "
"regular expression \"%s\" used for "
"matching / identifying the %s column. "
"Use any valid Python re module string. "
"Mutually exclusive with other --*-%s-col-match options " %
(MAMA_RE_EXPR_MAP[col], col, col.lower()))
col_opt_group.add_argument("--" + MAMA_RE_REPLACE_FLAGS[col], metavar="REGEX", type=reg_ex,
help="This option replaces the default (case-insensitive) "
"regular expression \"%s\" used for "
"matching / identifying the %s column. "
"Use any valid Python re module string. "
"Mutually exclusive with other --*-%s-col-match options " %
(MAMA_RE_EXPR_MAP[col], col, col.lower()))
return parser
#################################
def to_flag(arg_str: str) -> str:
"""
Utility method to convert from the name of an argparse Namespace attribute / variable
(which often is adopted elsewhere in this code, as well) to the corresponding flag
:param arg_str: Name of the arg
:return: The name of the flag (sans "--")
"""
return arg_str.replace("_", "-")
def to_arg(flag_str: str) -> str:
"""
Utility method to convert from an argparse flag name to the name of the corresponding attribute
in the argparse Namespace (which often is adopted elsewhere in this code, as well)
:param flag_str: Name of the flag (sans "--")
:return: The name of the argparse attribute/var
"""
return flag_str.replace("-", "_")
#################################
def format_terminal_call(cmd: List[str]) -> str:
"""
Format commands to/from the terminal for readability
:param cmd: List of strings much like sys.argv
:return: Formatted string used for display purposes
"""
return ' '.join(cmd).replace("--", " \\ \n\t--")
#################################
def get_user_inputs(argv: List[str], parsed_args: argp.Namespace) -> str:
"""
Create dictionary of user-specified options/flags and their values. Leverages the argparse
parsing output to glean the actual value, but checks for actual user-set flags in the input
:param argv: Tokenized list of inputs (meant to be sys.argv in most cases)
:param parsed_args: Result of argparse parsing the user input
:return: Dictionary containing user-set args keyed to their values
"""
# Search for everything beginning with "--" (flag names), strip off the --, take everything
# before any "=", and convert - to _
user_set_args = {to_arg(token[2:].split("=")[0]) for token in argv if token.startswith("--")}
# Since any flag actually specified by the user shouldn't have been replaced by a default
# value, one can grab the actual value from argparse without having to parse again
return {user_arg : getattr(parsed_args, user_arg) for user_arg in user_set_args}
#################################
def set_up_logger(log_file: str, log_level: int):
"""
Set up the logger for this utility.
:param log_file: Full path to the file used to store logs
:param log_level: Level used for logging
"""
log_handlers = []
# Create the stderr handler
stderr_handler = logging.StreamHandler(stream=sys.stderr)
stderr_handler.setLevel(logging.WARNING)
stderr_formatter = logging.Formatter('%(levelname)s: %(message)s')
stderr_handler.setFormatter(stderr_formatter)
log_handlers.append(stderr_handler)
# Create the stdout handler (if, based on log level, it could possibly have messages to log)
if log_level <= logging.INFO:
stdout_handler = logging.StreamHandler(stream=sys.stdout)
stdout_handler.setLevel(log_level)
stdout_formatter = logging.Formatter('%(message)s')
stdout_handler.setFormatter(stdout_formatter)
stdout_handler.addFilter(lambda record: record.levelno <= logging.INFO)
log_handlers.append(stdout_handler)
# Create the file handler
file_handler = logging.FileHandler(log_file, mode='w')
file_handler.setLevel(log_level)
file_formatter = logging.Formatter('%(asctime)s %(message)s')
file_handler.setFormatter(file_formatter)
log_handlers.append(file_handler)
# Set logging handlers and level for root logger
logging.basicConfig(handlers=log_handlers, level=log_level, datefmt='%I:%M:%S %p')
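# Illustrative routing at log_level=logging.INFO: INFO messages go to stdout and
# the log file, WARNING and above go to stderr and the log file, and the added
# filter keeps anything above INFO off of stdout.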
#################################
def setup_func(argv: List[str], get_parser: ParserFunc) -> Tuple[argp.Namespace, Dict[str, Any]]:
"""
Function to handle argument parsing, logging setup, and header printing
:param argv: List of arguments passed to the program (meant to be sys.argv)
:param get_parser: Function to call to get argument parser, given a program name
:return: Tuple of:
1) Argparse Namespace of parsed arguments
2) Dictionary of user-specified arguments
"""
# Parse the input flags using argparse
parser = get_parser(argv[0])
parsed_args = parser.parse_args(argv[1:])
# Break down inputs to keep track of arguments and values specified directly by the user
user_args = get_user_inputs(argv, parsed_args)
# Set up the logger
log_file = parsed_args.out + ".log"
if parsed_args.quiet:
log_level = logging.WARN
elif parsed_args.verbose:
log_level = logging.DEBUG
else:
log_level = logging.INFO
set_up_logger(log_file, log_level)
# Log header and other information
logging.info(HEADER)
logging.info("See full log at: %s\n", os.path.abspath(log_file))
logging.info("\nProgram executed via:\n%s\n", format_terminal_call(argv))
return parsed_args, user_args
#################################
# Disable pylint branch warning because we actually need all these checks
def validate_reg_options(pargs: argp.Namespace, internal_values: Dict[str, Any]): # pylint: disable=too-many-branches
"""
Responsible for validating regression-related inputs and populating the internal map with
values related to this processing
:param pargs: Result of argparse parsing user command / flags
:param internal_values: Dictionary containing internal values that might be updated
in this function
"""
# Process regression coefficient options
num_pops = len(pargs.sumstats)
num_pops_sq = num_pops * num_pops
# 1) LD coefs
ld_coef_matrix = getattr(pargs, "reg_ld_coef", None)
if ld_coef_matrix is not None:
        if len(ld_coef_matrix) != num_pops_sq:
raise RuntimeError("Expected a matrix with %s elements for regression coefficients "
"(LD) but got %s." % (num_pops_sq, len(ld_coef_matrix)))
internal_values[REG_LD_COEF_OPT] = ld_coef_matrix.reshape((num_pops, num_pops))
internal_values[REG_LD_COEF_SCALE_COEF] = None
elif getattr(pargs, "reg_ld_set_corr", None):
internal_values[REG_LD_COEF_OPT] = MAMA_REG_OPT_SET_CORR
internal_values[REG_LD_COEF_SCALE_COEF] = getattr(pargs, "reg_ld_set_corr")
else:
internal_values[REG_LD_COEF_OPT] = MAMA_REG_OPT_ALL_FREE
internal_values[REG_LD_COEF_SCALE_COEF] = None
logging.debug("Regression coeffient option (LD) = %s", internal_values[REG_LD_COEF_OPT])
logging.debug("Regression coeffient option (LD Scale) = %s",
internal_values[REG_LD_COEF_SCALE_COEF])
# 2) Intercept coefs
int_coef_matrix = getattr(pargs, "reg_int_coef", None)
if int_coef_matrix is not None:
        if len(int_coef_matrix) != num_pops_sq:
raise RuntimeError("Expected a matrix with %s elements for regression coefficients "
"(intercept) but got %s." % (num_pops_sq, len(int_coef_matrix)))
internal_values[REG_INT_COEF_OPT] = int_coef_matrix.reshape((num_pops, num_pops))
elif getattr(pargs, "reg_int_zero", None):
internal_values[REG_INT_COEF_OPT] = MAMA_REG_OPT_ALL_ZERO
elif getattr(pargs, "reg_int_diag", None):
internal_values[REG_INT_COEF_OPT] = MAMA_REG_OPT_OFFDIAG_ZERO
else:
internal_values[REG_INT_COEF_OPT] = MAMA_REG_OPT_ALL_FREE
logging.debug("Regression coeffient option (Intercept) = %s", internal_values[REG_INT_COEF_OPT])
# 3) SE^2 coefs
se2_coef_matrix = getattr(pargs, "reg_se2_coef", None)
if se2_coef_matrix is not None:
        if len(se2_coef_matrix) != num_pops_sq:
raise RuntimeError("Expected a matrix with %s elements for regression coefficients "
"(SE^2) but got %s." % (num_pops_sq, len(se2_coef_matrix)))
internal_values[REG_SE2_COEF_OPT] = se2_coef_matrix.reshape((num_pops, num_pops))
elif getattr(pargs, "reg_se2_zero", None):
internal_values[REG_SE2_COEF_OPT] = MAMA_REG_OPT_ALL_ZERO
elif getattr(pargs, "reg_se2_ident", None):
internal_values[REG_SE2_COEF_OPT] = MAMA_REG_OPT_IDENT
elif getattr(pargs, "reg_se2_diag", None):
internal_values[REG_SE2_COEF_OPT] = MAMA_REG_OPT_OFFDIAG_ZERO
else:
internal_values[REG_SE2_COEF_OPT] = MAMA_REG_OPT_ALL_FREE
logging.debug("Regression coeffient option (SE^2) = %s", internal_values[REG_SE2_COEF_OPT])
#################################
def construct_re_map(pargs: argp.Namespace) -> Dict[str, str]:
"""
Responsible for constructing the regular expressions map for column matching used by this
execution of the MAMA program. It begins with the default map and then adjusts it based on
user argparse inputs.
:param pargs: Result of argparse parsing user command / flags
    :return: Dictionary that maps standard column names to regular expressions
             (used for column matching by this execution of the MAMA program)
"""
re_map = MAMA_RE_EXPR_MAP.copy()
for req_col in MAMA_REQ_STD_COLS:
additional_re = getattr(pargs, to_arg(MAMA_RE_ADD_FLAGS[req_col]), None)
replacement_re = getattr(pargs, to_arg(MAMA_RE_REPLACE_FLAGS[req_col]), None)
if additional_re:
re_map[req_col] = "%s|%s" % (re_map[req_col], additional_re)
elif replacement_re:
re_map[req_col] = replacement_re
logging.debug("\nRegex map = %s", re_map)
return re_map
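# Illustrative (hypothetical pattern): an "add" flag extends a column's default
# regex, so re_map[col] becomes "<default regex>|EXTRA_PATTERN", while the
# corresponding "replace" flag overwrites the default entirely.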
#################################
def construct_filter_map(pargs: argp.Namespace) -> Dict[str, Tuple[Filter, str]]:
"""
Responsible for constructing the sumstats filter map for QC of GWAS used by this
execution of the MAMA program. It begins with the default map and then adjusts it based on
user argparse inputs.
:param pargs: Result of argparse parsing user command / flags
:return: Dictionary that maps names of filters to the function and description of the filter
(used for GWAS QC by this execution of the MAMA program)
"""
filt_map = MAMA_STD_FILTERS.copy()
if getattr(pargs, "freq_bounds", None):
        if pargs.freq_bounds[0] > pargs.freq_bounds[1]:
            raise RuntimeError("Minimum MAF (%s) must be less than or equal to maximum MAF "
                               "(%s)" % (pargs.freq_bounds[0], pargs.freq_bounds[1]))
filt_map[FREQ_FILTER] = (create_freq_filter(pargs.freq_bounds[0], pargs.freq_bounds[1]),
"Filters out SNPs with FREQ values outside of [%s, %s]" %
(pargs.freq_bounds[0], pargs.freq_bounds[1]))
if getattr(pargs, "allowed_chr_values", None):
filt_map[CHR_FILTER] = (create_chr_filter(pargs.allowed_chr_values),
"Filters out SNPs with listed chromosomes not in %s" %
pargs.allowed_chr_values)
if getattr(pargs, "allow_palindromic_snps", None):
del filt_map[SNP_PALIN_FILT]
logging.debug("\nFilter map = %s\n", filt_map)
return filt_map
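# Illustrative: "--freq-bounds 0.01 0.99" swaps in a filter dropping SNPs whose
# FREQ falls outside [0.01, 0.99], while "--allow-palindromic-snps" (see the
# parser above) removes the default palindromic-SNP filter from the map.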
#################################
def construct_ss_and_col_maps(pargs: argp.Namespace, re_map: Dict[str, str])\
-> Tuple[Dict[str, Dict[str, str]], Dict[PopulationId, str]]:
"""
Responsible for constructing:
1) the map between population ID and column map used for mapping sumstats columns
2) the map between population ID (ancestry + phenotype) and summary stats filename
    :param pargs: Result of argparse parsing user command / flags
    :param re_map: Dictionary that maps standard column names to regular expressions
                   used for sumstats column matching
:return: Tuple containing:
1) the map between population ID and column map used for mapping sumstats columns
2) the map between population ID (ancestry + phenotype) and summary stats filename
"""
col_map = dict()
ss_map = dict()
for ss_file, anc, phen in pargs.sumstats:
cols = list(pd.read_csv(ss_file, sep=None, engine='python', nrows=1, comment="#").columns)
ss_map[(anc, phen)] = ss_file
try:
col_map[(anc, phen)] = determine_column_mapping(cols, re_map, MAMA_REQ_STD_COLS)
except RuntimeError as exc:
raise RuntimeError("Column mapping error for summary statistics file %s (ancestry = "
"%s and phenotype = %s): %s" % (ss_file, anc, phen, exc)) from exc
return col_map, ss_map
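# Illustrative: a "--sumstats my_ss.txt,POP1,PHENO1" entry (hypothetical file)
# yields ss_map[("POP1", "PHENO1")] = "my_ss.txt" plus a column map translating
# that file's headers to the standard MAMA column names.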
#################################
def validate_inputs(pargs: argp.Namespace, user_args: Dict[str, Any]):
"""
Responsible for coordinating whatever initial validation of inputs can be done
:param pargs: Result of argparse parsing user command / flags
:param user_args: Flags explicitly set by the user along with their values
:return: Dictionary that contains flags and parameters needed by this program. It contains
user-input flags along with defaults set through argparse, and any additional flags
added as calculations proceed
"""
# Log user-specified arguments
logging.debug("\nProgram was called with the following arguments:\n%s", user_args)
# Prepare dictionary that will hold internal values for this program
internal_values = dict()
# Get output directory
internal_values[OUT_DIR] = os.path.dirname(pargs.out)
# Validate columns of the LD scores file(s)
for ld_score_file in pargs.ld_scores:
ld_cols = set(
pd.read_csv(ld_score_file, sep=None, engine='python', nrows=1, comment="#").columns)
ancestries = {a for ss_file, a, p in pargs.sumstats}
anc_tuples = itertools.combinations_with_replacement(ancestries, 2)
missing_ld_pair_cols = {anc_tuple for anc_tuple in anc_tuples
if not("%s_%s" % anc_tuple in ld_cols or
"%s_%s" % anc_tuple[::-1] in ld_cols)}
if missing_ld_pair_cols:
raise RuntimeError("The LD scores file %s is missing columns for the following "
"ancestry pairs: %s" % (ld_score_file, missing_ld_pair_cols))
if SNP_COL not in ld_cols:
raise RuntimeError("The LD scores file %s is missing SNP column \"%s\"" %
(ld_score_file, SNP_COL))
# Construct RE map for sumstats column matching (must be done before verifying sumstats columns)
internal_values[RE_MAP] = construct_re_map(pargs)
# Construct maps of pop ID to sumstats file and to column mappings (validate along the way)
internal_values[COL_MAP], internal_values[SUMSTATS_MAP] =\
construct_ss_and_col_maps(pargs, internal_values[RE_MAP])
# Create filter map to use for summary statistics
internal_values[FILTER_MAP] = construct_filter_map(pargs)
# Validate and process regression options
validate_reg_options(pargs, internal_values)
# If harmonized summary statistics should be written to disk, determine filename format string
internal_values[HARM_FILENAME_FSTR] = pargs.out + "_%s_%s" + HARMONIZED_SUFFIX \
if getattr(pargs, "out_harmonized", None) else ""
# If regression coefficients should be written to disk, determine filename format string
internal_values[REG_FILENAME_FSTR] = pargs.out + "_%s_" + LD_COEF_SUFFIX \
if getattr(pargs, "out_reg_coef", None) else ""
# Copy attributes to the internal dictionary from parsed args
for attr in vars(pargs):
internal_values[attr] = getattr(pargs, attr)
# Set some extra values based on parsed arg values
internal_values[OUT_PREFIX] = os.path.basename(pargs.out)
internal_values[ANCESTRIES] = ancestries
return internal_values
#################################
def main_func(argv: List[str]):
"""
Main function that should handle all the top-level processing for this program
:param argv: List of arguments passed to the program (meant to be sys.argv)
"""
# Perform argument parsing and program setup
parsed_args, user_args = setup_func(argv, get_mama_parser)
# Set Numpy error handling to shunt error messages to a logging function
np.seterr(all='call')
np.seterrcall(numpy_err_handler)
# Attempt to print package version info (pandas has a nice version info summary)
if logging.root.level <= logging.DEBUG:
logging.debug("Printing Pandas' version summary:")
with contextlib.redirect_stdout(io.StringIO()) as f:
pd.show_versions()
logging.debug("%s\n", f.getvalue())
# Execute the rest of the program, but catch and log exceptions before failing
try:
# Validate user inputs and create internal dictionary
iargs = validate_inputs(parsed_args, user_args)
# Run the MAMA pipeline
result_sumstats = mama_pipeline(iargs[SUMSTATS_MAP], iargs['ld_scores'], iargs['snp_list'],
iargs[COL_MAP], iargs[RE_MAP], iargs[FILTER_MAP],
iargs[REG_LD_COEF_OPT], iargs[REG_SE2_COEF_OPT],
iargs[REG_INT_COEF_OPT], iargs[REG_LD_COEF_SCALE_COEF],
iargs['use_standardized_units'], iargs[HARM_FILENAME_FSTR],
iargs[REG_FILENAME_FSTR], iargs['input_sep'])
# Write out the summary statistics to disk
logging.info("Writing results to disk.")
for (ancestry, phenotype), ss_df in result_sumstats.items():
filename = "%s_%s_%s%s" % (iargs["out"], ancestry, phenotype, RESULTS_SUFFIX)
logging.info("\t%s", filename)
write_sumstats_to_file(filename, ss_df)
# Log any remaining information TODO(jonbjala) Timing info?
logging.info("\nExecution complete.\n")
# Disable pylint error since we do actually want to capture all exceptions here
except Exception as exc: # pylint: disable=broad-except
logging.exception(exc)
sys.exit(1)
#################################
if __name__ == "__main__":
# Call the main function
main_func(sys.argv)
```
#### File: test/system/e2e_test.py
```python
import os
import sys
main_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '../..'))
test_dir = os.path.abspath(os.path.join(main_dir, 'test'))
data_dir = os.path.abspath(os.path.join(test_dir, 'data'))
sys.path.append(main_dir)
import subprocess
import tempfile
import numpy as np
import pandas as pd
import pytest
import mama.mama_pipeline as mp
import mama.util.sumstats as ss
@pytest.fixture(scope="function")
def temp_test_dir():
with tempfile.TemporaryDirectory(dir=os.getcwd()) as t:
yield t
TWO_POP_DIR = os.path.abspath(os.path.join(data_dir, 'two_pop'))
POP1_PHENO1_FILE = os.path.abspath(os.path.join(TWO_POP_DIR, 'pop1_pheno1_sumstats.txt'))
POP2_PHENO1_FILE = os.path.abspath(os.path.join(TWO_POP_DIR, 'pop2_pheno1_sumstats.txt'))
POP1_POP2_LDSCORE_FILE = os.path.abspath(os.path.join(TWO_POP_DIR, 'pop1_pop2_chr1.l2.ldscore'))
#===================================================================================================
class TestEndToEnd:
#########
def test__two_pops_default_logging__expected_results(self, temp_test_dir):
mama_py = os.path.abspath(os.path.join(main_dir, 'mama.py'))
test_prefix = 'test'
full_test_prefix = os.path.join(temp_test_dir, test_prefix)
logfile = full_test_prefix + ".log"
harmfile1 = full_test_prefix + "_POP1_PHENO1.hrm"
harmfile2 = full_test_prefix + "_POP2_PHENO1.hrm"
harmfile1_expected = os.path.join(TWO_POP_DIR, "POP1_PHENO1.hrm_expected")
harmfile2_expected = os.path.join(TWO_POP_DIR, "POP2_PHENO1.hrm_expected")
reg_ld_file = full_test_prefix + "_ld_reg.cf"
reg_int_file = full_test_prefix + "_int_reg.cf"
reg_se2_file = full_test_prefix + "_se2_reg.cf"
reg_ld_file_expected = os.path.join(TWO_POP_DIR, "ld_reg.cf_expected")
reg_int_file_expected = os.path.join(TWO_POP_DIR, "int_reg.cf_expected")
reg_se2_file_expected = os.path.join(TWO_POP_DIR, "se2_reg.cf_expected")
resultfile1 = full_test_prefix + "_POP1_PHENO1.res"
resultfile2 = full_test_prefix + "_POP2_PHENO1.res"
resultfile1_expected = os.path.join(TWO_POP_DIR, "POP1_PHENO1.res_expected")
resultfile2_expected = os.path.join(TWO_POP_DIR, "POP2_PHENO1.res_expected")
# Determine the command that will be run
run_cmd = [
mama_py,
'--sumstats', '%s,POP1,PHENO1' % POP1_PHENO1_FILE, '%s,POP2,PHENO1' % POP2_PHENO1_FILE,
'--ld-scores', POP1_POP2_LDSCORE_FILE,
'--out', full_test_prefix,
'--out-harmonized',
'--out-reg-coef',
]
# Run the process, make sure output is captured and process exit code is good
cp = subprocess.run(run_cmd, capture_output=True, check=True)
sout = cp.stdout.decode("utf-8")
serr = cp.stderr.decode("utf-8")
# Make sure various statements are in the log and were sent to stdout
assert os.path.exists(logfile)
with open(logfile) as logf:
logtext = logf.read()
assert "POP1,PHENO1" in logtext
assert "POP1,PHENO1" in sout
assert "POP2,PHENO1" in logtext
assert "POP2,PHENO1" in sout
assert "Number of SNPS in initial intersection of all sources: 32" in logtext
assert "Number of SNPS in initial intersection of all sources: 32" in sout
assert "Writing harmonized summary statistics to disk" in logtext
assert "Writing harmonized summary statistics to disk" in sout
assert "Harmonized POP1 PHENO1 mean chi squared:" not in logtext
assert "Harmonized POP1 PHENO1 mean chi squared:" not in sout
assert "Harmonized POP2 PHENO1 mean chi squared:" not in logtext
assert "Harmonized POP2 PHENO1 mean chi squared:" not in sout
assert "SNPs to make omega positive semi-definite" in logtext
assert "SNPs to make omega positive semi-definite" in sout
assert "Skipping positive-semi-definiteness check of Omega" not in logtext
assert "Skipping positive-semi-definiteness check of Omega" not in sout
assert "SNPs due to non-positive-definiteness of sigma" in logtext
assert "SNPs due to non-positive-definiteness of sigma" in sout
assert "total SNPs due to non-positive-(semi)-definiteness of omega / sigma" in logtext
assert "total SNPs due to non-positive-(semi)-definiteness of omega / sigma" in sout
assert "Mean Chi^2 for (\'POP1\', \'PHENO1\') = 3.4" in logtext
assert "Mean Chi^2 for (\'POP1\', \'PHENO1\') = 3.4" in sout
assert "%s_POP1_PHENO1.res" % full_test_prefix in logtext
assert "%s_POP1_PHENO1.res" % full_test_prefix in sout
assert "%s_POP2_PHENO1.res" % full_test_prefix in logtext
assert "%s_POP2_PHENO1.res" % full_test_prefix in sout
# Make sure the harmonized files exist and contain the right contents
assert os.path.exists(harmfile1)
assert os.path.exists(harmfile2)
harm1_df = pd.read_csv(harmfile1, sep=None, comment="#", engine="python")
harm2_df = pd.read_csv(harmfile2, sep=None, comment="#", engine="python")
harm1_expected_df = pd.read_csv(harmfile1_expected, sep=None, comment="#", engine="python")
harm2_expected_df = pd.read_csv(harmfile2_expected, sep=None, comment="#", engine="python")
pd.testing.assert_frame_equal(harm1_df, harm1_expected_df, check_exact=False)
pd.testing.assert_frame_equal(harm2_df, harm2_expected_df, check_exact=False)
# Make sure the regression coefficient files exist and contain the right contents
assert os.path.exists(reg_ld_file)
assert os.path.exists(reg_int_file)
assert os.path.exists(reg_se2_file)
ld_coef_matrix = np.fromfile(reg_ld_file, sep='\t')
int_coef_matrix = np.fromfile(reg_int_file, sep='\t')
se2_coef_matrix = np.fromfile(reg_se2_file, sep='\t')
ld_coef_matrix_expected = np.fromfile(reg_ld_file_expected, sep='\t')
int_coef_matrix_expected = np.fromfile(reg_int_file_expected, sep='\t')
se2_coef_matrix_expected = np.fromfile(reg_se2_file_expected, sep='\t')
assert np.allclose(ld_coef_matrix, ld_coef_matrix_expected)
assert np.allclose(int_coef_matrix, int_coef_matrix_expected)
assert np.allclose(se2_coef_matrix, se2_coef_matrix_expected)
# Make sure the result files exist and contain the right contents
assert os.path.exists(resultfile1)
assert os.path.exists(resultfile2)
res1_df = pd.read_csv(resultfile1, sep=None, comment="#", engine="python")
res2_df = pd.read_csv(resultfile2, sep=None, comment="#", engine="python")
res1_df_expected = pd.read_csv(resultfile1_expected, sep=None, comment="#", engine="python")
res2_df_expected = pd.read_csv(resultfile2_expected, sep=None, comment="#", engine="python")
pd.testing.assert_frame_equal(res1_df, res1_df_expected, check_exact=False)
pd.testing.assert_frame_equal(res2_df, res2_df_expected, check_exact=False)
#########
def test__one_pop_default_logging__expected_results(self, temp_test_dir):
mama_py = os.path.abspath(os.path.join(main_dir, 'mama.py'))
test_prefix = 'test'
full_test_prefix = os.path.join(temp_test_dir, test_prefix)
logfile = full_test_prefix + ".log"
harmfile1 = full_test_prefix + "_POP1_PHENO1.hrm"
harmfile2 = full_test_prefix + "_POP2_PHENO1.hrm"
reg_ld_file = full_test_prefix + "_ld_reg.cf"
reg_int_file = full_test_prefix + "_int_reg.cf"
reg_se2_file = full_test_prefix + "_se2_reg.cf"
reg_ld_file_expected = os.path.join(TWO_POP_DIR, "ld_reg.cf_expected")
reg_int_file_expected = os.path.join(TWO_POP_DIR, "int_reg.cf_expected")
reg_se2_file_expected = os.path.join(TWO_POP_DIR, "se2_reg.cf_expected")
resultfile1 = full_test_prefix + "_POP1_PHENO1.res"
resultfile2 = full_test_prefix + "_POP2_PHENO1.res"
# Determine the command that will be run
run_cmd = [
mama_py,
'--sumstats', '%s,POP1,PHENO1' % POP1_PHENO1_FILE,
'--ld-scores', POP1_POP2_LDSCORE_FILE,
'--out', full_test_prefix,
'--out-harmonized',
'--out-reg-coef',
]
# Run the process, make sure output is captured and process exit code is good
cp = subprocess.run(run_cmd, capture_output=True, check=True)
sout = cp.stdout.decode("utf-8")
serr = cp.stderr.decode("utf-8")
# Make sure various statements are in the log and were sent to stdout
assert os.path.exists(logfile)
with open(logfile) as logf:
logtext = logf.read()
assert "POP1,PHENO1" in logtext
assert "POP1,PHENO1" in sout
assert "POP2,PHENO1" not in logtext
assert "POP2,PHENO1" not in sout
assert "Number of SNPS in initial intersection of all sources: 32" in logtext
assert "Number of SNPS in initial intersection of all sources: 32" in sout
assert "Writing harmonized summary statistics to disk" in logtext
assert "Writing harmonized summary statistics to disk" in sout
assert "Harmonized POP1 PHENO1 mean chi squared:" not in logtext
assert "Harmonized POP1 PHENO1 mean chi squared:" not in sout
assert "SNPs to make omega positive semi-definite" not in logtext
assert "SNPs to make omega positive semi-definite" not in sout
assert "Skipping positive-semi-definiteness check of Omega" in logtext
assert "Skipping positive-semi-definiteness check of Omega" in sout
assert "SNPs due to non-positive-definiteness of sigma" in logtext
assert "SNPs due to non-positive-definiteness of sigma" in sout
assert "total SNPs due to non-positive-(semi)-definiteness of omega / sigma" in logtext
assert "total SNPs due to non-positive-(semi)-definiteness of omega / sigma" in sout
assert "Mean Chi^2 for (\'POP1\', \'PHENO1\')" in logtext
assert "Mean Chi^2 for (\'POP1\', \'PHENO1\')" in sout
assert "%s_POP1_PHENO1.res" % full_test_prefix in logtext
assert "%s_POP1_PHENO1.res" % full_test_prefix in sout
assert "%s_POP2_PHENO1.res" % full_test_prefix not in logtext
assert "%s_POP2_PHENO1.res" % full_test_prefix not in sout
# Make sure the harmonized files exist and contain the right contents
assert os.path.exists(harmfile1)
assert not os.path.exists(harmfile2)
harm1_df = pd.read_csv(harmfile1, sep=None, comment="#", engine="python")
harm1_expected_df = pd.read_csv(POP1_PHENO1_FILE, sep=None, comment="#", engine="python")
# Make sure the regression coefficient files exist and contain the right contents
assert os.path.exists(reg_ld_file)
assert os.path.exists(reg_int_file)
assert os.path.exists(reg_se2_file)
ld_coef_matrix = np.fromfile(reg_ld_file, sep='\t')
int_coef_matrix = np.fromfile(reg_int_file, sep='\t')
se2_coef_matrix = np.fromfile(reg_se2_file, sep='\t')
ld_coef_matrix_expected = np.fromfile(reg_ld_file_expected, sep='\t')
int_coef_matrix_expected = np.fromfile(reg_int_file_expected, sep='\t')
se2_coef_matrix_expected = np.fromfile(reg_se2_file_expected, sep='\t')
assert len(ld_coef_matrix) == 1
assert len(int_coef_matrix) == 1
assert len(se2_coef_matrix) == 1
assert np.allclose(ld_coef_matrix, ld_coef_matrix_expected[0])
assert np.allclose(int_coef_matrix, int_coef_matrix_expected[0])
assert np.allclose(se2_coef_matrix, se2_coef_matrix_expected[0])
# Make sure the result files exist and contain the right contents
assert os.path.exists(resultfile1)
assert not os.path.exists(resultfile2)
res1_df = pd.read_csv(resultfile1, sep=None, comment="#", engine="python")
res1_df_expected = pd.read_csv(POP1_PHENO1_FILE, sep=None, comment="#", engine="python")
pd.testing.assert_series_equal(res1_df[ss.SNP_COL], res1_df_expected[ss.SNP_COL])
pd.testing.assert_series_equal(res1_df[ss.CHR_COL], res1_df_expected[ss.CHR_COL])
pd.testing.assert_series_equal(res1_df[ss.BP_COL], res1_df_expected[ss.BP_COL])
pd.testing.assert_series_equal(res1_df[ss.A1_COL], res1_df_expected[ss.A1_COL])
pd.testing.assert_series_equal(res1_df[ss.A2_COL], res1_df_expected[ss.A2_COL])
pd.testing.assert_series_equal(res1_df[ss.FREQ_COL], res1_df_expected['FRQ'], check_names=False)
pd.testing.assert_series_equal(res1_df[ss.BETA_COL], res1_df_expected[ss.BETA_COL], check_dtype=False)
pd.testing.assert_series_equal(res1_df[mp.ORIGINAL_N_COL_RENAME], res1_df_expected[ss.N_COL], check_names=False)
se_ratios = res1_df[ss.SE_COL] / res1_df_expected[ss.SE_COL]
assert np.isclose(se_ratios.min(), se_ratios.max())
```
#### File: test/unit/reg_mama_test.py
```python
import os
import sys
main_directory = os.path.abspath(os.path.join(os.path.dirname(__file__), '../..'))
test_directory = os.path.abspath(os.path.join(main_directory, 'test'))
data_directory = os.path.abspath(os.path.join(test_directory, 'data'))
sys.path.append(main_directory)
import numpy as np
import pytest
import mama.reg_mama as reg_mama
###########################################
class TestFixedOptionHelper:
SIZE_LIST = [1, 2, 3, 4]
#########
@pytest.mark.parametrize("size", SIZE_LIST)
def test__all_free__return_expected(self, size):
result1 = reg_mama.fixed_option_helper(size, reg_mama.MAMA_REG_OPT_ALL_FREE)
result2 = reg_mama.fixed_option_helper(size)
nan_result1 = np.isnan(result1)
nan_result2 = np.isnan(result2)
assert np.all(nan_result1)
assert np.all(nan_result2)
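    # These assertions imply that MAMA_REG_OPT_ALL_FREE (also the default)
    # produces a size x size matrix of NaNs, presumably marking every
    # regression coefficient as free to be fit.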
#########
@pytest.mark.parametrize("size", SIZE_LIST)
def test__all_zero__return_expected(self, size):
result = reg_mama.fixed_option_helper(size, reg_mama.MAMA_REG_OPT_ALL_ZERO)
assert np.all(np.where(result == 0.0, True, False))
#########
@pytest.mark.parametrize("size", SIZE_LIST)
def test__offdiag_zero__return_expected(self, size):
result = reg_mama.fixed_option_helper(size, reg_mama.MAMA_REG_OPT_OFFDIAG_ZERO)
nan_result = np.isnan(result)
assert np.all(np.diag(nan_result))
assert np.all(nan_result.sum(axis=0) == 1)
assert np.all(nan_result.sum(axis=1) == 1)
#########
@pytest.mark.parametrize("size", SIZE_LIST)
def test__identity__return_expected(self, size):
result = reg_mama.fixed_option_helper(size, reg_mama.MAMA_REG_OPT_IDENT)
assert np.all(np.diag(result) == 1.0)
assert np.all(np.where(result == 1.0, True, False).sum(axis=0) == 1)
assert np.all(np.where(result == 1.0, True, False).sum(axis=1) == 1)
#########
@pytest.mark.parametrize("size", SIZE_LIST)
def test__valid_matrix_input__return_expected(self, size):
M = np.random.rand(size, size)
result = reg_mama.fixed_option_helper(size, M)
assert np.array_equal(result, M)
#########
@pytest.mark.parametrize("size", SIZE_LIST)
def test__invalid_matrix_input__return_expected(self, size):
M = np.random.rand(size + 1, size + 1)
with pytest.raises(RuntimeError) as ex_info:
reg_mama.fixed_option_helper(size, M)
assert str(size) in str(ex_info.value)
assert str(size + 1) in str(ex_info.value)
#########
def test__invalid_opt_str_type__return_expected(self):
val = 1.5
with pytest.raises(RuntimeError) as ex_info:
reg_mama.fixed_option_helper(5, float(val))
assert "float" in str(ex_info.value)
assert str(val) in str(ex_info.value)
#########
def test__invalid_opt_str_value__return_expected(self):
val = "INVALID_OPTION_ABC_XYZ"
with pytest.raises(RuntimeError) as ex_info:
reg_mama.fixed_option_helper(5, val)
assert "str" in str(ex_info.value)
assert str(val) in str(ex_info.value)
``` |
{
"source": "JonJala/pgi_correct",
"score": 3
} |
#### File: pgi_correct/test/pgic_test.py
```python
import numpy as np
import pgs_correct.pgic as pgic
import pytest
###########################################
class TestCalculateRho:
#########
@pytest.mark.parametrize("h2, r2, expected_rho",
[
(1.0, 1.0, 1.0),
(4.0, 1.0, 2.0),
(1.0, 4.0, 0.5)
]
)
def test_happypath_noerrors(self, h2, r2, expected_rho):
assert pgic.calculate_rho(h2, r2) == pytest.approx(expected_rho)
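    # The happy-path cases above are consistent with rho = sqrt(h2 / r2)
    # (e.g. sqrt(4.0 / 1.0) = 2.0, sqrt(1.0 / 4.0) = 0.5), which also explains
    # the ZeroDivisionError expectations below when r2 == 0.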
#########
@pytest.mark.parametrize("h2, r2, expected_error",
[
(0.0, 0.0, ZeroDivisionError),
(1.0, 0.0, ZeroDivisionError),
]
)
def test_zero_r2_raises(self, h2, r2, expected_error):
with pytest.raises(expected_error):
pgic.calculate_rho(h2, r2)
#########
@pytest.mark.parametrize("h2, r2, expected_error",
[
("abc", 1.0, TypeError),
(1.0, "abc", TypeError)
]
)
def test_input_string_type_raises(self, h2, r2, expected_error):
with pytest.raises(expected_error):
pgic.calculate_rho(h2, r2)
#########
@pytest.mark.parametrize("h2, r2, expected_warning",
[
(-1.0, 1.0, RuntimeWarning),
(1.0, -1.0, RuntimeWarning)
]
)
def test_input_negative_warns(self, h2, r2, expected_warning):
with pytest.warns(expected_warning):
pgic.calculate_rho(h2, r2)
###########################################
class TestCalculateCenterMatrix:
#########
@pytest.mark.parametrize("V_ghat, rho, z_int_mean, z_int_cov, expected_matrix",
[
(np.identity(5), 1.0, np.zeros(2), np.identity(2), np.identity(5)),
(np.identity(5), 2.0, np.zeros(2), np.identity(2), np.diag([4, 4, 4, 1, 1])),
(np.identity(5), 2.0, np.array([-2/3, -1/3]), np.array([[-10/9, 4/9], [4/9, -4/9]]),
np.array([[-13.0, 6.0, 5.0, 0.0, 0.0], [6.0, -2.0, -2.0, 0.0, 0.0],
[5.0, -2.0, -1.0, 0.0, 0.0], [0.0, 0.0, 0.0, 1.0, 0.0],
[0.0, 0.0, 0.0, 0.0, 1.0]]))
]
)
def test_happypath_noerrors(self, V_ghat, rho, z_int_mean, z_int_cov, expected_matrix):
assert np.allclose(pgic.calculate_center_matrix(V_ghat, rho, z_int_mean, z_int_cov),
expected_matrix)
#########
@pytest.mark.parametrize("V_ghat, rho, z_int_mean, z_int_cov, expected_error",
[
(np.identity(2), 0.0, np.zeros(2), np.identity(2), ZeroDivisionError),
]
)
def test_zero_rho_raises(self, V_ghat, rho, z_int_mean, z_int_cov, expected_error):
with pytest.raises(expected_error):
pgic.calculate_center_matrix(V_ghat, rho, z_int_mean, z_int_cov)
###########################################
class TestCalculateCorrectedCoefficients:
#########
@pytest.mark.parametrize("corr_matrix, coefficients, expected_coef",
[
(np.identity(5), np.zeros(5), np.zeros(5)),
(np.identity(5), np.ones(5), np.ones(5)),
(np.ones((5,5)), np.ones(5), np.full((5,5), 5))
]
)
def test_happypath_noerrors(self, corr_matrix, coefficients, expected_coef):
assert np.allclose(pgic.calculate_corrected_coefficients(
corr_matrix, coefficients), expected_coef)
#########
@pytest.mark.parametrize("corr_matrix, coefficients, expected_error",
[
(np.identity(5), np.ones(4), ValueError),
]
)
def test_dimension_mismatch_raises(self, corr_matrix, coefficients, expected_error):
with pytest.raises(expected_error):
pgic.calculate_corrected_coefficients(corr_matrix, coefficients)
###########################################
arg_to_flag_dict = {
"" : "",
" " : " ",
"abc" : "abc",
"ab_c" : "ab-c",
"a_b_c" : "a-b-c"
}
flag_to_arg_dict = {arg_to_flag_dict[arg] : arg for arg in arg_to_flag_dict}
test_arg_flag_pairs = list(arg_to_flag_dict.items())
test_flag_arg_pairs = list(flag_to_arg_dict.items())
test_args = list(arg_to_flag_dict.keys())
test_flags = list(flag_to_arg_dict.keys())
class TestToArgAndToFlag:
#########
@pytest.mark.parametrize("arg_str, expected_flag", test_arg_flag_pairs)
def test_happypath_toflag_noerrors(self, arg_str, expected_flag):
assert pgic.to_flag(arg_str) == expected_flag
#########
@pytest.mark.parametrize("flag_str, expected_arg", test_flag_arg_pairs)
def test_happypath_toarg_noerrors(self, flag_str, expected_arg):
assert pgic.to_arg(flag_str) == expected_arg
#########
@pytest.mark.parametrize("str_input", test_flags)
def test_toflag_inverseof_toarg_noerrors(self, str_input):
assert pgic.to_flag(pgic.to_arg(str_input)) == str_input
#########
@pytest.mark.parametrize("str_input", test_args)
def test_toarg_inverseof_toflag_noerrors(self, str_input):
assert pgic.to_arg(pgic.to_flag(str_input)) == str_input
###########################################
# TODO(jonbjala) Was just starting to write these tests when we had our meeting, so I'm just
# block commenting these for now. If I can fit them in with the MVP delivery, then great, but if
# not, then it's not critical.
# test_gcta_base_settings = InternalNamespace()
# test_gcta_base_settings.pheno_file = None
# test_gcta_base_settings.bfile = None
# test_gcta_base_settings.gcta_exec = None
# test_gcta_base_settings.grm = None
# test_gcta_base_settings.grm_cutoff = 0.25
# class TestValidateGCTAInputs:
# #########
# @pytest.mark.parametrize("user_args, parsed_args, settings",
# [
# ({})
# ]
# )
# def test_nogcta_nogctaflags_noerrors(self, user_args, parsed_args, settings):
# validate_gcta_inputs(user_args, parsed_args, settings)
# #########
# @pytest.mark.parametrize("user_args, parsed_args, settings", [])
# def test_gcta_allgctaflags_noerrors(self, user_args, parsed_args, settings):
# validate_gcta_inputs(user_args, parsed_args, settings)
# #########
# @pytest.mark.parametrize("user_args, parsed_args, settings", [])
# def test_nogcta_withgctaflags_noforce_errors(self, user_args, parsed_args, settings):
# settings.force = false
# with pytest.raises(RuntimeError):
# validate_gcta_inputs(user_args, parsed_args, settings)
# #########
# @pytest.mark.parametrize("user_args, parsed_args, settings", [])
# def test_nogcta_withgctaflags_withforce_errors(self, user_args, parsed_args, settings):
# settings.force = true
# validate_gcta_inputs(user_args, parsed_args, settings) # TODO(check stderr for warning?)
# #########
# @pytest.mark.parametrize("user_args, parsed_args, settings", [])
# def test_gcta_missinggctaflags_errors(self, user_args, parsed_args, settings):
# with pytest.raises(RuntimeError):
# validate_gcta_inputs(user_args, parsed_args, settings)
# #########
# @pytest.mark.parametrize("user_args, parsed_args, settings", [])
# def test_gcta_wrongexec_errors(self, user_args, parsed_args, settings):
# with pytest.raises(NameError):
# validate_gcta_inputs(user_args, parsed_args, settings)
###########################################
``` |
{
"source": "jonjau/cifero",
"score": 2
} |
#### File: cifero/ciferogui/ciphers.py
```python
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_CiphersDialog(object):
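    """Widget scaffold for the Ciphers dialog (in the style of pyuic5 output).

    setupUi builds a tab widget whose IPA, Key, and Cipher tabs each pair a
    description browser with read-only consonant/vowel tables and a read-only
    "sheet" text pane; the Base tab follows the same layout.
    """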
def setupUi(self, CiphersDialog):
CiphersDialog.setObjectName("CiphersDialog")
CiphersDialog.resize(869, 496)
self.gridLayout = QtWidgets.QGridLayout(CiphersDialog)
self.gridLayout.setObjectName("gridLayout")
self.buttonBox = QtWidgets.QDialogButtonBox(CiphersDialog)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)
self.buttonBox.setObjectName("buttonBox")
self.gridLayout.addWidget(self.buttonBox, 1, 1, 1, 1)
self.tabWidget = QtWidgets.QTabWidget(CiphersDialog)
self.tabWidget.setObjectName("tabWidget")
self.IPATab = QtWidgets.QWidget()
self.IPATab.setMinimumSize(QtCore.QSize(0, 375))
self.IPATab.setObjectName("IPATab")
self.groupBox_1 = QtWidgets.QGroupBox(self.IPATab)
self.groupBox_1.setGeometry(QtCore.QRect(330, 0, 341, 421))
self.groupBox_1.setObjectName("groupBox_1")
self.IPAConsTableWidget = QtWidgets.QTableWidget(self.groupBox_1)
self.IPAConsTableWidget.setGeometry(QtCore.QRect(10, 20, 321, 391))
self.IPAConsTableWidget.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
self.IPAConsTableWidget.setObjectName("IPAConsTableWidget")
self.IPAConsTableWidget.setColumnCount(3)
self.IPAConsTableWidget.setRowCount(12)
        for row in range(12):
            self.IPAConsTableWidget.setVerticalHeaderItem(row, QtWidgets.QTableWidgetItem())
        for col in range(3):
            self.IPAConsTableWidget.setHorizontalHeaderItem(col, QtWidgets.QTableWidgetItem())
        # Pre-create the table cells; rows 5 and 8-11 use all three columns,
        # the other rows only the first two.
        for row in range(12):
            for col in range(3 if row in (5, 8, 9, 10, 11) else 2):
                self.IPAConsTableWidget.setItem(row, col, QtWidgets.QTableWidgetItem())
self.groupBox_2 = QtWidgets.QGroupBox(self.IPATab)
self.groupBox_2.setGeometry(QtCore.QRect(0, 150, 331, 271))
self.groupBox_2.setObjectName("groupBox_2")
self.IPAVowsTableWidget = QtWidgets.QTableWidget(self.groupBox_2)
self.IPAVowsTableWidget.setGeometry(QtCore.QRect(10, 20, 311, 241))
self.IPAVowsTableWidget.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
self.IPAVowsTableWidget.setObjectName("IPAVowsTableWidget")
self.IPAVowsTableWidget.setColumnCount(3)
self.IPAVowsTableWidget.setRowCount(7)
        for row in range(7):
            self.IPAVowsTableWidget.setVerticalHeaderItem(row, QtWidgets.QTableWidgetItem())
        for col in range(3):
            self.IPAVowsTableWidget.setHorizontalHeaderItem(col, QtWidgets.QTableWidgetItem())
        # Pre-create the table cells; rows 0-5 use two columns, row 6 only one.
        for row in range(7):
            for col in range(1 if row == 6 else 2):
                self.IPAVowsTableWidget.setItem(row, col, QtWidgets.QTableWidgetItem())
self.groupBox_3 = QtWidgets.QGroupBox(self.IPATab)
self.groupBox_3.setGeometry(QtCore.QRect(670, 0, 171, 421))
self.groupBox_3.setObjectName("groupBox_3")
self.IPASheetTextEdit = QtWidgets.QPlainTextEdit(self.groupBox_3)
self.IPASheetTextEdit.setGeometry(QtCore.QRect(10, 20, 151, 391))
self.IPASheetTextEdit.setReadOnly(True)
self.IPASheetTextEdit.setObjectName("IPASheetTextEdit")
self.IPADesc = QtWidgets.QTextBrowser(self.IPATab)
self.IPADesc.setGeometry(QtCore.QRect(10, 10, 311, 131))
self.IPADesc.setObjectName("IPADesc")
self.tabWidget.addTab(self.IPATab, "")
self.KeyTab = QtWidgets.QWidget()
self.KeyTab.setObjectName("KeyTab")
self.groupBox_4 = QtWidgets.QGroupBox(self.KeyTab)
self.groupBox_4.setGeometry(QtCore.QRect(330, 0, 341, 421))
self.groupBox_4.setObjectName("groupBox_4")
self.KeyConsTableWidget = QtWidgets.QTableWidget(self.groupBox_4)
self.KeyConsTableWidget.setGeometry(QtCore.QRect(10, 20, 321, 391))
self.KeyConsTableWidget.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
self.KeyConsTableWidget.setObjectName("KeyConsTableWidget")
self.KeyConsTableWidget.setColumnCount(3)
self.KeyConsTableWidget.setRowCount(12)
        for row in range(12):
            self.KeyConsTableWidget.setVerticalHeaderItem(row, QtWidgets.QTableWidgetItem())
        for col in range(3):
            self.KeyConsTableWidget.setHorizontalHeaderItem(col, QtWidgets.QTableWidgetItem())
        # Pre-create the table cells; rows 5 and 8-11 use all three columns,
        # the other rows only the first two.
        for row in range(12):
            for col in range(3 if row in (5, 8, 9, 10, 11) else 2):
                self.KeyConsTableWidget.setItem(row, col, QtWidgets.QTableWidgetItem())
self.KeyDesc = QtWidgets.QTextBrowser(self.KeyTab)
self.KeyDesc.setGeometry(QtCore.QRect(10, 10, 311, 131))
self.KeyDesc.setObjectName("KeyDesc")
self.groupBox_5 = QtWidgets.QGroupBox(self.KeyTab)
self.groupBox_5.setGeometry(QtCore.QRect(0, 150, 331, 271))
self.groupBox_5.setObjectName("groupBox_5")
self.KeyVowsTableWidget = QtWidgets.QTableWidget(self.groupBox_5)
self.KeyVowsTableWidget.setGeometry(QtCore.QRect(10, 20, 311, 241))
self.KeyVowsTableWidget.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
self.KeyVowsTableWidget.setObjectName("KeyVowsTableWidget")
self.KeyVowsTableWidget.setColumnCount(3)
self.KeyVowsTableWidget.setRowCount(7)
        for row in range(7):
            self.KeyVowsTableWidget.setVerticalHeaderItem(row, QtWidgets.QTableWidgetItem())
        for col in range(3):
            self.KeyVowsTableWidget.setHorizontalHeaderItem(col, QtWidgets.QTableWidgetItem())
        # Pre-create the table cells; rows 0-5 use two columns, row 6 only one.
        for row in range(7):
            for col in range(1 if row == 6 else 2):
                self.KeyVowsTableWidget.setItem(row, col, QtWidgets.QTableWidgetItem())
self.groupBox_6 = QtWidgets.QGroupBox(self.KeyTab)
self.groupBox_6.setGeometry(QtCore.QRect(670, 0, 171, 421))
self.groupBox_6.setObjectName("groupBox_6")
self.KeySheetTextEdit = QtWidgets.QPlainTextEdit(self.groupBox_6)
self.KeySheetTextEdit.setGeometry(QtCore.QRect(10, 20, 151, 391))
self.KeySheetTextEdit.setReadOnly(True)
self.KeySheetTextEdit.setObjectName("KeySheetTextEdit")
self.tabWidget.addTab(self.KeyTab, "")
self.CipherTab = QtWidgets.QWidget()
self.CipherTab.setObjectName("CipherTab")
self.groupBox_7 = QtWidgets.QGroupBox(self.CipherTab)
self.groupBox_7.setGeometry(QtCore.QRect(330, 0, 341, 421))
self.groupBox_7.setObjectName("groupBox_7")
self.CipherConsTableWidget = QtWidgets.QTableWidget(self.groupBox_7)
self.CipherConsTableWidget.setGeometry(QtCore.QRect(10, 20, 321, 391))
self.CipherConsTableWidget.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
self.CipherConsTableWidget.setObjectName("CipherConsTableWidget")
self.CipherConsTableWidget.setColumnCount(3)
self.CipherConsTableWidget.setRowCount(12)
        for row in range(12):
            self.CipherConsTableWidget.setVerticalHeaderItem(row, QtWidgets.QTableWidgetItem())
        for col in range(3):
            self.CipherConsTableWidget.setHorizontalHeaderItem(col, QtWidgets.QTableWidgetItem())
        # Pre-create the table cells; rows 3-5 and 8-11 use all three columns,
        # the other rows only the first two.
        for row in range(12):
            for col in range(3 if row in (3, 4, 5, 8, 9, 10, 11) else 2):
                self.CipherConsTableWidget.setItem(row, col, QtWidgets.QTableWidgetItem())
self.CipherConsTableWidget.horizontalHeader().setDefaultSectionSize(100)
self.groupBox_9 = QtWidgets.QGroupBox(self.CipherTab)
self.groupBox_9.setGeometry(QtCore.QRect(670, 0, 171, 421))
self.groupBox_9.setObjectName("groupBox_9")
self.CipherSheetTextEdit = QtWidgets.QPlainTextEdit(self.groupBox_9)
self.CipherSheetTextEdit.setGeometry(QtCore.QRect(10, 20, 151, 391))
self.CipherSheetTextEdit.setReadOnly(True)
self.CipherSheetTextEdit.setObjectName("CipherSheetTextEdit")
self.groupBox_8 = QtWidgets.QGroupBox(self.CipherTab)
self.groupBox_8.setGeometry(QtCore.QRect(0, 150, 331, 271))
self.groupBox_8.setObjectName("groupBox_8")
self.CipherVowsTableWidget = QtWidgets.QTableWidget(self.groupBox_8)
self.CipherVowsTableWidget.setGeometry(QtCore.QRect(10, 20, 311, 241))
self.CipherVowsTableWidget.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
self.CipherVowsTableWidget.setObjectName("CipherVowsTableWidget")
self.CipherVowsTableWidget.setColumnCount(3)
self.CipherVowsTableWidget.setRowCount(7)
        for row in range(7):
            self.CipherVowsTableWidget.setVerticalHeaderItem(row, QtWidgets.QTableWidgetItem())
        for col in range(3):
            self.CipherVowsTableWidget.setHorizontalHeaderItem(col, QtWidgets.QTableWidgetItem())
        # Pre-create the table cells; every row uses the first two columns.
        for row in range(7):
            for col in range(2):
                self.CipherVowsTableWidget.setItem(row, col, QtWidgets.QTableWidgetItem())
self.CipherVowsTableWidget.horizontalHeader().setDefaultSectionSize(98)
self.CipherVowsTableWidget.horizontalHeader().setMinimumSectionSize(35)
self.CipherDesc = QtWidgets.QTextBrowser(self.CipherTab)
self.CipherDesc.setGeometry(QtCore.QRect(10, 10, 311, 131))
self.CipherDesc.setObjectName("CipherDesc")
self.tabWidget.addTab(self.CipherTab, "")
self.BaseTab = QtWidgets.QWidget()
self.BaseTab.setObjectName("BaseTab")
self.BaseDesc = QtWidgets.QTextBrowser(self.BaseTab)
self.BaseDesc.setGeometry(QtCore.QRect(10, 10, 311, 131))
self.BaseDesc.setObjectName("BaseDesc")
self.groupBox_11 = QtWidgets.QGroupBox(self.BaseTab)
self.groupBox_11.setGeometry(QtCore.QRect(0, 150, 331, 271))
self.groupBox_11.setObjectName("groupBox_11")
self.BaseVowsTableWidget = QtWidgets.QTableWidget(self.groupBox_11)
self.BaseVowsTableWidget.setGeometry(QtCore.QRect(10, 20, 311, 241))
self.BaseVowsTableWidget.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
self.BaseVowsTableWidget.setObjectName("BaseVowsTableWidget")
self.BaseVowsTableWidget.setColumnCount(3)
self.BaseVowsTableWidget.setRowCount(7)
item = QtWidgets.QTableWidgetItem()
self.BaseVowsTableWidget.setVerticalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.BaseVowsTableWidget.setVerticalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.BaseVowsTableWidget.setVerticalHeaderItem(2, item)
item = QtWidgets.QTableWidgetItem()
self.BaseVowsTableWidget.setVerticalHeaderItem(3, item)
item = QtWidgets.QTableWidgetItem()
self.BaseVowsTableWidget.setVerticalHeaderItem(4, item)
item = QtWidgets.QTableWidgetItem()
self.BaseVowsTableWidget.setVerticalHeaderItem(5, item)
item = QtWidgets.QTableWidgetItem()
self.BaseVowsTableWidget.setVerticalHeaderItem(6, item)
item = QtWidgets.QTableWidgetItem()
self.BaseVowsTableWidget.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.BaseVowsTableWidget.setHorizontalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.BaseVowsTableWidget.setHorizontalHeaderItem(2, item)
item = QtWidgets.QTableWidgetItem()
self.BaseVowsTableWidget.setItem(0, 0, item)
item = QtWidgets.QTableWidgetItem()
self.BaseVowsTableWidget.setItem(0, 1, item)
item = QtWidgets.QTableWidgetItem()
self.BaseVowsTableWidget.setItem(1, 0, item)
item = QtWidgets.QTableWidgetItem()
self.BaseVowsTableWidget.setItem(1, 1, item)
item = QtWidgets.QTableWidgetItem()
self.BaseVowsTableWidget.setItem(2, 0, item)
item = QtWidgets.QTableWidgetItem()
self.BaseVowsTableWidget.setItem(2, 1, item)
item = QtWidgets.QTableWidgetItem()
self.BaseVowsTableWidget.setItem(3, 0, item)
item = QtWidgets.QTableWidgetItem()
self.BaseVowsTableWidget.setItem(3, 1, item)
item = QtWidgets.QTableWidgetItem()
self.BaseVowsTableWidget.setItem(4, 0, item)
item = QtWidgets.QTableWidgetItem()
self.BaseVowsTableWidget.setItem(4, 1, item)
item = QtWidgets.QTableWidgetItem()
self.BaseVowsTableWidget.setItem(5, 0, item)
item = QtWidgets.QTableWidgetItem()
self.BaseVowsTableWidget.setItem(5, 1, item)
item = QtWidgets.QTableWidgetItem()
self.BaseVowsTableWidget.setItem(5, 2, item)
item = QtWidgets.QTableWidgetItem()
self.BaseVowsTableWidget.setItem(6, 0, item)
item = QtWidgets.QTableWidgetItem()
self.BaseVowsTableWidget.setItem(6, 1, item)
self.groupBox_10 = QtWidgets.QGroupBox(self.BaseTab)
self.groupBox_10.setGeometry(QtCore.QRect(330, 0, 341, 421))
self.groupBox_10.setObjectName("groupBox_10")
self.BaseConsTableWidget = QtWidgets.QTableWidget(self.groupBox_10)
self.BaseConsTableWidget.setGeometry(QtCore.QRect(10, 20, 321, 391))
self.BaseConsTableWidget.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
self.BaseConsTableWidget.setObjectName("BaseConsTableWidget")
self.BaseConsTableWidget.setColumnCount(3)
self.BaseConsTableWidget.setRowCount(12)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setVerticalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setVerticalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setVerticalHeaderItem(2, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setVerticalHeaderItem(3, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setVerticalHeaderItem(4, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setVerticalHeaderItem(5, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setVerticalHeaderItem(6, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setVerticalHeaderItem(7, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setVerticalHeaderItem(8, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setVerticalHeaderItem(9, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setVerticalHeaderItem(10, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setVerticalHeaderItem(11, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setHorizontalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setHorizontalHeaderItem(2, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setItem(0, 0, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setItem(0, 1, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setItem(1, 0, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setItem(1, 1, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setItem(2, 0, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setItem(2, 1, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setItem(3, 0, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setItem(3, 1, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setItem(4, 0, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setItem(4, 1, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setItem(5, 0, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setItem(5, 1, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setItem(5, 2, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setItem(6, 0, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setItem(6, 1, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setItem(7, 0, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setItem(7, 1, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setItem(8, 0, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setItem(8, 1, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setItem(8, 2, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setItem(9, 0, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setItem(9, 1, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setItem(9, 2, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setItem(10, 0, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setItem(10, 1, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setItem(10, 2, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setItem(11, 0, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setItem(11, 1, item)
item = QtWidgets.QTableWidgetItem()
self.BaseConsTableWidget.setItem(11, 2, item)
self.groupBox_12 = QtWidgets.QGroupBox(self.BaseTab)
self.groupBox_12.setGeometry(QtCore.QRect(670, 0, 171, 421))
self.groupBox_12.setObjectName("groupBox_12")
self.BaseSheetTextEdit = QtWidgets.QPlainTextEdit(self.groupBox_12)
self.BaseSheetTextEdit.setGeometry(QtCore.QRect(10, 20, 151, 391))
self.BaseSheetTextEdit.setReadOnly(True)
self.BaseSheetTextEdit.setObjectName("BaseSheetTextEdit")
self.tabWidget.addTab(self.BaseTab, "")
self.gridLayout.addWidget(self.tabWidget, 0, 0, 1, 3)
self.label = QtWidgets.QLabel(CiphersDialog)
self.label.setObjectName("label")
self.gridLayout.addWidget(self.label, 1, 0, 1, 1)
self.retranslateUi(CiphersDialog)
self.tabWidget.setCurrentIndex(0)
self.buttonBox.accepted.connect(CiphersDialog.accept)
self.buttonBox.rejected.connect(CiphersDialog.reject)
QtCore.QMetaObject.connectSlotsByName(CiphersDialog)
def retranslateUi(self, CiphersDialog):
_translate = QtCore.QCoreApplication.translate
CiphersDialog.setWindowTitle(_translate("CiphersDialog", "Ciphers"))
self.groupBox_1.setTitle(_translate("CiphersDialog", "Consonants"))
__sortingEnabled = self.IPAConsTableWidget.isSortingEnabled()
self.IPAConsTableWidget.setSortingEnabled(False)
item = self.IPAConsTableWidget.item(0, 0)
item.setText(_translate("CiphersDialog", "p"))
item = self.IPAConsTableWidget.item(0, 1)
item.setText(_translate("CiphersDialog", "b"))
item = self.IPAConsTableWidget.item(1, 0)
item.setText(_translate("CiphersDialog", "t"))
item = self.IPAConsTableWidget.item(1, 1)
item.setText(_translate("CiphersDialog", "d"))
item = self.IPAConsTableWidget.item(2, 0)
item.setText(_translate("CiphersDialog", "k"))
item = self.IPAConsTableWidget.item(2, 1)
item.setText(_translate("CiphersDialog", "g"))
item = self.IPAConsTableWidget.item(3, 0)
item.setText(_translate("CiphersDialog", "θ"))
item = self.IPAConsTableWidget.item(3, 1)
item.setText(_translate("CiphersDialog", "ð"))
item = self.IPAConsTableWidget.item(4, 0)
item.setText(_translate("CiphersDialog", "f"))
item = self.IPAConsTableWidget.item(4, 1)
item.setText(_translate("CiphersDialog", "v"))
item = self.IPAConsTableWidget.item(5, 0)
item.setText(_translate("CiphersDialog", "s"))
item = self.IPAConsTableWidget.item(5, 1)
item.setText(_translate("CiphersDialog", "z"))
item = self.IPAConsTableWidget.item(5, 2)
item.setText(_translate("CiphersDialog", "ʦ"))
item = self.IPAConsTableWidget.item(6, 0)
item.setText(_translate("CiphersDialog", "ʃ"))
item = self.IPAConsTableWidget.item(6, 1)
item.setText(_translate("CiphersDialog", "ʒ"))
item = self.IPAConsTableWidget.item(7, 0)
item.setText(_translate("CiphersDialog", "ʧ"))
item = self.IPAConsTableWidget.item(7, 1)
item.setText(_translate("CiphersDialog", "ʤ"))
item = self.IPAConsTableWidget.item(8, 0)
item.setText(_translate("CiphersDialog", "h"))
item = self.IPAConsTableWidget.item(8, 1)
item.setText(_translate("CiphersDialog", "x"))
item = self.IPAConsTableWidget.item(8, 2)
item.setText(_translate("CiphersDialog", "ɲ"))
item = self.IPAConsTableWidget.item(9, 0)
item.setText(_translate("CiphersDialog", "n"))
item = self.IPAConsTableWidget.item(9, 1)
item.setText(_translate("CiphersDialog", "m"))
item = self.IPAConsTableWidget.item(9, 2)
item.setText(_translate("CiphersDialog", "ŋ"))
item = self.IPAConsTableWidget.item(10, 0)
item.setText(_translate("CiphersDialog", "l"))
item = self.IPAConsTableWidget.item(10, 1)
item.setText(_translate("CiphersDialog", "ɹ"))
item = self.IPAConsTableWidget.item(10, 2)
item.setText(_translate("CiphersDialog", "r"))
item = self.IPAConsTableWidget.item(11, 0)
item.setText(_translate("CiphersDialog", "ʔ"))
item = self.IPAConsTableWidget.item(11, 1)
item.setText(_translate("CiphersDialog", "j"))
item = self.IPAConsTableWidget.item(11, 2)
item.setText(_translate("CiphersDialog", "w"))
self.IPAConsTableWidget.setSortingEnabled(__sortingEnabled)
self.groupBox_2.setTitle(_translate("CiphersDialog", "Vowels"))
__sortingEnabled = self.IPAVowsTableWidget.isSortingEnabled()
self.IPAVowsTableWidget.setSortingEnabled(False)
item = self.IPAVowsTableWidget.item(0, 0)
item.setText(_translate("CiphersDialog", "ɑ"))
item = self.IPAVowsTableWidget.item(0, 1)
item.setText(_translate("CiphersDialog", "a"))
item = self.IPAVowsTableWidget.item(1, 0)
item.setText(_translate("CiphersDialog", "ɪ"))
item = self.IPAVowsTableWidget.item(1, 1)
item.setText(_translate("CiphersDialog", "i"))
item = self.IPAVowsTableWidget.item(2, 0)
item.setText(_translate("CiphersDialog", "ʊ"))
item = self.IPAVowsTableWidget.item(2, 1)
item.setText(_translate("CiphersDialog", "u"))
item = self.IPAVowsTableWidget.item(3, 0)
item.setText(_translate("CiphersDialog", "ɛ"))
item = self.IPAVowsTableWidget.item(3, 1)
item.setText(_translate("CiphersDialog", "e"))
item = self.IPAVowsTableWidget.item(4, 0)
item.setText(_translate("CiphersDialog", "o"))
item = self.IPAVowsTableWidget.item(4, 1)
item.setText(_translate("CiphersDialog", "ɔ"))
item = self.IPAVowsTableWidget.item(5, 0)
item.setText(_translate("CiphersDialog", "ə"))
item = self.IPAVowsTableWidget.item(5, 1)
item.setText(_translate("CiphersDialog", "ʌ"))
item = self.IPAVowsTableWidget.item(6, 0)
item.setText(_translate("CiphersDialog", "æ"))
self.IPAVowsTableWidget.setSortingEnabled(__sortingEnabled)
self.groupBox_3.setTitle(_translate("CiphersDialog", "Raw Sheet"))
self.IPASheetTextEdit.setPlainText(_translate("CiphersDialog", "ipa_sheet = {\n"
" \'title\': \'IPA\',\n"
" \'consonants\':\n"
" [\n"
" \'p\',\'b\',\'\',\n"
" \'t\',\'d\',\'\',\n"
" \'k\',\'g\',\'\',\n"
" \'θ\',\'ð\',\'\',\n"
" \'f\',\'v\',\'\',\n"
" \'s\',\'z\',\'ʦ\',\n"
" \'ʃ\',\'ʒ\',\'\',\n"
" \'ʧ\',\'ʤ\',\'\',\n"
" \'h\',\'x\',\'ɲ\',\n"
" \'n\',\'m\',\'ŋ\',\n"
" \'l\',\'ɹ\',\'r\',\n"
" \'ʔ\',\'j\',\'w\'\n"
" ],\n"
" \'vowels\':\n"
" [\n"
" \'ɑ\',\'a\',\'\',\n"
" \'ɪ\',\'i\',\'\',\n"
" \'ʊ\',\'u\',\'\',\n"
" \'ɛ\',\'e\',\'\',\n"
" \'o\',\'ɔ\',\'\',\n"
" \'ə\',\'ʌ\',\'\',\n"
" \'æ\',\'\',\'\'\n"
" ]\n"
"}"))
self.IPADesc.setHtml(_translate("CiphersDialog", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'MS Shell Dlg 2\'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">These are the selected IPA phonemes, from which the other 3 default sheets are derived. Every transliterate standardizes input to this sheet, and then converts from this to the desired sheet. Syllable separation also works with this sheet. The default of defaults.</p></body></html>"))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.IPATab), _translate("CiphersDialog", "IPA"))
self.groupBox_4.setTitle(_translate("CiphersDialog", "Consonants"))
__sortingEnabled = self.KeyConsTableWidget.isSortingEnabled()
self.KeyConsTableWidget.setSortingEnabled(False)
item = self.KeyConsTableWidget.item(0, 0)
item.setText(_translate("CiphersDialog", "9"))
item = self.KeyConsTableWidget.item(0, 1)
item.setText(_translate("CiphersDialog", "b"))
item = self.KeyConsTableWidget.item(1, 0)
item.setText(_translate("CiphersDialog", "1"))
item = self.KeyConsTableWidget.item(1, 1)
item.setText(_translate("CiphersDialog", "d"))
item = self.KeyConsTableWidget.item(2, 0)
item.setText(_translate("CiphersDialog", "7"))
item = self.KeyConsTableWidget.item(2, 1)
item.setText(_translate("CiphersDialog", "g"))
item = self.KeyConsTableWidget.item(3, 0)
item.setText(_translate("CiphersDialog", "f"))
item = self.KeyConsTableWidget.item(3, 1)
item.setText(_translate("CiphersDialog", "t"))
item = self.KeyConsTableWidget.item(4, 0)
item.setText(_translate("CiphersDialog", "8"))
item = self.KeyConsTableWidget.item(4, 1)
item.setText(_translate("CiphersDialog", "v"))
item = self.KeyConsTableWidget.item(5, 0)
item.setText(_translate("CiphersDialog", "0"))
item = self.KeyConsTableWidget.item(5, 1)
item.setText(_translate("CiphersDialog", "z"))
item = self.KeyConsTableWidget.item(5, 2)
item.setText(_translate("CiphersDialog", "c"))
item = self.KeyConsTableWidget.item(6, 0)
item.setText(_translate("CiphersDialog", "q"))
item = self.KeyConsTableWidget.item(6, 1)
item.setText(_translate("CiphersDialog", "p"))
item = self.KeyConsTableWidget.item(7, 0)
item.setText(_translate("CiphersDialog", "6"))
item = self.KeyConsTableWidget.item(7, 1)
item.setText(_translate("CiphersDialog", "j"))
item = self.KeyConsTableWidget.item(8, 0)
item.setText(_translate("CiphersDialog", "h"))
item = self.KeyConsTableWidget.item(8, 1)
item.setText(_translate("CiphersDialog", "k"))
item = self.KeyConsTableWidget.item(8, 2)
item.setText(_translate("CiphersDialog", "m"))
item = self.KeyConsTableWidget.item(9, 0)
item.setText(_translate("CiphersDialog", "2"))
item = self.KeyConsTableWidget.item(9, 1)
item.setText(_translate("CiphersDialog", "3"))
item = self.KeyConsTableWidget.item(9, 2)
item.setText(_translate("CiphersDialog", "n"))
item = self.KeyConsTableWidget.item(10, 0)
item.setText(_translate("CiphersDialog", "5"))
item = self.KeyConsTableWidget.item(10, 1)
item.setText(_translate("CiphersDialog", "4"))
item = self.KeyConsTableWidget.item(10, 2)
item.setText(_translate("CiphersDialog", "r"))
item = self.KeyConsTableWidget.item(11, 0)
item.setText(_translate("CiphersDialog", "l"))
item = self.KeyConsTableWidget.item(11, 1)
item.setText(_translate("CiphersDialog", "y"))
item = self.KeyConsTableWidget.item(11, 2)
item.setText(_translate("CiphersDialog", "w"))
self.KeyConsTableWidget.setSortingEnabled(__sortingEnabled)
self.KeyDesc.setHtml(_translate("CiphersDialog", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'MS Shell Dlg 2\'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Key is a one-to-one alphanumeric representation of the selected IPA sounds. A single symbol for every phoneme. Incidentally, the numbers 0-9 are mapped according to the mnemonic major system. The custom font Keyform is for this sheet.</p></body></html>"))
self.groupBox_5.setTitle(_translate("CiphersDialog", "Vowels"))
__sortingEnabled = self.KeyVowsTableWidget.isSortingEnabled()
self.KeyVowsTableWidget.setSortingEnabled(False)
item = self.KeyVowsTableWidget.item(0, 0)
item.setText(_translate("CiphersDialog", "a"))
item = self.KeyVowsTableWidget.item(0, 1)
item.setText(_translate("CiphersDialog", "&"))
item = self.KeyVowsTableWidget.item(1, 0)
item.setText(_translate("CiphersDialog", "i"))
item = self.KeyVowsTableWidget.item(1, 1)
item.setText(_translate("CiphersDialog", "#"))
item = self.KeyVowsTableWidget.item(2, 0)
item.setText(_translate("CiphersDialog", "u"))
item = self.KeyVowsTableWidget.item(2, 1)
item.setText(_translate("CiphersDialog", "$"))
item = self.KeyVowsTableWidget.item(3, 0)
item.setText(_translate("CiphersDialog", "e"))
item = self.KeyVowsTableWidget.item(3, 1)
item.setText(_translate("CiphersDialog", "%"))
item = self.KeyVowsTableWidget.item(4, 0)
item.setText(_translate("CiphersDialog", "o"))
item = self.KeyVowsTableWidget.item(4, 1)
item.setText(_translate("CiphersDialog", "@"))
item = self.KeyVowsTableWidget.item(5, 0)
item.setText(_translate("CiphersDialog", "x"))
item = self.KeyVowsTableWidget.item(5, 1)
item.setText(_translate("CiphersDialog", "="))
item = self.KeyVowsTableWidget.item(6, 0)
item.setText(_translate("CiphersDialog", "s"))
self.KeyVowsTableWidget.setSortingEnabled(__sortingEnabled)
self.groupBox_6.setTitle(_translate("CiphersDialog", "Raw Sheet"))
self.KeySheetTextEdit.setPlainText(_translate("CiphersDialog", "key_sheet = {\n"
" \'title\' : \'Key\',\n"
" \'consonants\':\n"
" [\n"
" \'9\',\'b\',\'\',\n"
" \'1\',\'d\',\'\',\n"
" \'7\',\'g\',\'\',\n"
" \'f\',\'t\',\'\',\n"
" \'8\',\'v\',\'\',\n"
" \'0\',\'z\',\'c\',\n"
" \'q\',\'p\',\'\',\n"
" \'6\',\'j\',\'\',\n"
" \'h\',\'k\',\'m\',\n"
" \'2\',\'3\',\'n\',\n"
" \'5\',\'4\',\'r\',\n"
" \'l\',\'y\',\'w\'\n"
" ],\n"
" \'vowels\':\n"
" [\n"
" \'a\',\'&\',\'\',\n"
" \'i\',\'#\',\'\',\n"
" \'u\',\'$\',\'\',\n"
" \'e\',\'%\',\'\',\n"
" \'o\',\'@\',\'\',\n"
" \'x\',\'=\',\'\',\n"
" \'s\',\'\',\'\'\n"
" ]\n"
"}"))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.KeyTab), _translate("CiphersDialog", "Key"))
self.groupBox_7.setTitle(_translate("CiphersDialog", "Consonants"))
item = self.CipherConsTableWidget.verticalHeaderItem(0)
item.setText(_translate("CiphersDialog", "b"))
item = self.CipherConsTableWidget.verticalHeaderItem(1)
item.setText(_translate("CiphersDialog", "d"))
item = self.CipherConsTableWidget.verticalHeaderItem(2)
item.setText(_translate("CiphersDialog", "g"))
item = self.CipherConsTableWidget.verticalHeaderItem(3)
item.setText(_translate("CiphersDialog", "t"))
item = self.CipherConsTableWidget.verticalHeaderItem(4)
item.setText(_translate("CiphersDialog", "p"))
item = self.CipherConsTableWidget.verticalHeaderItem(5)
item.setText(_translate("CiphersDialog", "c"))
item = self.CipherConsTableWidget.verticalHeaderItem(6)
item.setText(_translate("CiphersDialog", "k"))
item = self.CipherConsTableWidget.verticalHeaderItem(7)
item.setText(_translate("CiphersDialog", "q"))
item = self.CipherConsTableWidget.verticalHeaderItem(8)
item.setText(_translate("CiphersDialog", "h"))
item = self.CipherConsTableWidget.verticalHeaderItem(9)
item.setText(_translate("CiphersDialog", "m"))
item = self.CipherConsTableWidget.verticalHeaderItem(10)
item.setText(_translate("CiphersDialog", "r"))
item = self.CipherConsTableWidget.verticalHeaderItem(11)
item.setText(_translate("CiphersDialog", "w"))
item = self.CipherConsTableWidget.horizontalHeaderItem(1)
item.setText(_translate("CiphersDialog", "f"))
item = self.CipherConsTableWidget.horizontalHeaderItem(2)
item.setText(_translate("CiphersDialog", "n"))
__sortingEnabled = self.CipherConsTableWidget.isSortingEnabled()
self.CipherConsTableWidget.setSortingEnabled(False)
item = self.CipherConsTableWidget.item(0, 0)
item.setText(_translate("CiphersDialog", "b"))
item = self.CipherConsTableWidget.item(0, 1)
item.setText(_translate("CiphersDialog", "bf"))
item = self.CipherConsTableWidget.item(1, 0)
item.setText(_translate("CiphersDialog", "d"))
item = self.CipherConsTableWidget.item(1, 1)
item.setText(_translate("CiphersDialog", "df"))
item = self.CipherConsTableWidget.item(2, 0)
item.setText(_translate("CiphersDialog", "g"))
item = self.CipherConsTableWidget.item(2, 1)
item.setText(_translate("CiphersDialog", "gf"))
item = self.CipherConsTableWidget.item(3, 0)
item.setText(_translate("CiphersDialog", "t"))
item = self.CipherConsTableWidget.item(3, 1)
item.setText(_translate("CiphersDialog", "tf"))
item = self.CipherConsTableWidget.item(4, 0)
item.setText(_translate("CiphersDialog", "p"))
item = self.CipherConsTableWidget.item(4, 1)
item.setText(_translate("CiphersDialog", "pf"))
item = self.CipherConsTableWidget.item(5, 0)
item.setText(_translate("CiphersDialog", "c"))
item = self.CipherConsTableWidget.item(5, 1)
item.setText(_translate("CiphersDialog", "cf"))
item = self.CipherConsTableWidget.item(5, 2)
item.setText(_translate("CiphersDialog", "cn"))
item = self.CipherConsTableWidget.item(6, 0)
item.setText(_translate("CiphersDialog", "k"))
item = self.CipherConsTableWidget.item(6, 1)
item.setText(_translate("CiphersDialog", "kf"))
item = self.CipherConsTableWidget.item(7, 0)
item.setText(_translate("CiphersDialog", "q"))
item = self.CipherConsTableWidget.item(7, 1)
item.setText(_translate("CiphersDialog", "qf"))
item = self.CipherConsTableWidget.item(8, 0)
item.setText(_translate("CiphersDialog", "h"))
item = self.CipherConsTableWidget.item(8, 1)
item.setText(_translate("CiphersDialog", "hf"))
item = self.CipherConsTableWidget.item(8, 2)
item.setText(_translate("CiphersDialog", "hn"))
item = self.CipherConsTableWidget.item(9, 0)
item.setText(_translate("CiphersDialog", "m"))
item = self.CipherConsTableWidget.item(9, 1)
item.setText(_translate("CiphersDialog", "mf"))
item = self.CipherConsTableWidget.item(9, 2)
item.setText(_translate("CiphersDialog", "mn"))
item = self.CipherConsTableWidget.item(10, 0)
item.setText(_translate("CiphersDialog", "r"))
item = self.CipherConsTableWidget.item(10, 1)
item.setText(_translate("CiphersDialog", "rf"))
item = self.CipherConsTableWidget.item(10, 2)
item.setText(_translate("CiphersDialog", "rn"))
item = self.CipherConsTableWidget.item(11, 0)
item.setText(_translate("CiphersDialog", "w"))
item = self.CipherConsTableWidget.item(11, 1)
item.setText(_translate("CiphersDialog", "wf"))
item = self.CipherConsTableWidget.item(11, 2)
item.setText(_translate("CiphersDialog", "wn"))
self.CipherConsTableWidget.setSortingEnabled(__sortingEnabled)
self.groupBox_9.setTitle(_translate("CiphersDialog", "Raw Sheet"))
self.CipherSheetTextEdit.setPlainText(_translate("CiphersDialog", "cipher_sheet = {\n"
" \'title\' : \'Cipher\',\n"
" \'consonants\':\n"
" [\n"
" \'b\',\'bf\',\'\',\n"
" \'d\',\'df\',\'\',\n"
" \'g\',\'gf\',\'\',\n"
" \'t\',\'tf\',\'\',\n"
" \'p\',\'pf\',\'\',\n"
" \'c\',\'cf\',\'cn\',\n"
" \'k\',\'kf\',\'\',\n"
" \'q\',\'qf\',\'\',\n"
" \'h\',\'hf\',\'hn\',\n"
" \'m\',\'mf\',\'mn\',\n"
" \'r\',\'rf\',\'rn\',\n"
" \'w\',\'wf\',\'wn\'\n"
" ],\n"
" \'vowels\' :\n"
" [\n"
" \'l\',\'lf\',\'\',\n"
" \'j\',\'jf\',\'\',\n"
" \'y\',\'yf\',\'\',\n"
" \'z\',\'zf\',\'\',\n"
" \'v\',\'vf\',\'\',\n"
" \'x\',\'xf\',\'\',\n"
" \'s\',\'\',\'\'\n"
" ]\n"
"}"))
self.groupBox_8.setTitle(_translate("CiphersDialog", "Vowels"))
item = self.CipherVowsTableWidget.verticalHeaderItem(0)
item.setText(_translate("CiphersDialog", "l"))
item = self.CipherVowsTableWidget.verticalHeaderItem(1)
item.setText(_translate("CiphersDialog", "j"))
item = self.CipherVowsTableWidget.verticalHeaderItem(2)
item.setText(_translate("CiphersDialog", "y"))
item = self.CipherVowsTableWidget.verticalHeaderItem(3)
item.setText(_translate("CiphersDialog", "z"))
item = self.CipherVowsTableWidget.verticalHeaderItem(4)
item.setText(_translate("CiphersDialog", "v"))
item = self.CipherVowsTableWidget.verticalHeaderItem(5)
item.setText(_translate("CiphersDialog", "x"))
item = self.CipherVowsTableWidget.verticalHeaderItem(6)
item.setText(_translate("CiphersDialog", "s"))
item = self.CipherVowsTableWidget.horizontalHeaderItem(1)
item.setText(_translate("CiphersDialog", "f"))
item = self.CipherVowsTableWidget.horizontalHeaderItem(2)
item.setText(_translate("CiphersDialog", "n"))
__sortingEnabled = self.CipherVowsTableWidget.isSortingEnabled()
self.CipherVowsTableWidget.setSortingEnabled(False)
item = self.CipherVowsTableWidget.item(0, 0)
item.setText(_translate("CiphersDialog", "l"))
item = self.CipherVowsTableWidget.item(0, 1)
item.setText(_translate("CiphersDialog", "lf"))
item = self.CipherVowsTableWidget.item(1, 0)
item.setText(_translate("CiphersDialog", "j"))
item = self.CipherVowsTableWidget.item(1, 1)
item.setText(_translate("CiphersDialog", "jf"))
item = self.CipherVowsTableWidget.item(2, 0)
item.setText(_translate("CiphersDialog", "y"))
item = self.CipherVowsTableWidget.item(2, 1)
item.setText(_translate("CiphersDialog", "yf"))
item = self.CipherVowsTableWidget.item(3, 0)
item.setText(_translate("CiphersDialog", "z"))
item = self.CipherVowsTableWidget.item(3, 1)
item.setText(_translate("CiphersDialog", "zf"))
item = self.CipherVowsTableWidget.item(4, 0)
item.setText(_translate("CiphersDialog", "v"))
item = self.CipherVowsTableWidget.item(4, 1)
item.setText(_translate("CiphersDialog", "vf"))
item = self.CipherVowsTableWidget.item(5, 0)
item.setText(_translate("CiphersDialog", "x"))
item = self.CipherVowsTableWidget.item(5, 1)
item.setText(_translate("CiphersDialog", "xf"))
item = self.CipherVowsTableWidget.item(6, 0)
item.setText(_translate("CiphersDialog", "s"))
item = self.CipherVowsTableWidget.item(6, 1)
item.setText(_translate("CiphersDialog", "sf"))
self.CipherVowsTableWidget.setSortingEnabled(__sortingEnabled)
self.CipherDesc.setHtml(_translate("CiphersDialog", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'MS Shell Dlg 2\'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Cipher is the distinct system of base symbols and modifiers. Capitalization can work, but it doesn\'t, for compatibility with the other sheets. This uses all the consonant symbols in the Latin alphabet, and no vowels, to further obscure the phonetics. This is the only \'cipher\' in the default sheets.</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p></body></html>"))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.CipherTab), _translate("CiphersDialog", "Cipher"))
self.BaseDesc.setHtml(_translate("CiphersDialog", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'MS Shell Dlg 2\'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Base is a one-to-one representation of IPA, like Key, but prioritizes readability rather than conciseness. This only uses alphabetical characters and brackets. Very easy to pronounce.</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p></body></html>"))
self.groupBox_11.setTitle(_translate("CiphersDialog", "Vowels"))
__sortingEnabled = self.BaseVowsTableWidget.isSortingEnabled()
self.BaseVowsTableWidget.setSortingEnabled(False)
item = self.BaseVowsTableWidget.item(0, 0)
item.setText(_translate("CiphersDialog", "a"))
item = self.BaseVowsTableWidget.item(0, 1)
item.setText(_translate("CiphersDialog", "(aa)"))
item = self.BaseVowsTableWidget.item(1, 0)
item.setText(_translate("CiphersDialog", "i"))
item = self.BaseVowsTableWidget.item(1, 1)
item.setText(_translate("CiphersDialog", "(ii)"))
item = self.BaseVowsTableWidget.item(2, 0)
item.setText(_translate("CiphersDialog", "u"))
item = self.BaseVowsTableWidget.item(2, 1)
item.setText(_translate("CiphersDialog", "(oo)"))
item = self.BaseVowsTableWidget.item(3, 0)
item.setText(_translate("CiphersDialog", "e"))
item = self.BaseVowsTableWidget.item(3, 1)
item.setText(_translate("CiphersDialog", "(ee)"))
item = self.BaseVowsTableWidget.item(4, 0)
item.setText(_translate("CiphersDialog", "o"))
item = self.BaseVowsTableWidget.item(4, 1)
item.setText(_translate("CiphersDialog", "(aw)"))
item = self.BaseVowsTableWidget.item(5, 0)
item.setText(_translate("CiphersDialog", "(uh)"))
item = self.BaseVowsTableWidget.item(5, 1)
item.setText(_translate("CiphersDialog", "(ah)"))
item = self.BaseVowsTableWidget.item(6, 0)
item.setText(_translate("CiphersDialog", "(ea)"))
self.BaseVowsTableWidget.setSortingEnabled(__sortingEnabled)
self.groupBox_10.setTitle(_translate("CiphersDialog", "Consonants"))
__sortingEnabled = self.BaseConsTableWidget.isSortingEnabled()
self.BaseConsTableWidget.setSortingEnabled(False)
item = self.BaseConsTableWidget.item(0, 0)
item.setText(_translate("CiphersDialog", "p"))
item = self.BaseConsTableWidget.item(0, 1)
item.setText(_translate("CiphersDialog", "b"))
item = self.BaseConsTableWidget.item(1, 0)
item.setText(_translate("CiphersDialog", "t"))
item = self.BaseConsTableWidget.item(1, 1)
item.setText(_translate("CiphersDialog", "d"))
item = self.BaseConsTableWidget.item(2, 0)
item.setText(_translate("CiphersDialog", "k"))
item = self.BaseConsTableWidget.item(2, 1)
item.setText(_translate("CiphersDialog", "g"))
item = self.BaseConsTableWidget.item(3, 0)
item.setText(_translate("CiphersDialog", "(th)"))
item = self.BaseConsTableWidget.item(3, 1)
item.setText(_translate("CiphersDialog", "(dth)"))
item = self.BaseConsTableWidget.item(4, 0)
item.setText(_translate("CiphersDialog", "f"))
item = self.BaseConsTableWidget.item(4, 1)
item.setText(_translate("CiphersDialog", "v"))
item = self.BaseConsTableWidget.item(5, 0)
item.setText(_translate("CiphersDialog", "s"))
item = self.BaseConsTableWidget.item(5, 1)
item.setText(_translate("CiphersDialog", "z"))
item = self.BaseConsTableWidget.item(5, 2)
item.setText(_translate("CiphersDialog", "(ts)"))
item = self.BaseConsTableWidget.item(6, 0)
item.setText(_translate("CiphersDialog", "(sh)"))
item = self.BaseConsTableWidget.item(6, 1)
item.setText(_translate("CiphersDialog", "(jh)"))
item = self.BaseConsTableWidget.item(7, 0)
item.setText(_translate("CiphersDialog", "(ch)"))
item = self.BaseConsTableWidget.item(7, 1)
item.setText(_translate("CiphersDialog", "j"))
item = self.BaseConsTableWidget.item(8, 0)
item.setText(_translate("CiphersDialog", "h"))
item = self.BaseConsTableWidget.item(8, 1)
item.setText(_translate("CiphersDialog", "(kh)"))
item = self.BaseConsTableWidget.item(8, 2)
item.setText(_translate("CiphersDialog", "(ny)"))
item = self.BaseConsTableWidget.item(9, 0)
item.setText(_translate("CiphersDialog", "n"))
item = self.BaseConsTableWidget.item(9, 1)
item.setText(_translate("CiphersDialog", "m"))
item = self.BaseConsTableWidget.item(9, 2)
item.setText(_translate("CiphersDialog", "(ng)"))
item = self.BaseConsTableWidget.item(10, 0)
item.setText(_translate("CiphersDialog", "l"))
item = self.BaseConsTableWidget.item(10, 1)
item.setText(_translate("CiphersDialog", "r"))
item = self.BaseConsTableWidget.item(10, 2)
item.setText(_translate("CiphersDialog", "(rr)"))
item = self.BaseConsTableWidget.item(11, 0)
item.setText(_translate("CiphersDialog", "(-)"))
item = self.BaseConsTableWidget.item(11, 1)
item.setText(_translate("CiphersDialog", "y"))
item = self.BaseConsTableWidget.item(11, 2)
item.setText(_translate("CiphersDialog", "w"))
self.BaseConsTableWidget.setSortingEnabled(__sortingEnabled)
self.groupBox_12.setTitle(_translate("CiphersDialog", "Raw Sheet"))
self.BaseSheetTextEdit.setPlainText(_translate("CiphersDialog", "base_sheet = {\n"
" \'title\' : \'Base\',\n"
" \'consonants\':\n"
" [\n"
" \'p\',\'b\',\'\',\n"
" \'t\',\'d\',\'\',\n"
" \'k\',\'g\',\'\',\n"
" \'(th)\',\'(dth)\',\'\',\n"
" \'f\',\'v\',\'\',\n"
" \'s\',\'z\',\'(ts)\',\n"
" \'(sh)\',\'(jh)\',\'\',\n"
" \'(ch)\',\'j\',\'\',\n"
" \'h\',\'(kh)\',\'(ny)\',\n"
" \'n\',\'m\',\'(ng)\',\n"
" \'l\',\'r\',\'(rr)\',\n"
" \'(-)\',\'y\',\'w\'\n"
" ],\n"
" \'vowels\':\n"
" [\n"
" \'a\',\'(aa)\',\'\',\n"
" \'i\',\'(ii)\',\'\',\n"
" \'u\',\'(oo)\',\'\',\n"
" \'e\',\'(ee)\',\'\',\n"
" \'o\',\'(aw)\',\'\',\n"
" \'(uh)\',\'(ah)\',\'\',\n"
" \'(ea)\',\'\',\'\'\n"
" ]\n"
"}"))
self.tabWidget.setTabText(self.tabWidget.indexOf(self.BaseTab), _translate("CiphersDialog", "Base"))
self.label.setText(_translate("CiphersDialog", "This is ugly hard-coded documentation"))
```
#### File: cifero/ciferogui/main.py
```python
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_CiferoMain(object):
def setupUi(self, CiferoMain):
CiferoMain.setObjectName("CiferoMain")
CiferoMain.resize(429, 293)
self.centralWidget = QtWidgets.QWidget(CiferoMain)
self.centralWidget.setObjectName("centralWidget")
self.gridLayout_2 = QtWidgets.QGridLayout(self.centralWidget)
self.gridLayout_2.setContentsMargins(11, 11, 11, 11)
self.gridLayout_2.setSpacing(6)
self.gridLayout_2.setObjectName("gridLayout_2")
self.OutputModeCombo = QtWidgets.QComboBox(self.centralWidget)
self.OutputModeCombo.setObjectName("OutputModeCombo")
self.gridLayout_2.addWidget(self.OutputModeCombo, 0, 3, 1, 1)
self.InputModeCombo = QtWidgets.QComboBox(self.centralWidget)
self.InputModeCombo.setObjectName("InputModeCombo")
self.gridLayout_2.addWidget(self.InputModeCombo, 0, 0, 1, 1)
self.CiphersButton = QtWidgets.QPushButton(self.centralWidget)
self.CiphersButton.setObjectName("CiphersButton")
self.gridLayout_2.addWidget(self.CiphersButton, 7, 3, 1, 1)
self.QuitButton = QtWidgets.QPushButton(self.centralWidget)
self.QuitButton.setObjectName("QuitButton")
self.gridLayout_2.addWidget(self.QuitButton, 7, 2, 1, 1)
self.CentraButtonArea = QtWidgets.QVBoxLayout()
self.CentraButtonArea.setSpacing(6)
self.CentraButtonArea.setObjectName("CentraButtonArea")
self.SwapButton = QtWidgets.QPushButton(self.centralWidget)
self.SwapButton.setObjectName("SwapButton")
self.CentraButtonArea.addWidget(self.SwapButton)
self.RawTransliterateButton = QtWidgets.QPushButton(self.centralWidget)
self.RawTransliterateButton.setObjectName("RawTransliterateButton")
self.CentraButtonArea.addWidget(self.RawTransliterateButton)
self.StripButton = QtWidgets.QPushButton(self.centralWidget)
self.StripButton.setObjectName("StripButton")
self.CentraButtonArea.addWidget(self.StripButton)
self.PasteButton = QtWidgets.QPushButton(self.centralWidget)
self.PasteButton.setObjectName("PasteButton")
self.CentraButtonArea.addWidget(self.PasteButton)
self.CopyButton = QtWidgets.QPushButton(self.centralWidget)
self.CopyButton.setObjectName("CopyButton")
self.CentraButtonArea.addWidget(self.CopyButton)
self.ClearButton = QtWidgets.QPushButton(self.centralWidget)
self.ClearButton.setObjectName("ClearButton")
self.CentraButtonArea.addWidget(self.ClearButton)
spacerItem = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.CentraButtonArea.addItem(spacerItem)
self.gridLayout_2.addLayout(self.CentraButtonArea, 2, 2, 1, 1)
self.SettingsButton = QtWidgets.QPushButton(self.centralWidget)
self.SettingsButton.setObjectName("SettingsButton")
self.gridLayout_2.addWidget(self.SettingsButton, 7, 0, 1, 1)
self.TransliterateButton = QtWidgets.QPushButton(self.centralWidget)
self.TransliterateButton.setObjectName("TransliterateButton")
self.gridLayout_2.addWidget(self.TransliterateButton, 0, 2, 1, 1)
self.OutputTextEdit = QtWidgets.QTextEdit(self.centralWidget)
self.OutputTextEdit.setObjectName("OutputTextEdit")
self.gridLayout_2.addWidget(self.OutputTextEdit, 2, 3, 1, 1)
self.InputTextEdit = QtWidgets.QPlainTextEdit(self.centralWidget)
self.InputTextEdit.setObjectName("InputTextEdit")
self.gridLayout_2.addWidget(self.InputTextEdit, 2, 0, 1, 1)
CiferoMain.setCentralWidget(self.centralWidget)
self.statusBar = QtWidgets.QStatusBar(CiferoMain)
self.statusBar.setObjectName("statusBar")
CiferoMain.setStatusBar(self.statusBar)
self.menuBar = QtWidgets.QMenuBar(CiferoMain)
self.menuBar.setGeometry(QtCore.QRect(0, 0, 429, 21))
self.menuBar.setObjectName("menuBar")
CiferoMain.setMenuBar(self.menuBar)
self.retranslateUi(CiferoMain)
QtCore.QMetaObject.connectSlotsByName(CiferoMain)
def retranslateUi(self, CiferoMain):
_translate = QtCore.QCoreApplication.translate
CiferoMain.setWindowTitle(_translate("CiferoMain", "Cifero 1.0"))
self.CiphersButton.setText(_translate("CiferoMain", "Ciphers"))
self.QuitButton.setText(_translate("CiferoMain", "Quit"))
self.SwapButton.setText(_translate("CiferoMain", "Swap"))
self.RawTransliterateButton.setText(_translate("CiferoMain", "Raw >>"))
self.StripButton.setText(_translate("CiferoMain", "Strip"))
self.PasteButton.setText(_translate("CiferoMain", "Paste Input"))
self.CopyButton.setText(_translate("CiferoMain", "Copy Output"))
self.ClearButton.setText(_translate("CiferoMain", "Clear"))
self.SettingsButton.setText(_translate("CiferoMain", "Settings"))
self.TransliterateButton.setText(_translate("CiferoMain", ">>"))
```
#### File: cifero/cifero/ciferomain.py
```python
import sys
from PyQt5.QtWidgets import QMainWindow,QDialog,QApplication,QAction,QShortcut
from PyQt5.QtGui import QKeySequence
import cifero.ciferogui.main
import cifero.ciferogui.settings
import cifero.ciferogui.about
import cifero.ciferogui.ciphers
from pathlib import Path
import pyperclip # TextEdit's .copy() doesn't work for some reason
import cifero.translit as tl
class Application(object):
'''Parent class to store Application data'''
def __init__(self):
self._default_syll_sep = "'"
self._default_sent_sep = " "
self.syll_sep = "'"
self.word_sep = " "
class MainWindow(QMainWindow, Application):
base_app = Application()
def __init__(self):
# defaults
self.modes = sorted(list(tl.sheets.sheetsdict.keys())+['English'])
super().__init__()
self.ui = cifero.ciferogui.main.Ui_CiferoMain()
self.ui.setupUi(self)
self.show()
self.ui.InputModeCombo.addItems(self.modes)
self.ui.OutputModeCombo.addItems(self.modes)
# set initial combobox
i = self.ui.InputModeCombo.findText('Key')
self.ui.InputModeCombo.setCurrentIndex(i)
i = self.ui.OutputModeCombo.findText('Cipher')
self.ui.OutputModeCombo.setCurrentIndex(i)
self.ui.OutputTextEdit.setReadOnly(True)
        # expose an About action on the menu bar (also bound to Shift+Ctrl+A below)
self.actionTest = QAction()
self.actionTest.setText('About')
self.actionTest.setToolTip('Shift+Ctrl+A')
self.actionTest.triggered.connect(self.open_about)
self.ui.menuBar.addAction(self.actionTest)
# some more shortcuts
QShortcut(QKeySequence('Shift+Ctrl+A'), self, self.open_about)
QShortcut(QKeySequence('Ctrl+Q'), self, self.quit)
QShortcut(QKeySequence('Ctrl+W'), self, self.swap)
QShortcut(QKeySequence('Ctrl+E'), self, self.clear)
QShortcut(QKeySequence('Ctrl+R'), self, self.raw_transliterate)
QShortcut(QKeySequence('Ctrl+T'), self, self.transliterate)
QShortcut(QKeySequence('Ctrl+D'), self, self.open_settings)
QShortcut(QKeySequence('Ctrl+F'), self, self.open_ciphers)
QShortcut(QKeySequence('Shift+Ctrl+S'), self, self.strip)
QShortcut(QKeySequence('Shift+Ctrl+V'), self, self.paste)
QShortcut(QKeySequence('Shift+Ctrl+C'), self, self.copy)
QShortcut(QKeySequence('Ctrl+1'),self, self.cycle_input_mode)
QShortcut(QKeySequence('Ctrl+2'),self, self.cycle_output_mode)
QShortcut(QKeySequence('Ctrl+Space'),self,self.ui.InputTextEdit.setFocus)
# hover tooltips
self.ui.InputModeCombo.setToolTip(
'Input mode (Ctrl+1)')
self.ui.OutputModeCombo.setToolTip(
'Output mode (Ctrl+2)')
self.ui.InputTextEdit.setToolTip(
'Input text (Ctrl+Space)')
self.ui.TransliterateButton.setToolTip(
'Transliterates, applying syllable and word separators (Ctrl+T)')
self.ui.SwapButton.setToolTip(
'Swaps input and output text, as well as their modes (Ctrl+W)')
self.ui.RawTransliterateButton.setToolTip(
'Transliterates, using spaces to separate words and ignoring syllable separators (Ctrl+R)')
self.ui.StripButton.setToolTip(
'''Strips punctuation, removing :;,.!?[](){}"'<> and *+-/~_, ignoring &#$%@=` (Shift+Ctrl+S)''')
self.ui.PasteButton.setToolTip(
'Pastes the text from the system clipboard to the input (Shift+Ctrl+V)')
self.ui.CopyButton.setToolTip(
'Copies the text in the output to the system clipboard (Shift+Ctrl+C)')
self.ui.ClearButton.setToolTip(
'Clears both input and output textboxes (Ctrl+E)')
self.ui.SettingsButton.setToolTip(
'Opens the settings dialog (Ctrl+D)')
self.ui.CiphersButton.setToolTip(
'Opens the documentation for the default cipher sheets (Ctrl+F)')
self.ui.QuitButton.setToolTip(
'Closes the program (Ctrl+Q)')
# connect buttons to functions
self.ui.TransliterateButton.clicked.connect(self.transliterate)
self.ui.SwapButton.clicked.connect(self.swap)
self.ui.RawTransliterateButton.clicked.connect(self.raw_transliterate)
self.ui.StripButton.clicked.connect(self.strip)
self.ui.PasteButton.clicked.connect(self.paste)
self.ui.CopyButton.clicked.connect(self.copy)
self.ui.ClearButton.clicked.connect(self.clear)
self.ui.SettingsButton.clicked.connect(self.open_settings)
self.ui.CiphersButton.clicked.connect(self.open_ciphers)
self.ui.QuitButton.clicked.connect(self.quit)
    def cycle_input_mode(self):
        # wrap around past the last mode; a bare i+1 would eventually set an
        # out-of-range index and leave the combobox blank
        i = self.ui.InputModeCombo.currentIndex()
        self.ui.InputModeCombo.setCurrentIndex((i + 1) % self.ui.InputModeCombo.count())
    def cycle_output_mode(self):
        i = self.ui.OutputModeCombo.currentIndex()
        self.ui.OutputModeCombo.setCurrentIndex((i + 1) % self.ui.OutputModeCombo.count())
def transliterate(self):
input_text = self.ui.InputTextEdit.toPlainText()
input_mode = self.ui.InputModeCombo.currentText()
output_mode = self.ui.OutputModeCombo.currentText()
try:
output_text = tl.transliterate(input_text,
input_mode,
output_mode,
syll_sep=self.base_app.syll_sep,
word_sep=self.base_app.word_sep)
except TypeError:
output_text = 'Something went wrong.'
self.ui.statusBar.showMessage('ERROR: Bad input.')
except tl.cmudict.WordNotFound:
output_text = 'English word not found in dictionary.'
self.ui.statusBar.showMessage('ERROR: Word not found in dictionary.')
else:
# this is run only if the excepts didn't catch anything
self.ui.statusBar.showMessage('Transliterated from {} to {}.'.format(
input_mode, output_mode))
# should finally: be used here?
self.ui.OutputTextEdit.setText(output_text)
def swap(self):
'''Swaps contents of input and output TextEdit's as well as their modes'''
output_text = self.ui.OutputTextEdit.toPlainText()
input_text = self.ui.InputTextEdit.toPlainText()
self.ui.OutputTextEdit.clear()
self.ui.OutputTextEdit.setPlainText(input_text)
self.ui.InputTextEdit.clear()
self.ui.InputTextEdit.setPlainText(output_text)
output_mode = self.ui.OutputModeCombo.currentIndex()
input_mode = self.ui.InputModeCombo.currentIndex()
self.ui.OutputModeCombo.setCurrentIndex(input_mode)
self.ui.InputModeCombo.setCurrentIndex(output_mode)
def raw_transliterate(self):
'''calls transliterate, then remove syllable separators, BUT NOT
SENTENCE SEPARATORS, they are changed to spaces'''
input_text = self.ui.InputTextEdit.toPlainText()
input_mode = self.ui.InputModeCombo.currentText()
output_mode = self.ui.OutputModeCombo.currentText()
try:
output_text = tl.raw_transliterate(input_text,
input_mode,
output_mode,
syll_sep=self.base_app.syll_sep,
word_sep=self.base_app.word_sep)
except TypeError:
output_text = 'Something went wrong.'
self.ui.statusBar.showMessage('ERROR: Bad input.')
except tl.cmudict.WordNotFound:
output_text = 'English word not found in dictionary.'
self.ui.statusBar.showMessage('ERROR: Word not found in dictionary.')
else:
# this is run only if the excepts didn't catch anything
            self.ui.statusBar.showMessage('Raw transliterated from {} to {}.'.format(
input_mode, output_mode))
# should finally: be used here?
self.ui.OutputTextEdit.setPlainText(output_text)
def strip(self):
'''Simply removes marks and symbols from input and puts them in output'''
input_text = self.ui.InputTextEdit.toPlainText()
self.ui.OutputTextEdit.setPlainText(tl.strip_marks_and_symbols(input_text))
        self.ui.statusBar.showMessage('Stripped punctuation marks and symbols from input.')
def paste(self):
'''Pastes contents of system clipboard to input TextEdit'''
self.ui.InputTextEdit.setPlainText(pyperclip.paste())
self.ui.statusBar.showMessage('Pasted from clipboard to input.')
def copy(self):
'''Copy contents of output TextEdit to system clipboard'''
output_text = self.ui.OutputTextEdit.toPlainText()
pyperclip.copy(output_text)
        self.ui.statusBar.showMessage('Copied from output to clipboard.')
def clear(self):
self.ui.OutputTextEdit.clear()
self.ui.InputTextEdit.clear()
self.ui.statusBar.showMessage('Cleared both input and output.')
def open_about(self):
self.AboutUi = AboutDialog()
def open_ciphers(self):
# Ciphers Dialog is read-only for now, no need to inherit data
self.CiphersUi = CiphersDialog()
def open_settings(self):
self.SettingsUi = SettingsDialog(self.base_app)
def quit(self):
sys.exit()
class SettingsDialog(QDialog):
def __init__(self, base_app):
super().__init__()
self.ui = cifero.ciferogui.settings.Ui_SettingsDialog()
self.ui.setupUi(self)
self.show()
self.base_app = base_app
# initial values remembered in case changes are cancelled
self.init_sentSep = base_app.word_sep
self.init_syllSep = base_app.syll_sep
self.ui.SentSepLineEdit.setText(base_app.word_sep)
self.ui.SyllSepLineEdit.setText(base_app.syll_sep)
self.ui.ResetButton.clicked.connect(self.reset_to_defaults)
self.ui.buttonBox.accepted.connect(self.update_settings)
self.ui.buttonBox.rejected.connect(self.close_without_update)
def reset_to_defaults(self):
# update base_app data
self.base_app.syll_sep = self.base_app._default_syll_sep
self.base_app.word_sep = self.base_app._default_sent_sep
# update view
self.ui.SentSepLineEdit.setText(self.base_app.word_sep)
self.ui.SyllSepLineEdit.setText(self.base_app.syll_sep)
def update_settings(self):
self.base_app.syll_sep = self.ui.SyllSepLineEdit.text()
self.base_app.word_sep = self.ui.SentSepLineEdit.text()
def close_without_update(self):
self.base_app.syll_sep = self.init_syllSep
self.base_app.word_sep = self.init_sentSep
self.done(0)
class CiphersDialog(QDialog):
def __init__(self):
super().__init__()
self.ui = cifero.ciferogui.ciphers.Ui_CiphersDialog()
self.ui.setupUi(self)
self.show()
class AboutDialog(QDialog):
def __init__(self):
super().__init__()
self.ui = cifero.ciferogui.about.Ui_AboutDialog()
self.ui.setupUi(self)
self.show()
def main():
app = QApplication(sys.argv)
w = MainWindow()
w.show()
sys.exit(app.exec_())
if __name__ == "__main__":
main()
```
#### File: cifero/cifero/sheets.py
```python
ipa_sheet = {
'title': 'IPA',
'consonants':
[
'p','b','',
't','d','',
'k','g','',
'θ','ð','',
'f','v','',
's','z','ʦ',
'ʃ','ʒ','',
'ʧ','ʤ','',
'h','x','ɲ',
'n','m','ŋ',
'l','ɹ','r',
'ʔ','j','w'
],
'vowels':
[
'ɑ','a','',
'ɪ','i','',
'ʊ','u','',
'ɛ','e','',
'o','ɔ','',
'ə','ʌ','',
'æ','',''
]
}
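# All four sheets share this shape (12 consonant rows x 3, 7 vowel rows x 3),
# which suggests symbols are mapped positionally between sheets; the empty
# strings are placeholders that keep the indices aligned.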
key_sheet = {
'title' : 'Key',
'consonants':
[
'9','b','',
'1','d','',
'7','g','',
'f','t','',
'8','v','',
'0','z','c',
'q','p','',
'6','j','',
'h','k','m',
'2','3','n',
'5','4','r',
'l','y','w'
],
'vowels':
[
'a','&','',
'i','#','',
'u','$','',
'e','%','',
'o','@','',
'x','=','',
's','',''
]
}
base_sheet = {
'title' : 'Base',
'consonants':
[
'p','b','',
't','d','',
'k','g','',
'(th)','(dth)','',
'f','v','',
's','z','(ts)',
'(sh)','(jh)','',
'(ch)','j','',
'h','(kh)','(ny)',
'n','m','(ng)',
'l','r','(rr)',
'(-)','y','w'
],
'vowels':
[
'a','(aa)','',
'i','(ii)','',
'u','(oo)','',
'e','(ee)','',
'o','(aw)','',
'(uh)','(ah)','',
'(ea)','',''
]
}
cipher_sheet = {
'title' : 'Cipher',
'consonants':
[
'b','bf','',
'd','df','',
'g','gf','',
't','tf','',
'p','pf','',
'c','cf','cn',
'k','kf','',
'q','qf','',
'h','hf','hn',
'm','mf','mn',
'r','rf','rn',
'w','wf','wn'
],
'vowels' :
[
'l','lf','',
'j','jf','',
'y','yf','',
'z','zf','',
'v','vf','',
'x','xf','',
's','',''
]
}
# note that 'marks' and 'symbols' are somewhat arbitrarily classified by their
# relative position in a word. This is strict, and inputs often don't conform.
# To be safe, just strip punctuation before transliterating.
# list of punctuation marks. These must be attached to the end of a word
# \ undefined
marks = (
':', ';', ',', '.', '!', '?',
'[', ']', '(', ')', '{', '}',
'"',"'",'<','>'
)
# list of symbols. These must stand alone in a sentence
# _ ^ | undefined
# Edit Oct '18: &#$%@= are now used for default key (capitalization problems)
symbols = (
'*','+','-','/','~'
)
def fl(list1):
'''remove empty strings from list'''
list1 = list(filter(None, list1))
return list1
# cons in IPA, 'class t'
reg1_c = fl(ipa_sheet['consonants'][:15] + ipa_sheet['consonants'][21:24])
# cons in IPA, 'class s'
reg2_c = fl(ipa_sheet['consonants'][15:17] + ipa_sheet['consonants'][18:21])
# cons in IPA, 'class h'
irreg1_c = fl(ipa_sheet['consonants'][24:26])
# cons in IPA, 'class n'
irreg2_c = fl(ipa_sheet['consonants'][26:30])
# cons in IPA, 'class r'
irreg3_c = fl(ipa_sheet['consonants'][30:33])
# pseudo-vowels in IPA
pseudo_v = fl(ipa_sheet['consonants'][34:])
other_c = fl([ipa_sheet['consonants'][17]] + [ipa_sheet['consonants'][33]])
def init_vcc():
'''cons clusters by their "classes"'''
vcc_st = [s + t for s in reg2_c for t in reg1_c]
vcc_sn = [s + n for s in reg2_c for n in irreg2_c]
vcc_sr = [s + r for s in reg2_c for r in irreg3_c]
vcc_tr = [t + r for t in reg1_c for r in irreg3_c]
return vcc_st + vcc_sn + vcc_sr + vcc_tr
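# Illustrative expansion: with 's' in reg2_c and 't' in reg1_c, vcc_st
# contains the cluster 'st'; vcc is then the union of all four class
# pairings, so clusters like 'st', 'sm', 'sl', and 'pɹ' all count as valid.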
# valid consonant clusters in IPA
vcc = init_vcc()
################################################################################
sheetsdict = {ipa_sheet['title'] : ipa_sheet,
key_sheet['title']: key_sheet,
base_sheet['title'] : base_sheet,
cipher_sheet['title'] : cipher_sheet}
################################################################################
``` |
{
"source": "jonjitsu/aws-buildspec",
"score": 2
} |
#### File: src/aws_buildspec/cmd.py
```python
import pkg_resources
from . import BUILDSPEC_YML
from . import decide_phases
from . import load_file
from . import validate_phases
from .compat import to_str
from .executors import DockerExecutor
from .executors import SystemExecutor
def init(type='full', filename=BUILDSPEC_YML):
""""""
# data = pkgutil.get_data('buildspec', 'data/template.yml')
template_name = 'templates/%s.yml' % type
data = pkg_resources.resource_string('aws_buildspec', template_name)
with open(filename, 'w') as fp:
fp.write(to_str(data))
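# Illustrative usage: init('simple', 'blah.yml') copies the packaged
# templates/simple.yml resource into blah.yml in the working directory.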
def run(phases, filename=BUILDSPEC_YML, shell=None, docker_image=None):
""""""
validate_phases(phases)
spec = load_file(filename)
phases = decide_phases(phases, spec)
if docker_image:
executor = DockerExecutor(docker_image, shell)
else:
executor = SystemExecutor(shell)
return executor.execute_phases(phases, spec)
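# Minimal usage sketch (the docker_image value is illustrative; it assumes a
# valid buildspec.yml in the current directory):
#     run(['install'])                    # one phase, on the host shell
#     run([], docker_image='python:3')    # all defined phases, inside Docker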
```
#### File: src/aws_buildspec/compat.py
```python
import sys
def to_str(something):
if sys.version_info[:1] == (2,):
if isinstance(something, unicode): # noqa
return something.encode('utf-8')
else:
return something
elif sys.version_info[:1] == (3,):
if isinstance(something, bytes):
return something.decode('utf-8')
else:
return something
```
#### File: src/aws_buildspec/__init__.py
```python
__version__ = "0.2.0"
import yaml
BUILDSPEC_YML = 'buildspec.yml'
def load_file(filename):
with open(filename, 'r') as fp:
        return yaml.safe_load(fp)  # safe_load: buildspec files need no arbitrary Python objects
PHASE_ORDER = {'install': 10, 'pre_build': 20, 'build': 30, 'post_build': 40}
def sort_phases(phases):
""" Sort phases in order defined in AWS buildspec reference """
def sorter(phase):
return PHASE_ORDER[phase]
return sorted(phases, key=sorter)
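# Example: sort_phases(['post_build', 'install', 'build'])
# returns ['install', 'build', 'post_build'].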
def decide_phases(desired_phases, spec):
""" Given:
- a list of the desired phases to run
- the buildspec
Return a list of phases to run in the correct order.
???Should a missing phase be an error or a warning???
Empty list for desired_phases means run all defined phases.
"""
if 'phases' not in spec:
return []
if desired_phases:
phases = []
for phase in desired_phases:
if phase in spec['phases']:
phases.append(phase)
else:
raise Exception('Phase [%s] not defined' % phase)
else:
phases = [phase for phase, commands in spec['phases'].items()]
return sort_phases(phases)
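# Example: with spec = {'phases': {'build': {...}, 'install': {...}}},
# decide_phases(['build'], spec) returns ['build'], while an empty
# desired_phases list returns ['install', 'build'] (every defined phase,
# in buildspec order).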
def validate_phases(phases):
""" Given a list of phases throw exception if one is an invalid buildspec
phase.
"""
for phase in phases:
if phase not in PHASE_ORDER:
raise Exception('Invalid buildspec phase: [%s]' % phase)
def suggest_phase(phase):
""" Given an invalid phase, suggest the closest ones. (git style) """
```
#### File: src/aws_buildspec/results.py
```python
try:
    from collections.abc import Sequence  # Python 3.3+
except ImportError:  # Python 2 fallback; removed from collections in 3.10
    from collections import Sequence
from .compat import to_str
STDOUT = 1
STDERR = 2
BUILDSPEC = 4
def format_line(line):
output = line[1].rstrip()
if line[0] == STDERR:
return 'ERR: %s' % output
elif line[0] == STDOUT:
return 'OUT: %s' % output
else:
return output
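# Example: format_line((STDERR, 'boom\n')) returns 'ERR: boom'; tuples with
# an unrecognized stream type are returned stripped, with no prefix.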
class ResultLog(Sequence):
    def __init__(self, stdout=(), line_formatter=format_line):  # () avoids a shared mutable default
self.results = []
self.add(stdout)
self.line_formatter = line_formatter
def add_line(self, line, rtype=STDOUT):
self.results.append((rtype, to_str(line)))
def add(self, str_or_iter, rtype=STDOUT):
if isinstance(str_or_iter, type(self)):
self.results.extend(str_or_iter.results)
elif isinstance(str_or_iter, str):
self.add_line(str_or_iter, rtype)
else:
for line in str_or_iter:
self.add_line(line, rtype)
def __getitem__(self, index):
return self.line_formatter(self.results[index])
def __len__(self):
return len(self.results)
def __str__(self):
sep = '\n'
return sep.join(self)
def __repr__(self):
return str(self)
def stdstream(stream, rtype=STDOUT):
return [(rtype, to_str(line)) for line in stream]
def stdout(out): return stdstream(out, STDOUT)
def stderr(err): return stdstream(err, STDERR)
def join_lines(lines):
return '\n'.join(lines)
```
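A short illustration of how `ResultLog` tags and formats output lines (a sketch using only the classes defined above):
```python
log = ResultLog()
log.add('hello')              # defaults to STDOUT
log.add(['oops\n'], STDERR)
print(log)                    # prints "OUT: hello" then "ERR: oops"
```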
#### File: aws-buildspec/tests/test_cli.py
```python
import os
import pprint
import pytest
from click.testing import CliRunner
from aws_buildspec.cli import init
from aws_buildspec.cli import main
from .helpers import *
def test_init_command(tmpdir):
print(os.getcwd())
tmpdir.chdir()
print(os.getcwd())
runner = CliRunner()
result = runner.invoke(main, ['init'])
assert tmpdir.join('buildspec.yml').isfile()
assert len(tmpdir.listdir()) == 1
assert result.output == 'Generated buildspec.yml\n'
assert result.exit_code == 0
def test_init_command_with_argument(tmpdir):
tmpdir.chdir()
runner = CliRunner()
result = runner.invoke(main, ['init', 'simple'])
assert tmpdir.join('buildspec.yml').isfile()
assert len(tmpdir.listdir()) == 1
assert result.output == 'Generated buildspec.yml\n'
assert result.exit_code == 0
def test_init_command_with_invalid_argument():
runner = CliRunner()
result = runner.invoke(main, ['init', 'invalidtypecheers'])
assert result.exit_code != 0
def test_init_command_with_alternate_filename(tmpdir):
tmpdir.chdir()
runner = CliRunner()
result = runner.invoke(main, ['init', '-f', 'blah.yml'])
assert tmpdir.join('blah.yml').isfile()
assert len(tmpdir.listdir()) == 1
assert result.output == 'Generated blah.yml\n'
assert result.exit_code == 0
def test_main():
runner = CliRunner()
result = runner.invoke(main, [])
# assert result.output == '()\n'
assert result.exit_code == 0
def test_run(tmpdir):
buildspec_yml = """---
phases:
install:
commands:
- echo install
build:
commands:
- echo build
post_build:
commands:
- echo post_build
"""
runner = CliRunner()
with Tempfile(buildspec_yml) as filename:
result = runner.invoke(main, ['run', '-f', filename, 'install'])
assert result.exit_code == 0
assert result.output == u'Executing install phase\nOUT: install\n'
result = runner.invoke(main, ['run', '-f', filename])
assert result.output == u'Executing install phase\nOUT: install\nExecuting build phase\nOUT: build\nExecuting post_build phase\nOUT: post_build\n'
assert result.exit_code == 0
result = runner.invoke(main, ['run', '-f', filename, 'post_build'])
assert result.output == u'Executing post_build phase\nOUT: post_build\n'
assert result.exit_code == 0
``` |
{
"source": "jonjitsu/python-tmsu-tag-assistant",
"score": 2
} |
#### File: src/tmsu_tag_assistant/tmsu.py
```python
from difflib import get_close_matches
from itertools import chain
import os
import re
from prompt_toolkit import prompt
from prompt_toolkit.history import FileHistory
from prompt_toolkit.auto_suggest import AutoSuggestFromHistory
from prompt_toolkit.contrib.completers import WordCompleter
from prompt_toolkit.key_binding.defaults import load_key_bindings_for_prompt
from prompt_toolkit.keys import Keys
from .process import cmd, background
HISTORY_FILE = os.path.expanduser('~/.tmsu-ta-history')
def touch(filename):
open(filename, 'a').close()
if not os.path.isfile(HISTORY_FILE):
touch(HISTORY_FILE)
def is_tmsu_initialized():
ex, out, err = cmd('tmsu info')
return ex == 0
def parse_tags_list(o):
""""""
if '' == o.strip():
return set()
else:
return set(o.strip().split(os.linesep))
def get_all_tags():
ex, o, _ = cmd('tmsu tags')
assert ex == 0
return parse_tags_list(o)
def parse_tags_from_output(cmd_output):
return set(cmd_output.split()[1:])
def get_file_tags(f):
""" Return the current tags for a file f.
Returns:
Set(str) a set representing all the tags
"""
ex, o, _ = cmd(['tmsu', 'tags', f])
assert ex == 0
return parse_tags_from_output(o)
def get_suggested_tags(f, possible_tags):
""" Given:
- a file f
- a set of tags
return tag suggestions
Possibilities:
- from filename/path
- from metadata
"""
return simple_suggested_tags(f, possible_tags)
def simple_suggested_tags(f, tags):
""""""
separator = re.compile('[\s\-_.,(){}:;"\'|/]')
matches = [get_close_matches(p.lower(), tags) for p in separator.split(f) if p.strip()]
return set(chain.from_iterable(matches))
class PromptTagger(object):
def __init__(self, completer=None, history=None, auto_suggest=None):
self.completer = completer if completer else WordCompleter(get_all_tags())
self.history = history if history else FileHistory(HISTORY_FILE)
self.auto_suggest = auto_suggest if auto_suggest else AutoSuggestFromHistory()
self.registry = load_key_bindings_for_prompt()
def __call__(self, file):
def tags_to_str(tags):
return ' '.join(tags)
completer = self.completer
registry = self.registry
@registry.add_binding(Keys.F4, eager=True)
def _(event):
# event.cli.current_buffer.insert_text('hello world')
background(['xdg-open', file])
@registry.add_binding(Keys.F5, eager=True)
def _(event):
tags = get_file_tags(file)
event.cli.current_buffer.insert_text(tags_to_str(tags))
@registry.add_binding(Keys.F6, eager=True)
def _(event):
all_tags = completer.words
tags = get_suggested_tags(file, list(all_tags))
event.cli.current_buffer.insert_text(tags_to_str(list(tags)[:5]))
current_tags = get_file_tags(file)
p = "[F4] Open file [F5] insert current tags [F6] suggest tags\n%s: %s\n> " % (file, tags_to_str(current_tags))
tags = prompt(p, history=self.history,
auto_suggest=self.auto_suggest,
completer=completer,
key_bindings_registry=registry)
tags = set(tags.strip().split(' '))
self.add_tags_to_completer(tags)
return tags
def add_tags_to_completer(self, tags):
current = set(self.completer.words)
self.completer.words = list(current | tags)
def quote(s, char="'"):
return char + s + char
def tags_to_str(tags, sep=' '):
"""Given a set of tags,
return a string."""
if isinstance(tags, set) or isinstance(tags, list):
return sep.join(quote(tag) for tag in sorted(tags))
else:
        raise TypeError('Expected collection.')
class TmsuCmd(object):
""" Represents a tmsu command.
Works only with tag/untag commands.
"""
TMSU_CMD = 'tmsu'
def __init__(self, subcmd, *args):
self.subcmd = subcmd
self.args = args
def __str__(self):
""" Represent as a string invokable on command line.
Works only with tag/untag commands.
"""
args = list(self.args)
cmd = [self.TMSU_CMD, self.subcmd]
cmd.append("'%s'" % args.pop(0))
cmd.append(tags_to_str(args.pop()))
assert len(args) == 0
return ' '.join(cmd)
def as_cmd(self):
""" Return this command in a format to be executed by subprocess.* functions. """
args = list(self.args)
cmd = [self.TMSU_CMD, self.subcmd]
cmd.append(args.pop(0))
cmd.extend(sorted(args.pop()))
assert len(args) == 0
return cmd
@classmethod
def tag(cls, file, tags):
return cls('tag', file, tags)
@classmethod
def untag(cls, file, tags):
return cls('untag', file, tags)
def tag_files_with(files, tagger=None):
""" Given:
- a collection of files
- a tagging function
return a collection of TmsuCmd
"""
tagger = tagger or PromptTagger()
return (TmsuCmd.tag(f, tagger(f)) for f in files)
```
#### File: python-tmsu-tag-assistant/tests/test_tmsu_tag_assistant.py
```python
import pytest
# from click.testing import CliRunner
# from tmsu_tag_assistant.cli import main
# def test_main():
# runner = CliRunner()
# result = runner.invoke(main, ['--help'])
# assert result.output == '()\n'
# assert result.exit_code == 0
from tmsu_tag_assistant.cli import line_iter
from io import StringIO
import os
from collections.abc import Iterable  # moved out of collections in Python 3.3
def test_line_iter():
lines = ['line1', 'line2']
stream = StringIO(os.linesep.join(lines))
it = line_iter(stream)
# assert is an iterator
assert iter(it) == iter(it)
for actual, expected in zip(it, lines):
assert actual == expected
stream = StringIO(os.linesep.join(lines) + os.linesep)
it = line_iter(stream)
# assert is an iterator
assert iter(it) == iter(it)
for actual, expected in zip(it, lines):
assert actual == expected
from tmsu_tag_assistant.process import cmd
def test_cmd():
ex, out, err = cmd('true')
assert ex == 0
assert out == ''
assert err == ''
ex, out, err = cmd('false')
assert ex == 1
assert out == ''
assert err == ''
ex, out, err = cmd('ls /aasdflas12390812309asdfasdf')
assert ex == 2
assert out == ''
assert err == 'ls: cannot access /aasdflas12390812309asdfasdf: No such file or directory\n'
ex, out, err = cmd(['bash', '-c', 'echo testing 123'])
assert ex == 0
assert out == 'testing 123\n'
assert err == ''
from tmsu_tag_assistant.tmsu import parse_tags_from_output, parse_tags_list
def test_parse_tags_from_output():
o = "Generators.pdf: ebook python compsci"
expected = {'ebook', 'python', 'compsci'}
assert expected == parse_tags_from_output(o)
o = "Generators.pdf:"
assert set() == parse_tags_from_output(o)
def test_parse_tags_list():
o = ""
assert parse_tags_list(o) == set()
o = 'tag1\n'
assert parse_tags_list(o) == {'tag1'}
tags = {'tag1', 'tag2'}
o = '\n'.join(tags)
assert parse_tags_list(o) == tags
o = '\n'.join(tags) + '\n'
assert parse_tags_list(o) == tags
from tmsu_tag_assistant.tmsu import TmsuCmd
def test_TmsuCmd_tag():
file = 'Generators.pdf'
tags = {'pdf', 'ebook', 'python'}
cmd = TmsuCmd.tag(file, tags)
assert isinstance(cmd, TmsuCmd)
def test_TmsuCmd_untag():
file = 'Generators.pdf'
tags = {'pdf', 'ebook', 'python'}
cmd = TmsuCmd.untag(file, tags)
assert isinstance(cmd, TmsuCmd)
def test_TmsuCmd_str():
file = 'Generators Tutorial 2011.pdf'
tags = {'pdf', 'ebook', 'python'}
cmd = TmsuCmd.tag(file, tags)
expected = "tmsu tag '%s' 'ebook' 'pdf' 'python'" % (file)
assert str(cmd) == expected
def test_TmsuCmd_as_cmd():
file = 'Generators Tutorial 2011.pdf'
tags = {'pdf', 'ebook', 'python'}
cmd = TmsuCmd.tag(file, tags)
expected = ['tmsu', 'tag', file, 'ebook', 'pdf', 'python']
assert cmd.as_cmd() == expected
from tmsu_tag_assistant.tmsu import tag_files_with
def test_tag_files_with():
tags = {'pdf', 'ebook', 'year=1923'}
tagger = lambda f: tags
files = ['Generators.pdf']
actual = list(tag_files_with(files, tagger))
assert isinstance(actual[0], TmsuCmd)
assert "tmsu tag 'Generators.pdf' 'ebook' 'pdf' 'year=1923'" == str(actual[0])
from tmsu_tag_assistant.tmsu import simple_suggested_tags
def test_simple_suggested_tags():
tags = ['python', 'ebook', 'compsci']
f = './papers/Python Generators and object oriented programming.epub'
suggested = simple_suggested_tags(f, tags)
assert suggested == {'python'}
``` |
{
"source": "Jonjocarts/LION-Public",
"score": 2
} |
#### File: LION-Public/gwinc/io.py
```python
import h5py
import yaml
import datetime
from . import logger
from . import Struct
SCHEMA = 'GWINC noise budget'
SCHEMA_VERSION = 1
def _write_trace_recursive(grp, traces):
for name, trace in traces.items():
if isinstance(trace, dict):
tgrp = grp.create_group(name)
_write_trace_recursive(tgrp, trace)
else:
data, style = trace
dset = grp.create_dataset(name, data=data)
for key, val in style.items():
dset.attrs[key] = val
def save_hdf5(path, freq, traces, **kwargs):
"""Save GWINC budget data to an HDF5 file.
The `freq` argument should be the frequency array, and `traces`
should be the traces (recursive) dictionary. Keyword arguments
are stored in the HDF5 top level 'attrs' key-value store. If an
'ifo' keyword arg is supplied, it is assumed to be a Struct and
will be serialized to YAML for storage.
See HDF5_SCHEMA.
"""
with h5py.File(path, 'w') as f:
f.attrs['SCHEMA'] = SCHEMA
f.attrs['SCHEMA_VERSION'] = SCHEMA_VERSION
# FIXME: add budget code hash or something
f.attrs['date'] = datetime.datetime.now().isoformat()
for key, val in kwargs.items():
if key == 'ifo':
f.attrs['ifo'] = val.to_yaml()
else:
f.attrs[key] = val
f.create_dataset('Freq', data=freq)
tgrp = f.create_group('traces')
_write_trace_recursive(tgrp, traces)
def _read_trace_recursive(element):
trace = {}
for name, item in element.items():
if isinstance(item, h5py.Group):
trace[name] = _read_trace_recursive(item)
else:
trace[name] = item[:], dict(item.attrs.items())
return trace
def load_hdf5(path):
"""Load GWINC budget data from an HDF5 file.
Returns (freq, traces, attrs). An 'ifo' attr will be
de-serialized from YAML into a Struct object.
See HDF5_SCHEMA.
"""
with h5py.File(path, 'r') as f:
# FIXME: check SCHEMA name/version
freq = f['Freq'][:]
traces = _read_trace_recursive(f['/traces'])
attrs = dict(f.attrs)
if 'ifo' in attrs:
try:
attrs['ifo'] = Struct.from_yaml(attrs['ifo'])
except yaml.constructor.ConstructorError:
logger.warning("HDF5 load warning: Could not de-serialize 'ifo' YAML attribute.")
return freq, traces, attrs
```
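A hedged round-trip sketch for the two functions above (assumes the package imports as `gwinc` and follows the `{name: (data, style_dict)}` trace layout used by `_write_trace_recursive`):
```python
import numpy as np
from gwinc.io import save_hdf5, load_hdf5

freq = np.logspace(0, 3, 10)
traces = {'Total': (np.ones_like(freq), {'color': 'k'})}
save_hdf5('budget.h5', freq, traces, title='example')
freq2, traces2, attrs = load_hdf5('budget.h5')
assert np.allclose(freq, freq2) and attrs['title'] == 'example'
```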
#### File: gwinc/noise/substratethermal.py
```python
from __future__ import division, print_function
import numpy as np
from numpy import exp, inf, pi, sqrt
import scipy.special
import scipy.integrate
from .. import const
from ..const import BESSEL_ZEROS as zeta
from ..const import J0M as j0m
def substrate_thermorefractive(f, materials, wBeam, exact=False):
"""Substrate thermal displacement noise spectrum from thermorefractive fluctuations
:f: frequency array in Hz
:materials: gwinc optic materials structure
:wBeam: beam radius (at 1 / e^2 power)
    :exact: use the exact integral instead of the adiabatic approximation (default False)
:returns: displacement noise power spectrum at :f:, in meters
"""
H = materials.MassThickness
kBT = const.kB * materials.Substrate.Temp
Temp = materials.Substrate.Temp
rho = materials.Substrate.MassDensity
beta = materials.Substrate.dndT
C = materials.Substrate.MassCM
kappa = materials.Substrate.MassKappa
r0 = wBeam/np.sqrt(2)
omega = 2*pi*f
if exact:
def integrand(k, om, D):
return D * k**3 * exp(-k**2 * wBeam**2/4) / (D**2 * k**4 + om**2)
inte = np.array([scipy.integrate.quad(lambda k: integrand(k, om, kappa/(rho*C)), 0, inf)[0] for om in omega])
# From P1400084 Heinert et al. Eq. 15
#psdCD = @(gamma,m,int) 2*(3/pi^7)^(1/3)*kBT*H*gamma^2*m/hbar^2*cdDens^(1/3)*int; %units are meters
        psdTR = lambda int_: 2/pi * H * beta**2 * kBT * Temp / (rho*C) * int_
        psd = psdTR(inte)
else:
psd = 4*H*beta**2*kappa*kBT*Temp/(pi*r0**4*omega**2*(rho*C)**2)
return psd
def substrate_brownian(f, materials, wBeam):
"""Substrate thermal displacement noise spectrum due to substrate mechanical loss
:f: frequency array in Hz
:materials: gwinc optic materials structure
:wBeam: beam radius (at 1 / e^2 power)
:returns: displacement noise power spectrum at :f:, in meters
"""
Y = materials.Substrate.MirrorY
sigma = materials.Substrate.MirrorSigma
c2 = materials.Substrate.c2
n = materials.Substrate.MechanicalLossExponent
alphas = materials.Substrate.Alphas
kBT = const.kB * materials.Substrate.Temp
cftm, aftm = substrate_brownian_FiniteCorr(materials, wBeam)
# Bulk substrate contribution
phibulk = c2 * f**n
cbulk = 8 * kBT * aftm * phibulk / (2 * pi * f)
# Surface loss contribution
# csurf = alphas/(Y*pi*wBeam^2)
csurf = alphas*(1-2*sigma)/((1-sigma)*Y*pi*wBeam**2)
csurf *= 8 * kBT / (2 * pi * f)
return csurf + cbulk
def substrate_brownian_FiniteCorr(materials, wBeam):
"""Substrate brownian noise finite-size test mass correction
:materials: gwinc optic materials structure
:wBeam: beam radius (at 1 / e^2 power)
:returns: correction factors tuple:
cftm = finite mirror correction factor
aftm = amplitude coefficient for thermal noise:
thermal noise contribution to displacement noise is
S_x(f) = (8 * kB * T / (2*pi*f)) * Phi(f) * aftm
Equation references to Bondu, et al. Physics Letters A 246 (1998)
227-236 (hereafter BHV) and Liu and Thorne gr-qc/0002055 (hereafter LT)
"""
a = materials.MassRadius
h = materials.MassThickness
Y = materials.Substrate.MirrorY
sigma = materials.Substrate.MirrorSigma
# LT uses e-folding of power
r0 = wBeam / sqrt(2)
km = zeta/a
Qm = exp(-2*km*h) # LT eq. 35a
Um = (1-Qm)*(1+Qm)+4*h*km*Qm
Um = Um/((1-Qm)**2-4*(km*h)**2*Qm) # LT 53 (BHV eq. btwn 29 & 30)
x = exp(-(zeta*r0/a)**2/4)
s = sum(x/(zeta**2*j0m)) # LT 57
x2 = x*x
U0 = sum(Um*x2/(zeta*j0m**2))
U0 = U0*(1-sigma)*(1+sigma)/(pi*a*Y) # LT 56 (BHV eq. 3)
p0 = 1/(pi*a**2) # LT 28
DeltaU = (pi*h**2*p0)**2
DeltaU = DeltaU + 12*pi*h**2*p0*sigma*s
DeltaU = DeltaU + 72*(1-sigma)*s**2
DeltaU = DeltaU*a**2/(6*pi*h**3*Y) # LT 54
# LT 58 (eq. following BHV 31)
aftm = DeltaU + U0
# amplitude coef for infinite TM, LT 59
# factored out: (8 * kB * T * Phi) / (2 * pi * f)
aitm = (1 - sigma**2) / (2 * sqrt(2 * pi) * Y * r0)
# finite mirror correction
cftm = aftm / aitm
return cftm, aftm
def substrate_thermoelastic(f, materials, wBeam):
"""Substrate thermal displacement noise spectrum from thermoelastic fluctuations
:f: frequency array in Hz
:materials: gwinc optic materials structure
:wBeam: beam radius (at 1 / e^2 power)
:returns: displacement noise power spectrum at :f:, in meters
"""
sigma = materials.Substrate.MirrorSigma
rho = materials.Substrate.MassDensity
kappa = materials.Substrate.MassKappa # thermal conductivity
alpha = materials.Substrate.MassAlpha # thermal expansion
CM = materials.Substrate.MassCM # heat capacity @ constant mass
Temp = materials.Substrate.Temp # temperature
kBT = const.kB * materials.Substrate.Temp
S = 8*(1+sigma)**2*kappa*alpha**2*Temp*kBT # note kBT has factor Temp
S /= (sqrt(2*pi)*(CM*rho)**2)
S /= (wBeam/sqrt(2))**3 # LT 18 less factor 1/omega^2
# Corrections for finite test masses:
S *= substrate_thermoelastic_FiniteCorr(materials, wBeam)
return S/(2*pi*f)**2
def substrate_thermoelastic_FiniteCorr(materials, wBeam):
"""Substrate thermoelastic noise finite-size test mass correction
:materials: gwinc optic materials structure
:wBeam: beam radius (at 1 / e^2 power)
:returns: correction factor
(Liu & Thorne gr-qc/0002055 equation 46)
Equation references to Bondu, et al. Physics Letters A 246 (1998)
227-236 (hereafter BHV) or Liu and Thorne gr-qc/0002055 (hereafter LT)
"""
a = materials.MassRadius
h = materials.MassThickness
sigma = materials.Substrate.MirrorSigma
# LT uses power e-folding
r0 = wBeam/sqrt(2)
km = zeta/a
Qm = exp(-2*km*h) # LT 35a
pm = exp(-(km*r0)**2/4)/(pi*(a*j0m)**2) # LT 37
c0 = 6*(a/h)**2*sum(j0m*pm/zeta**2) # LT 32
c1 = -2*c0/h # LT 32
p0 = 1/(pi*a**2) # LT 28
c1 += p0/(2*h) # LT 40
coeff = (1-Qm)*((1-Qm)*(1+Qm)+8*h*km*Qm)
coeff += 4*(h*km)**2*Qm*(1+Qm)
coeff *= km*(pm*j0m)**2*(1-Qm)
coeff /= ((1-Qm)**2-4*(h*km)**2*Qm)**2
coeff = sum(coeff) + h*c1**2/(1+sigma)**2
coeff *= (sqrt(2*pi)*r0)**3*a**2 # LT 46
return coeff
```
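A hedged sketch of evaluating the Brownian-noise spectrum above. Every number is an illustrative, roughly fused-silica-like assumption rather than a real interferometer configuration, and the import paths assume the package layout shown in these files:
```python
import numpy as np
from gwinc import Struct
from gwinc.noise.substratethermal import substrate_brownian

materials = Struct({
    'MassRadius': 0.17, 'MassThickness': 0.2,   # metres (assumed values)
    'Substrate': {'MirrorY': 7.27e10, 'MirrorSigma': 0.167,
                  'c2': 7.6e-12, 'MechanicalLossExponent': 0.77,
                  'Alphas': 5.2e-12, 'Temp': 290}})
f = np.logspace(1, 3, 100)                       # 10 Hz to 1 kHz
psd = substrate_brownian(f, materials, wBeam=0.06)  # displacement noise PSD at f
```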
#### File: LION-Public/gwinc/struct.py
```python
import os
import re
import io
import yaml
import numpy as np
from scipy.io import loadmat
from scipy.io.matlab.mio5_params import mat_struct
# HACK: fix loading number in scientific notation
#
# https://stackoverflow.com/questions/30458977/yaml-loads-5e-6-as-string-and-not-a-number
#
# An apparent bug in python-yaml prevents it from recognizing
# scientific notation as a float. The following is a modified version
# of the parser that recognize scientific notation appropriately.
yaml_loader = yaml.SafeLoader
yaml_loader.add_implicit_resolver(
'tag:yaml.org,2002:float',
re.compile('''^(?:
[-+]?(?:[0-9][0-9_]*)\\.[0-9_]*(?:[eE][-+]?[0-9]+)?
|[-+]?(?:[0-9][0-9_]*)(?:[eE][-+]?[0-9]+)
|\\.[0-9_]+(?:[eE][-+][0-9]+)?
|[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\\.[0-9_]*
|[-+]?\\.(?:inf|Inf|INF)
|\\.(?:nan|NaN|NAN))$''', re.X),
list('-+0123456789.'))
def dictlist2recarray(l):
def dtype(v):
if isinstance(v, int):
return float
else:
return type(v)
# get dtypes from first element dict
dtypes = [(k, dtype(v)) for k,v in l[0].items()]
values = [tuple(el.values()) for el in l]
out = np.array(values, dtype=dtypes)
return out.view(np.recarray)
class Struct(object):
"""Matlab struct-like object
This is a simple implementation of a MATLAB struct-like object
that stores values as attributes of a simple class: and allows
assigning to attributes recursively, e.g.:
>>> s = Struct()
>>> s.a = 4
>>> s.b = Struct()
>>> s.b.c = 8
Various classmethods allow creating one of these objects from YAML
file, a nested dict, or a MATLAB struct object.
"""
STRUCT_EXT = ['.yaml', '.yml', '.mat', '.m']
"""accepted extension types for struct files"""
# FIXME: There should be a way to allow setting nested struct
# attributes, e.g.:
#
# >>> s = Struct()
# >>> s.a.b.c = 4
#
# Usage of __getattr__ like this is dangerous and creates
# non-intuitive behavior (i.e. an empty struct is returned when
# accessing attributes that don't exist). Is there a way to
    # accomplish this without that adverse side effect?
#
# def __getattr__(self, name):
# if name not in self.__dict__:
# self.__dict__[name] = Struct()
# return self.__dict__[name]
##########
def __init__(self, *args, **kwargs):
"""Initialize Struct object
Initializes similar to dict(), taking a single dict or mapping
argument, or keyword arguments to initially populate the
Struct.
"""
self.update(dict(*args, **kwargs))
def __getitem__(self, key):
"""Get a (possibly nested) value from the struct.
"""
if '.' in key:
k, r = key.split('.', 1)
# FIXME: this is inelegant. better done with regexp?
if len(k.split('[')) > 1:
kl, i = k.split('[')
i = int(i.strip(']'))
return self.__dict__[kl][i][r]
return self.__dict__[k][r]
else:
return self.__dict__[key]
def get(self, key, default=None):
"""Get a (possibly nested) value from the struct, or default.
"""
try:
return self[key]
except KeyError:
return default
def __setitem__(self, key, value):
if '.' in key:
k, r = key.split('.', 1)
self.__dict__[k][r] = value
else:
self.__dict__[key] = value
def setdefault(self, key, default):
return self.__dict__.setdefault(key, default)
def update(self, other):
"""Update Struct from other Struct or dict.
"""
if isinstance(other, Struct):
d = other.__dict__
else:
d = dict(other)
for k, v in d.items():
if k in self:
if isinstance(self[k], Struct) \
and isinstance(v, (dict, Struct)):
self[k].update(v)
continue
try:
delattr(self, k)
except AttributeError:
delattr(self.__class__, k)
if isinstance(v, dict):
self[k] = Struct(v)
elif isinstance(v, (list, tuple)):
try:
self[k] = list(map(Struct, v))
except TypeError:
self[k] = v
else:
self[k] = v
def items(self):
return self.__dict__.items()
def keys(self):
return self.__dict__.keys()
def values(self):
return self.__dict__.values()
def __contains__(self, key):
return key in self.__dict__
def to_dict(self, array=False):
"""Return nested dictionary representation of Struct.
If `array` is True any lists encountered will be turned into
numpy arrays, and lists of Structs will be turned into record
arrays. This is needed to convert to structure arrays in
matlab.
"""
d = {}
for k,v in self.__dict__.items():
if isinstance(v, Struct):
d[k] = v.to_dict(array=array)
else:
if isinstance(v, list):
try:
# this should fail if the elements of v are
# not Struct
# FIXME: need cleaner way to do this
v = [i.to_dict(array=array) for i in v]
if array:
v = dictlist2recarray(v)
except AttributeError:
if array:
v = np.array(v)
# FIXME: there must be a better way to just match all
# numeric scalar types
                elif isinstance(v, (int, float, np.integer, np.floating)):
v = float(v)
d[k] = v
return d
def to_yaml(self, path=None):
"""Return YAML representation of Struct.
Write YAML to `path` if specified.
"""
y = yaml.dump(self.to_dict(), default_flow_style=False)
if path:
with open(path, 'w') as f:
f.write(y)
else:
return y
def __str__(self):
return '<GWINC Struct: {}>'.format(list(self.__dict__.keys()))
def __repr__(self):
return self.__str__()
def __iter__(self):
return iter(self.__dict__)
def walk(self):
"""Iterate over all leaves in the struct tree.
"""
for k,v in self.__dict__.items():
if isinstance(v, type(self)):
for sk,sv in v.walk():
yield k+'.'+sk, sv
else:
try:
for i,vv in enumerate(v):
for sk,sv in vv.walk():
yield '{}[{}].{}'.format(k,i,sk), sv
except (AttributeError, TypeError):
yield k, v
def diff(self, other):
"""Return tuple of differences between target IFO.
Returns list of (key, value, other_value) tuples. Value is
None if key not present.
"""
diffs = []
for k, ov in other.walk():
v = self.get(k, None)
if ov != v and ov is not v:
diffs.append((k, v, ov))
for k, v in self.walk():
ov = other.get(k, None)
if ov is None:
diffs.append((k, v, ov))
return diffs
def __eq__(self, other):
"""True if structs have all equal values"""
return not bool(self.diff(other))
def to_txt(self, path=None, fmt='0.6e', delimiter=': ', end=''):
"""Return text represenation of Struct, one element per line.
Struct keys use '.' to indicate hierarchy. The `fmt` keyword
controls the formatting of numeric values. MATLAB code can be
generated with the following parameters:
>>> ifo.to_txt(delimiter=' = ', end=';')
Write text to `path` if specified.
"""
txt = io.StringIO()
for k, v in sorted(self.walk()):
if isinstance(v, (int, float, complex)):
base = fmt
elif isinstance(v, (list, np.ndarray)):
if isinstance(v, list):
v = np.array(v)
                v = np.array2string(v, separator='', max_line_width=np.inf, formatter={'all': lambda x: "{:0.6e} ".format(x)})
base = 's'
else:
base = 's'
txt.write(u'{key}{delimiter}{value:{base}}{end}\n'.format(
key=k, value=v, base=base,
delimiter=delimiter,
end=end,
))
if path:
with open(path, 'w') as f:
f.write(txt.getvalue())
else:
return txt.getvalue()
@classmethod
def from_yaml(cls, y):
"""Create Struct from YAML string.
"""
d = yaml.load(y, Loader=yaml_loader) or {}
return cls(d)
@classmethod
def from_matstruct(cls, s):
"""Create Struct from scipy.io.matlab mat_struct object.
"""
c = cls()
try:
s = s['ifo']
        except Exception:
pass
for k,v in s.__dict__.items():
if k in ['_fieldnames']:
# skip these fields
pass
elif type(v) is mat_struct:
c.__dict__[k] = Struct.from_matstruct(v)
else:
# handle lists of Structs
try:
c.__dict__[k] = list(map(Struct.from_matstruct, v))
            except Exception:
c.__dict__[k] = v
# try:
# c.__dict__[k] = float(v)
# except:
# c.__dict__[k] = v
return c
@classmethod
def from_file(cls, path):
"""Load Struct from .yaml or MATLAB .mat file.
Accepted file types are .yaml, .mat, or .m.
For .m files, the file is expected to include either an object
or function that corresponds to the basename of the file. The
MATLAB engine will be invoked to execute the .m code and
extract the resultant IFO data.
If `path` is a tuple, all elements will be joined ala
        os.path.join, with the first element resolved to its absolute
dirname. This is useful for loading package-relative files
with e.g.:
Struct.from_file((__file__, 'myifo.yaml'))
"""
if type(path) == tuple:
path = os.path.join(os.path.abspath(os.path.dirname(path[0])), *path[1:])
base, ext = os.path.splitext(path)
if ext == '.m':
from ..gwinc_matlab import Matlab
matlab = Matlab()
matlab.addpath(os.path.dirname(path))
func_name = os.path.basename(base)
matlab.eval("ifo = {};".format(func_name), nargout=0)
ifo = matlab.extract('ifo')
return Struct.from_matstruct(ifo)
with open(path, 'r') as f:
if ext in ['.yaml', '.yml']:
return cls.from_yaml(f)
elif ext == '.mat':
s = loadmat(f, squeeze_me=True, struct_as_record=False)
return cls.from_matstruct(s)
else:
raise IOError("Unknown file type: {}".format(ext))
``` |
{
"source": "jonjohansen/IFI-Survival-Kit",
"score": 3
} |
#### File: IFI-Survival-Kit/src/util.py
```python
import argparse
import subprocess
import os
import json
from .errors import NoUsernameError, NoTokenError, NoPackageError, NoSuchFileError
from .textcolor import TextColor, printBlue, printYellow, printPurple, printGreen, printRed, printCyan
from .emojis import Emojis
from pip._internal import main as pipmain
from .user import User
import importlib
from .git import TestToken
def parseArgs():
''' Parses arguments and handles all the user input at the initial part of the script
Should return the final username and token for the script to use
'''
desc = '''
This is a small script helping with setting up a folder structure based on a configuration.
The script will parse through the structure.json file within the same folder, and
create a folder structure, with a matching github structure
'''
parser = argparse.ArgumentParser(description=desc, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-t','--token', metavar="<token>", type=str, help='Personal access token')
parser.add_argument('-e','--email', metavar="<Email>", type=str, help='Github email')
parser.add_argument('-c','--config', metavar="<PATH>", type=str, default='structure.json', help='JSON file describing folder structure')
args = parser.parse_args()
printCyan("%s Welcome to IFI Survival Kit %s" % (Emojis.graduation_cap, Emojis.safety_helmet))
printBlue("We'll have you sorted and organized in no time %s" % Emojis.fire)
    if not args.token or not args.email:
        printBlue("We will look in the global git config for default credentials...\n")
configToken = subprocess.Popen("git config --global user.token", shell=True, stdout=subprocess.PIPE).stdout.read().decode('utf-8').strip("\n")
configEmail = subprocess.Popen("git config --global user.email", shell=True, stdout=subprocess.PIPE).stdout.read().decode('utf-8').strip("\n")
while(True):
# Ask for user token
if not (args.token):
printPurple("Please enter your Github access token:", end='')
if (configToken != ""):
printGreen(" (default '%s') " % (configToken))
else:
printBlue(" no default found)")
inputToken = input()
if (inputToken == ""):
if (configToken == ""):
raise NoTokenError
else:
token = configToken
else:
token = inputToken
else:
token = args.token
# Test token and get Username
printBlue("Checking that the token works")
username = TestToken(token)
printGreen("%s Seems to work" % Emojis.thumbs_up)
printBlue("Please verify that ", end='')
printYellow(username, end='')
printBlue(" is your Github username", end='')
res = input(("%s (y/n)\n") % (TextColor.green))
if (res == "y" or res == "Y"):
            break
# Ask for user email
if not (args.email):
printPurple("Please enter your Github email address:", end='')
if (configToken != ""):
printGreen(" (default '%s')" % (configEmail))
else:
printBlue(" (no default found)%s")
inputEmail = input()
if (inputEmail == ""):
if (configEmail == ""):
printYellow("Commits will be made without an author e-mail")
else:
email = configEmail
else:
email = inputEmail
else:
token = args.token
user = User(username, email, token)
return user, args.config
def import_or_install(package):
''' Tries to import a package. If package is not found it prompts the user to install it through pip'''
try:
__import__(package)
globals()[package] = importlib.import_module(package)
except ImportError:
print(("%s %sPip package %s%s %sis missing") % (Emojis.package, TextColor.blue, TextColor.purple, package, TextColor.blue))
print(("%sYou can uninstall this afterwards with %s'pip uninstall %s'") % (TextColor.blue, TextColor.yellow, package) )
printBlue(("Do you want to install it"), end="")
res = input(("%s (y/n) ") % (TextColor.green))
if (res == "y" or res == "Y"):
pipmain(['install', package])
globals()[package] = importlib.import_module(package)
else:
raise NoPackageError(package)
def readConfig(config):
try:
with open(config) as file:
jsonConfig = file.read()
    except (IOError, OSError):
raise NoSuchFileError(config)
return json.loads(jsonConfig)
``` |
{
"source": "jonjohnsonjr/distroless",
"score": 2
} |
#### File: distroless/package_manager/dpkg_parser.py
```python
import argparse
import gzip
import urllib2
import json
import os
from package_manager.parse_metadata import parse_package_metadata
from package_manager import util
OUT_FOLDER = "file"
PACKAGES_FILE_NAME = os.path.join(OUT_FOLDER,"Packages.json")
PACKAGE_MAP_FILE_NAME = os.path.join(OUT_FOLDER,"packages.bzl")
DEB_FILE_NAME = os.path.join(OUT_FOLDER,"pkg.deb")
FILENAME_KEY = "Filename"
SHA256_KEY = "SHA256"
parser = argparse.ArgumentParser(
description="Downloads a deb package from a package source file"
)
parser.add_argument("--package-files", action='store',
help='A list of Packages.gz files to use')
parser.add_argument("--packages", action='store',
help='A comma delimited list of packages to search for and download')
parser.add_argument("--workspace-name", action='store',
help='The name of the current bazel workspace')
parser.add_argument("--download-and-extract-only", action='store',
help='If True, download Packages.gz and make urls absolute from mirror url')
parser.add_argument("--mirror-url", action='store',
help='The base url for the package list mirror')
parser.add_argument("--arch", action='store',
help='The target architecture for the package list')
parser.add_argument("--distro", action='store',
help='The target distribution for the package list')
parser.add_argument("--snapshot", action='store',
help='The snapshot date to download')
parser.add_argument("--sha256", action='store',
help='The sha256 checksum to validate for the Packages.gz file')
def main():
""" A tool for downloading debian packages and package metadata """
args = parser.parse_args()
if args.download_and_extract_only:
download_package_list(args.mirror_url, args.distro, args.arch, args.snapshot, args.sha256)
else:
download_dpkg(args.package_files, args.packages, args.workspace_name)
def download_dpkg(package_files, packages, workspace_name):
""" Using an unzipped, json package file with full urls,
downloads a .deb package
Uses the 'Filename' key to download the .deb package
"""
package_to_rule_map = {}
for pkg_name in packages.split(","):
for package_file in package_files.split(","):
with open(package_file, 'rb') as f:
metadata = json.load(f)
if pkg_name in metadata:
pkg = metadata[pkg_name]
buf = urllib2.urlopen(pkg[FILENAME_KEY])
package_to_rule_map[pkg_name] = util.package_to_rule(workspace_name, pkg_name)
out_file = os.path.join("file", util.encode_package_name(pkg_name))
with open(out_file, 'w') as f:
f.write(buf.read())
                    actual_checksum = util.sha256_checksum(out_file)
                    expected_checksum = pkg[SHA256_KEY]
                    if actual_checksum != expected_checksum:
                        raise Exception("Wrong checksum for package %s. Expected: %s, Actual: %s" % (pkg_name, expected_checksum, actual_checksum))
break
else:
raise Exception("Package %s not found in any of the sources" % pkg_name)
with open(PACKAGE_MAP_FILE_NAME, 'w') as f:
f.write("packages = " + json.dumps(package_to_rule_map))
def download_package_list(mirror_url, distro, arch, snapshot, sha256):
"""Downloads a debian package list, expands the relative urls,
and saves the metadata as a json file
A debian package list is a gzipped, newline delimited, colon separated
file with metadata about all the packages available in that repository.
Multiline keys are indented with spaces.
An example package looks like:
Package: newmail
Version: 0.5-2
Installed-Size: 76
Maintainer: <NAME> <<EMAIL>>
Architecture: amd64
Depends: libc6 (>= 2.7-1)
Description: Notificator for incoming mail
Homepage: http://www.infodrom.org/projects/newmail/
Description-md5: 49b0168ce625e668ce3031036ad2f541
Tag: interface::commandline, mail::notification, role::program,
scope::utility, works-with::mail
Section: mail
Priority: optional
Filename: pool/main/n/newmail/newmail_0.5-2_amd64.deb
Size: 14154
MD5sum: 5cd31aab55877339145517fb6d5646cb
SHA1: 869934a25a8bb3def0f17fef9221bed2d3a460f9
SHA256: 52ec3ac93cf8ba038fbcefe1e78f26ca1d59356cdc95e60f987c3f52b3f5e7ef
"""
url = "%s/debian/%s/dists/%s/main/binary-%s/Packages.gz" % (
mirror_url,
snapshot,
distro,
arch
)
buf = urllib2.urlopen(url)
with open("Packages.gz", 'w') as f:
f.write(buf.read())
actual_sha256 = util.sha256_checksum("Packages.gz")
if sha256 != actual_sha256:
raise Exception("sha256 of Packages.gz don't match: Expected: %s, Actual:%s" %(sha256, actual_sha256))
with gzip.open("Packages.gz", 'rb') as f:
data = f.read()
metadata = parse_package_metadata(data, mirror_url, snapshot)
with open(PACKAGES_FILE_NAME, 'w') as f:
json.dump(metadata, f)
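def parse_stanza(text):
    """Illustrative sketch only: parse a single stanza of the colon-separated
    format documented above (multiline keys are indented with spaces).
    The real parsing is done by parse_package_metadata in another module."""
    fields = {}
    key = None
    for line in text.splitlines():
        if line.startswith(' ') and key:   # continuation of a multiline key
            fields[key] += ' ' + line.strip()
        elif ':' in line:
            key, _, value = line.partition(':')
            fields[key] = value.strip()
    return fields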
if __name__ == "__main__":
main()
``` |
{
"source": "jonjohnston/okta-awscli",
"score": 3
} |
#### File: okta-awscli/oktaawscli/okta_auth_mfa_app.py
```python
import time
import sys
from urllib.parse import parse_qs
from urllib.parse import urlparse
from oktaawscli.version import __version__
class OktaAuthMfaApp():
""" Handles per-app Okta MFA """
def __init__(self, logger, session, verify_ssl, auth_url):
self.session = session
self.logger = logger
self._verify_ssl_certs = verify_ssl
self._preferred_mfa_type = None
self._mfa_code = None
self._auth_url = auth_url
def stepup_auth(self, embed_link, state_token=None):
""" Login to Okta using the Step-up authentication flow"""
flow_state = self._get_initial_flow_state(embed_link, state_token)
while flow_state.get('apiResponse').get('status') != 'SUCCESS':
flow_state = self._next_login_step(
flow_state.get('stateToken'), flow_state.get('apiResponse'))
return flow_state['apiResponse']
def _next_login_step(self, state_token, login_data):
""" decide what the next step in the login process is"""
if 'errorCode' in login_data:
self.logger.error("LOGIN ERROR: {} | Error Code: {}".format(login_data['errorSummary'], login_data['errorCode']))
sys.exit(2)
status = login_data['status']
if status == 'UNAUTHENTICATED':
self.logger.error("You are not authenticated -- please try to log in again")
sys.exit(2)
elif status == 'LOCKED_OUT':
self.logger.error("Your Okta access has been locked out due to failed login attempts.")
sys.exit(2)
elif status == 'MFA_ENROLL':
self.logger.error("You must enroll in MFA before using this tool.")
sys.exit(2)
elif status == 'MFA_REQUIRED':
return self._login_multi_factor(state_token, login_data)
elif status == 'MFA_CHALLENGE':
if 'factorResult' in login_data and login_data['factorResult'] == 'WAITING':
return self._check_push_result(state_token, login_data)
else:
return self._login_input_mfa_challenge(state_token, login_data['_links']['next']['href'])
else:
raise RuntimeError('Unknown login status: ' + status)
def _get_initial_flow_state(self, embed_link, state_token=None):
""" Starts the authentication flow with Okta"""
if state_token is None:
response = self.session.get(
embed_link, allow_redirects=False)
url_parse_results = urlparse(response.headers['Location'])
state_token = parse_qs(url_parse_results.query)['stateToken'][0]
response = self.session.post(
self._auth_url,
json={'stateToken': state_token},
headers=self._get_headers(),
verify=self._verify_ssl_certs
)
return {'stateToken': state_token, 'apiResponse': response.json()}
def _get_headers(self):
return {
'User-Agent': "okta-awscli/%s" % __version__,
'Accept': 'application/json',
'Content-Type': 'application/json'
}
def _choose_factor(self, factors):
""" gets a list of available authentication factors and
asks the user to select the factor they want to use """
print("Multi-factor Authentication required for application.")
# filter the factor list down to just the types specified in preferred_mfa_type
if self._preferred_mfa_type is not None:
factors = list(filter(lambda item: item['factorType'] == self._preferred_mfa_type, factors))
if len(factors) == 1:
factor_name = self._build_factor_name(factors[0])
self.logger.info("%s selected" % factor_name)
selection = 0
else:
print("Pick a factor:")
# print out the factors and let the user select
for i, factor in enumerate(factors):
factor_name = self._build_factor_name(factor)
if factor_name:
print('[ %d ] %s' % (i, factor_name))
selection = input("Selection: ")
        # make sure the choice is valid (list indices run from 0 to len-1)
        if int(selection) < 0 or int(selection) >= len(factors):
self.logger.error("You made an invalid selection")
sys.exit(1)
return factors[int(selection)]
@staticmethod
def _build_factor_name(factor):
""" Build the display name for a MFA factor based on the factor type"""
if factor['factorType'] == 'push':
return "Okta Verify App: " + factor['profile']['deviceType'] + ": " + factor['profile']['name']
elif factor['factorType'] == 'sms':
return factor['factorType'] + ": " + factor['profile']['phoneNumber']
elif factor['factorType'] == 'call':
return factor['factorType'] + ": " + factor['profile']['phoneNumber']
elif factor['factorType'] == 'token:software:totp':
return factor['factorType'] + "( " + factor['provider'] + " ) : " + factor['profile']['credentialId']
elif factor['factorType'] == 'token':
return factor['factorType'] + ": " + factor['profile']['credentialId']
else:
return "Unknown MFA type: " + factor['factorType']
def _login_send_sms(self, state_token, factor):
""" Send SMS message for second factor authentication"""
response = self.session.post(
factor['_links']['verify']['href'],
json={'stateToken': state_token},
headers=self._get_headers(),
verify=self._verify_ssl_certs
)
self.logger.info("A verification code has been sent to " + factor['profile']['phoneNumber'])
response_data = response.json()
if 'stateToken' in response_data:
return {'stateToken': response_data['stateToken'], 'apiResponse': response_data}
if 'sessionToken' in response_data:
return {'stateToken': None, 'sessionToken': response_data['sessionToken'], 'apiResponse': response_data}
def _login_send_call(self, state_token, factor):
""" Send Voice call for second factor authentication"""
response = self.session.post(
factor['_links']['verify']['href'],
json={'stateToken': state_token},
headers=self._get_headers(),
verify=self._verify_ssl_certs
)
self.logger.info("You should soon receive a phone call at " + factor['profile']['phoneNumber'])
response_data = response.json()
if 'stateToken' in response_data:
return {'stateToken': response_data['stateToken'], 'apiResponse': response_data}
if 'sessionToken' in response_data:
return {'stateToken': None, 'sessionToken': response_data['sessionToken'], 'apiResponse': response_data}
def _login_send_push(self, state_token, factor):
""" Send 'push' for the Okta Verify mobile app """
response = self.session.post(
factor['_links']['verify']['href'],
json={'stateToken': state_token},
headers=self._get_headers(),
verify=self._verify_ssl_certs
)
self.logger.info("Okta Verify push sent...")
response_data = response.json()
if 'stateToken' in response_data:
return {'stateToken': response_data['stateToken'], 'apiResponse': response_data}
if 'sessionToken' in response_data:
return {'stateToken': None, 'sessionToken': response_data['sessionToken'], 'apiResponse': response_data}
def _login_multi_factor(self, state_token, login_data):
""" handle multi-factor authentication with Okta"""
factor = self._choose_factor(login_data['_embedded']['factors'])
if factor['factorType'] == 'sms':
return self._login_send_sms(state_token, factor)
elif factor['factorType'] == 'call':
return self._login_send_call(state_token, factor)
elif factor['factorType'] == 'token:software:totp':
return self._login_input_mfa_challenge(state_token, factor['_links']['verify']['href'])
elif factor['factorType'] == 'token':
return self._login_input_mfa_challenge(state_token, factor['_links']['verify']['href'])
elif factor['factorType'] == 'push':
return self._login_send_push(state_token, factor)
def _login_input_mfa_challenge(self, state_token, next_url):
""" Submit verification code for SMS or TOTP authentication methods"""
pass_code = self._mfa_code
if pass_code is None:
pass_code = input("Enter MFA verification code: ")
response = self.session.post(
next_url,
json={'stateToken': state_token, 'passCode': pass_code},
headers=self._get_headers(),
verify=self._verify_ssl_certs
)
response_data = response.json()
if 'status' in response_data and response_data['status'] == 'SUCCESS':
if 'stateToken' in response_data:
return {'stateToken': response_data['stateToken'], 'apiResponse': response_data}
if 'sessionToken' in response_data:
return {'stateToken': None, 'sessionToken': response_data['sessionToken'], 'apiResponse': response_data}
else:
return {'stateToken': None, 'sessionToken': None, 'apiResponse': response_data}
def _check_push_result(self, state_token, login_data):
""" Check Okta API to see if the push request has been responded to"""
time.sleep(1)
response = self.session.post(
login_data['_links']['next']['href'],
json={'stateToken': state_token},
headers=self._get_headers(),
verify=self._verify_ssl_certs
)
response_data = response.json()
if 'stateToken' in response_data:
return {'stateToken': response_data['stateToken'], 'apiResponse': response_data}
if 'sessionToken' in response_data:
return {'stateToken': None, 'sessionToken': response_data['sessionToken'], 'apiResponse': response_data}
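def _example_stepup(embed_link):
    """Hedged usage sketch, not part of the original module: the auth URL is
    a placeholder and the requests session is assumed to already carry an
    authenticated Okta session cookie."""
    import logging
    import requests
    session = requests.Session()
    mfa = OktaAuthMfaApp(logging.getLogger(__name__), session, True,
                         'https://example.okta.com/api/v1/authn')
    return mfa.stepup_auth(embed_link)  # loops until the API reports SUCCESS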
``` |
{
"source": "jonjomckay/plugin.audio.bbcsounds",
"score": 2
} |
#### File: jonjomckay/plugin.audio.bbcsounds/addon.py
```python
import datetime
import json
import os
import sys
import urllib
import urlparse
from collections import OrderedDict
from time import mktime
import dateutil.parser
import feedparser
import requests
import xbmc
import xbmcaddon
import xbmcgui
import xbmcplugin
from bs4 import BeautifulSoup
stations = {
'p00fzl68': {'name': 'BBC Asian Network', 'image': 'bbc_asian_network_colour'},
'p00fzl78': {'name': 'BBC Coventry & Warwickshire', 'image': 'bbc_radio_coventry_warwickshire_colour'},
'p00fzl7f': {'name': 'BBC Essex', 'image': 'bbc_radio_essex_colour'},
'p00fzl7q': {'name': 'BBC Hereford & Worcester', 'image': 'bbc_radio_hereford_worcester_colour'},
'p00fzl82': {'name': 'BBC Newcastle', 'image': 'bbc_radio_newcastle_colour'},
'p00fzl86': {'name': 'BBC Radio 1', 'image': 'bbc_radio_one_colour'},
'p00fzl64': {'name': 'BBC Radio 1Xtra', 'image': 'bbc_1xtra_colour'},
'p00fzl8v': {'name': 'BBC Radio 2', 'image': 'bbc_radio_two_colour'},
'p00fzl8t': {'name': 'BBC Radio 3', 'image': 'bbc_radio_three_colour'},
'p00fzl7j': {'name': 'BBC Radio 4 FM', 'image': 'bbc_radio_fourfm_colour'},
'p00fzl7k': {'name': 'BBC Radio 4 LW', 'image': 'bbc_radio_four_colour'},
'p00fzl7l': {'name': 'BBC Radio 4 Extra', 'image': 'bbc_radio_four_extra_colour'},
'p00fzl7g': {'name': 'BBC Radio 5 live', 'image': 'bbc_radio_five_live_colour'},
'p00fzl7h': {'name': 'BBC Radio 5 live sports extra', 'image': 'bbc_radio_five_live_sports_extra_colour'},
'p00fzl65': {'name': 'BBC Radio 6 Music', 'image': 'bbc_6music_colour'},
'p00fzl74': {'name': 'BBC Radio Berkshire', 'image': 'bbc_radio_berkshire_colour'},
'p00fzl75': {'name': 'BBC Radio Bristol', 'image': 'bbc_radio_bristol_colour'},
'p00fzl76': {'name': 'BBC Radio Cambridgeshire', 'image': 'bbc_radio_cambridge_colour'},
'p00fzl77': {'name': 'BBC Radio Cornwall', 'image': 'bbc_radio_cornwall_colour'},
'p00fzl79': {'name': 'BBC Radio Cumbria', 'image': 'bbc_radio_cumbria_colour'},
'p00fzl7b': {'name': 'BBC Radio Cymru', 'image': 'bbc_radio_cymru_colour'},
'p00fzl7c': {'name': 'BBC Radio Derby', 'image': 'bbc_radio_derby_colour'},
'p00fzl7d': {'name': 'BBC Radio Devon', 'image': 'bbc_radio_devon_colour'},
'p00fzl7m': {'name': 'BBC Radio Foyle', 'image': 'bbc_radio_foyle_colour'},
'p00fzl7n': {'name': 'BBC Radio Gloucestershire', 'image': 'bbc_radio_gloucestershire_colour'},
'p00fzl7p': {'name': 'BBC Radio Guernsey', 'image': 'bbc_radio_guernsey_colour'},
'p00fzl7r': {'name': 'BBC Radio Humberside', 'image': 'bbc_radio_humberside_colour'},
'p00fzl7s': {'name': 'BBC Radio Jersey', 'image': 'bbc_radio_jersey_colour'},
'p00fzl7t': {'name': 'BBC Radio Kent', 'image': 'bbc_radio_kent_colour'},
'p00fzl7v': {'name': 'BBC Radio Lancashire', 'image': 'bbc_radio_lancashire_colour'},
'p00fzl7w': {'name': 'BBC Radio Leeds', 'image': 'bbc_radio_leeds_colour'},
'p00fzl7x': {'name': 'BBC Radio Leicester', 'image': 'bbc_radio_leicester_colour'},
'p00fzl7y': {'name': 'BBC Radio Lincolnshire', 'image': 'bbc_radio_lincolnshire_colour'},
'p00fzl6f': {'name': 'BBC Radio London', 'image': 'bbc_london_colour'},
'p00fzl7z': {'name': 'BBC Radio Manchester', 'image': 'bbc_radio_manchester_colour'},
'p00fzl80': {'name': 'BBC Radio Merseyside', 'image': 'bbc_radio_merseyside_colour'},
'p00fzl81': {'name': 'BBC Radio Nan Gaidheal', 'image': 'bbc_radio_nan_gaidheal_colour'},
'p00fzl83': {'name': 'BBC Radio Norfolk', 'image': 'bbc_radio_norfolk_colour'},
'p00fzl84': {'name': 'BBC Radio Northampton', 'image': 'bbc_radio_northampton_colour'},
'p00fzl85': {'name': 'BBC Radio Nottingham', 'image': 'bbc_radio_nottingham_colour'},
'p00fzl8c': {'name': 'BBC Radio Oxford', 'image': 'bbc_radio_oxford_colour'},
'p00fzl8d': {'name': 'BBC Radio Scotland (FM)', 'image': 'bbc_radio_scotland_fm_colour'},
'p00fzl8g': {'name': 'BBC Radio Scotland (MW)', 'image': 'bbc_radio_scotland_colour'},
'p00fzl8b': {'name': 'BBC Radio Scotland (Orkney)', 'image': 'bbc_radio_scotland_colour'},
'p00fzl8j': {'name': 'BBC Radio Scotland (Shetland)', 'image': 'bbc_radio_scotland_colour'},
'p00fzl8h': {'name': 'BBC Radio Sheffield', 'image': 'bbc_radio_sheffield_colour'},
'p00fzl8k': {'name': 'BBC Radio Shropshire', 'image': 'bbc_radio_shropshire_colour'},
'p00fzl8l': {'name': 'BBC Radio Solent', 'image': 'bbc_radio_solent_colour'},
'p00fzl8n': {'name': 'BBC Radio Stoke', 'image': 'bbc_radio_stoke_colour'},
'p00fzl8p': {'name': 'BBC Radio Suffolk', 'image': 'bbc_radio_suffolk_colour'},
'p00fzl8w': {'name': 'BBC Radio Ulster', 'image': 'bbc_radio_ulster_colour'},
'p00fzl8y': {'name': 'BBC Radio Wales (FM)', 'image': 'bbc_radio_wales_fm_colour'},
'p00fzl8x': {'name': 'BBC Radio Wales (LW)', 'image': 'bbc_radio_wales_colour'},
'p00fzl90': {'name': 'BBC Radio York', 'image': 'bbc_radio_york_colour'},
'p00fzl8m': {'name': 'BBC Somerset', 'image': 'bbc_radio_somerset_sound_colour'},
'p00fzl8q': {'name': 'BBC Surrey', 'image': 'bbc_radio_surrey_colour'},
'p00fzl8r': {'name': 'BBC Sussex', 'image': 'bbc_radio_sussex_colour'},
'p00fzl93': {'name': 'BBC Tees', 'image': 'bbc_tees_colour'},
'p00fzl96': {'name': 'BBC Three Counties Radio', 'image': 'bbc_three_counties_radio_colour'},
'p00fzl8z': {'name': 'BBC Wiltshire', 'image': 'bbc_radio_wiltshire_colour'},
'p00fzl9f': {'name': 'BBC WM 95.6', 'image': 'bbc_wm_colour'},
'p02zbmb3': {'name': 'BBC World Service', 'image': 'bbc_world_service_colour'},
'p02jf21y': {'name': 'CBeebies Radio', 'image': 'cbeebies_radio_colour'},
}
stations_ordered = OrderedDict(sorted(stations.items(), key=lambda x: x[1]['name']))
def get_page(url):
# download the source HTML for the page using requests
# and parse the page using BeautifulSoup
return BeautifulSoup(requests.get(url).text, 'html.parser')
__addon__ = xbmcaddon.Addon()
__addonname__ = __addon__.getAddonInfo('name')
# Parse the stuff passed into the addon
base_url = sys.argv[0]
addon_handle = int(sys.argv[1])
args = dict(urlparse.parse_qsl(sys.argv[2][1:]))
xbmcplugin.setContent(addon_handle, 'audio')
def build_url(query):
return base_url + '?' + urllib.urlencode(query)
def mode_default():
categories = {
'podcasts': 'Podcasts',
'stations': 'Stations'
}
for mode, category in categories.items():
url = build_url({'mode': mode})
li = xbmcgui.ListItem(category)
xbmcplugin.addDirectoryItem(handle=addon_handle, url=url, listitem=li, isFolder=True)
xbmcplugin.endOfDirectory(addon_handle)
def mode_episode(pid):
programme = requests.get('https://www.bbc.co.uk/programmes/' + pid + '.json')
programme_json = programme.json()["programme"]
picked_url = None
for version in programme_json["versions"]:
playlist = requests.get(
'https://open.live.bbc.co.uk/mediaselector/6/select/version/2.0/mediaset/iptv-all/vpid/' + version["pid"] + '/format/json')
playlist_json = playlist.json()
if "media" not in playlist_json:
            # TODO: handle versions with no playable media (e.g. expired streams)
continue
# Filter by only audio items, and order with the highest bitrate first
audio_items = [item for item in playlist_json['media'] if item['kind'] == 'audio']
audio_items.sort(key=lambda x: x['bitrate'], reverse=True)
xbmc.log('Found {0} audio items for the programme version {1}'.format(len(audio_items), version['pid']), level=xbmc.LOGNOTICE)
        # Pick the first stream available for the highest bitrate item,
        # guarding against versions with no audio streams at all
        if not audio_items:
            continue
        picked_stream = audio_items[0]
picked_url = picked_stream["connection"][1]["href"]
xbmc.log('Picked the {0} stream with the bitrate {1}'.format(picked_stream['encoding'], picked_stream['bitrate']), level=xbmc.LOGNOTICE)
    if picked_url is None:
        xbmcgui.Dialog().notification(__addonname__, "Episode not available to stream", icon=xbmcgui.NOTIFICATION_ERROR)
        xbmcplugin.setResolvedUrl(addon_handle, False, listitem=xbmcgui.ListItem())
        return
    play_item = xbmcgui.ListItem(path=picked_url)
    play_item.setArt({
        'thumb': 'https://ichef.bbci.co.uk/images/ic/480xn/' + programme_json["image"]["pid"] + '.jpg',
        'icon': 'https://ichef.bbci.co.uk/images/ic/480xn/' + programme_json["image"]["pid"] + '.jpg'
    })
    play_item.setInfo('music', {
        'title': programme_json["display_title"]["title"],
        'artist': programme_json["display_title"]["subtitle"],
        'album': programme_json["ownership"]["service"]["title"],
        'comment': programme_json["short_synopsis"]
    })
    xbmcplugin.setResolvedUrl(addon_handle, True, listitem=play_item)
def mode_podcasts():
podcasts = requests.get('https://www.bbc.co.uk/podcasts.json')
podcasts_json = podcasts.json()["podcasts"]
# Sort the podcasts by title
podcasts_ordered = sorted(podcasts_json, key=lambda x: x["title"])
for podcast in podcasts_ordered:
url = build_url({'mode': 'podcast', 'pid': podcast["shortTitle"]})
li = xbmcgui.ListItem(podcast["title"])
li.setInfo('video', {'plot': podcast["description"]})
if "imageUrl" in podcast:
li.setThumbnailImage(podcast["imageUrl"].replace('{recipe}', '624x624'))
xbmcplugin.addDirectoryItem(handle=addon_handle, url=url, listitem=li, isFolder=True)
xbmcplugin.endOfDirectory(addon_handle)
def mode_podcast(pid):
podcast = feedparser.parse('https://podcasts.files.bbci.co.uk/' + pid + '.rss')
image_url = None
if "image" in podcast.feed:
image_url = podcast.feed.image.url
for entry in podcast.entries:
entry_pid = entry.ppg_canonical.split('/')
entry_date = datetime.datetime.fromtimestamp(mktime(entry.published_parsed)).strftime('%Y-%m-%d')
entry_title = entry_date + ": " + entry.title
if len(entry_pid) > 2:
url = build_url({'mode': 'episode', 'pid': entry_pid[2]})
li = xbmcgui.ListItem(entry_title)
li.setInfo('video', {'plot': entry.description})
li.setThumbnailImage(image_url)
li.setProperty('IsPlayable', 'true')
xbmcplugin.addDirectoryItem(handle=addon_handle, url=url, listitem=li)
else:
xbmc.log('No pid could be found for the item at ' + entry.link, level=xbmc.LOGERROR)
xbmcplugin.endOfDirectory(addon_handle)
def mode_stations():
for pid, station in stations_ordered.items():
url = build_url({'mode': 'station', 'pid': pid})
li = xbmcgui.ListItem(station['name'])
li.setThumbnailImage(xbmc.translatePath(os.path.join(__addon__.getAddonInfo('path'), 'resources', station['image'] + '.png')))
xbmcplugin.addDirectoryItem(handle=addon_handle, url=url, listitem=li, isFolder=True)
xbmcplugin.endOfDirectory(addon_handle)
def mode_station(pid):
base = datetime.datetime.today()
# Create a range of the last 30 days
for delta in range(30):
date = base - datetime.timedelta(days=delta)
year = '%04d' % date.year
month = '%02d' % date.month
day = '%02d' % date.day
url = build_url({'mode': 'station_date', 'pid': pid, 'year': year, 'month': month, 'day': day})
list_item = xbmcgui.ListItem(date.strftime('%Y-%m-%d (%A)'))
xbmcplugin.addDirectoryItem(addon_handle, url, list_item, isFolder=True)
xbmcplugin.endOfDirectory(addon_handle)
def mode_station_date(pid, year, month, day):
# Load the schedules for the station
schedule = get_page('https://www.bbc.co.uk/schedules/' + pid + '/' + year + '/' + month + '/' + day)
result = None
for tag in schedule.find_all('script', type='application/ld+json'):
if 'RadioEpisode' in tag.contents[0]:
result = json.loads(tag.contents[0])
if result is None:
xbmcgui.Dialog().notification(__addonname__, "Something went wrong parsing the station's schedule",
icon=xbmcgui.NOTIFICATION_ERROR)
return
for episode in result["@graph"]:
date = dateutil.parser.parse(episode["publication"]["startDate"])
time = date.strftime('%Y-%m-%d, %H:%M')
if "partOfSeries" in episode:
title = time + ": " + episode["partOfSeries"]["name"] + " - " + episode["name"]
else:
title = time + ": " + episode["name"]
url = build_url({'mode': 'episode', 'pid': episode["identifier"]})
list_item = xbmcgui.ListItem(title)
list_item.setInfo('video', {'plot': episode["description"]})
list_item.setPath(url)
list_item.setProperty('IsPlayable', "true")
list_item.setThumbnailImage(episode["image"])
xbmcplugin.addDirectoryItem(addon_handle, url, list_item, isFolder=False)
xbmcplugin.endOfDirectory(addon_handle)
mode = args.get('mode', None)
if mode is None:
mode_default()
elif mode == 'episode':
mode_episode(args['pid'])
elif mode == 'podcasts':
mode_podcasts()
elif mode == 'podcast':
mode_podcast(args['pid'])
elif mode == 'stations':
mode_stations()
elif mode == 'station':
mode_station(args['pid'])
elif mode == 'station_date':
mode_station_date(args['pid'], args['year'], args['month'], args['day'])
``` |
{
"source": "jonjon33/sandbox",
"score": 3
} |
#### File: 2010-africa/a-StoreCredit/storeCredit.py
```python
ifile = 'input.txt' # simple input
ofile = 'output.txt' # simple output
#ifile = 'A-large-practice.in' # official input
#ofile = 'A-large-practice.out' # official output
caselist = [] # list containing cases
#
# Problem State (Case) Class
#
class CredCase(object):
def __init__(self,credit,itemCount,items): # Initialize:
self.credit = int(credit) # credit amount
self.itemCount = int(itemCount) # item count
self.items = list(map(int,items.split())) # item values list
self.cost = -1 # total cost
self.solution = [] # output list
def trySolution(self,indices):
cost = self.items[indices[0]] + self.items[indices[1]]
if (cost <= self.credit) and (cost > self.cost):
self.cost = cost
self.solution = [x+1 for x in indices]
#
# Read Input File
#
with open(ifile) as f:
cases = int(f.readline())
for n in range(0,cases):
case = CredCase(f.readline(),f.readline(),f.readline())
caselist.append(case)
#
# Conduct Algorithm
#
for n in range(0,cases):
case = caselist[n]
for i in range(0,case.itemCount):
for j in range( (i+1) , case.itemCount ):
case.trySolution( [i,j] )
if case.credit == case.cost:
break
if case.credit == case.cost:
break
#
# Write Output File
#
with open(ofile,'w') as f:
for n in range(0,cases):
case = caselist[n]
casestr = 'Case #'+str(n+1)+': '
casestr = casestr+str(case.solution[0])+' '+str(case.solution[1])+'\n'
checkstr = 'Check: Credit='+str(case.credit)
checkstr += ' Cost='+str(case.cost)
checkstr += ' Item'+str(case.solution[0])+'='
checkstr += str(case.items[case.solution[0]-1])
checkstr += ' Item'+str(case.solution[1])+'='
checkstr += str(case.items[case.solution[1]-1])+'\n'
f.write(casestr)
#f.write(checkstr)
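#
# Illustrative alternative (not part of the original solution): the same
# brute-force pair search expressed with itertools; `items` and `credit`
# stand in for one case's data, and the names are hypothetical.
#
from itertools import combinations
def best_pair(items, credit):
    best_cost, best = -1, []
    for (i, a), (j, b) in combinations(enumerate(items), 2):
        if best_cost < a + b <= credit:
            best_cost, best = a + b, [i + 1, j + 1]
    return best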
```
#### File: dsbook/1.13-class/frac.py
```python
class Fraction(object):
'''pure fraction implementation'''
def __init__(self, top, bot):
self.num = top
self.den = bot
def __str__(self):
return str(self.num)+'/'+str(self.den)
def __add__(self, other):
newnum = self.num * other.den + other.num * self.den
newden = self.den * other.den
comden = gcd(newnum, newden)
return Fraction(newnum//comden, newden//comden)
def __sub__(self, other):
newnum = self.num * other.den - other.num * self.den
newden = self.den * other.den
comden = gcd(newnum, newden)
return Fraction(newnum//comden, newden//comden)
def __mul__(self, other):
newnum = self.num * other.num
newden = self.den * other.den
comden = gcd(newnum, newden)
return Fraction(newnum//comden, newden//comden)
def __div__(self, other):
newnum = self.num * other.den
newden = self.den * other.num
comden = gcd(newnum, newden)
return Fraction(newnum//comden, newden//comden)
def __eq__(self, other):
val1 = self.num * other.den
val2 = self.den * other.num
return val1 == val2
def __lt__(self, other):
val1 = self.num * other.den
val2 = self.den * other.num
return val1 < val2
def __gt__(self, other):
val1 = self.num * other.den
val2 = self.den * other.num
return val1 > val2
def gcd(den1, den2):
    '''find greatest common divisor'''
while den1 % den2 != 0:
prevden1 = den1
prevden2 = den2
den1 = prevden2
den2 = prevden1%prevden2
return den2
def main():
'''main area'''
frac1 = Fraction(1, 2)
frac2 = Fraction(1, 4)
frac3 = frac1 + frac2
frac4 = frac1 - frac2
frac5 = frac2 - frac3
frac6 = frac1 * frac2
frac7 = frac2 / frac1
print frac1
print frac2
print frac3
print frac4
print frac5
print frac6
print frac7
print frac1 == frac7
print frac1 > frac2
print frac2 < frac3
print frac1 == frac2
print frac2 > frac3
print frac1 < frac2
main()
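# Portability note (illustrative): this module is Python 2 code -- `print`
# statements and the `__div__` operator hook. Under Python 3, `/` dispatches
# to `__truediv__` instead; the alias below is the minimal change needed there
# (the print statements would still need converting to print() calls).
Fraction.__truediv__ = Fraction.__div__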
```
#### File: dsbook/1.13-class/logicgates.py
```python
class LogicGate(object):
'''general gate definition'''
def __init__(self, name):
self.label = name
self.output = None
def getlabel(self):
'''get gate label'''
return self.label
def getoutput(self):
'''get gate logical output'''
self.output = self.dogatelogic()
return self.output
class BinaryGate(LogicGate):
'''gate with two input pins'''
def __init__(self, name):
LogicGate.__init__(self, name)
self.pin1 = None
self.pin2 = None
def getpin1(self):
'''get value on pin1'''
return self.pin1
def getpin2(self):
'''get value on pin2'''
return self.pin2
class ANDGate(BinaryGate):
'''ANDing logical gate'''
def __init__(self, name):
BinaryGate.__init__(self, name)
def dogatelogic(self):
'''AND gate logic implementation'''
if self.pin1 == 1 and self.pin2 == 1:
return 1
else:
return 0
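class ORGate(BinaryGate):
    '''ORing logical gate -- illustrative sibling of ANDGate, added to show
    how the hierarchy extends; not part of the original file'''
    def __init__(self, name):
        BinaryGate.__init__(self, name)
    def dogatelogic(self):
        '''OR gate logic implementation'''
        if self.pin1 == 1 or self.pin2 == 1:
            return 1
        else:
            return 0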
def main():
'''main area'''
gate1 = ANDGate("G1")
print gate1.getoutput()
main()
``` |
{
"source": "JonJonHuang/Hoops",
"score": 3
} |
#### File: JonJonHuang/Hoops/edge_detection.py
```python
import numpy as np
import argparse
import imutils
import cv2
from matplotlib import pyplot as plt
# remove extraneous lines return tuple of (horizontal line, vertical line)
def trim_lines(lines, avg_vert, avg_horiz):
# y0 (perp line from origin) - x0 * slope
# slope = float(y2 - y1) / float(x2 - x1)
vert_lines = []
horiz_lines = []
# separate into horizontal and vertical lines
for line in lines:
for rho, theta in line:
a = np.cos(theta)
b = np.sin(theta)
x0 = a*rho
y0 = b*rho
x1 = int(x0 + 1000*(-b))
y1 = int(y0 + 1000*(a))
x2 = int(x0 - 1000*(-b))
y2 = int(y0 - 1000*(a))
slope = float(y2-y1)/float(x2-x1)
if abs(slope) < .1:
horiz_lines.append(line)
elif abs(slope) > .5:
vert_lines.append(line)
# grab the most extreme vertical line and highest horizontal line
horiz_lines.sort(key=lambda line: get_y_intercept(line))
horiz = horiz_lines[0] if len(horiz_lines) > 0 else avg_horiz
vert = vert_lines[0] if len(vert_lines) > 0 else avg_vert
return horiz, vert
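# The (rho, theta) -> Cartesian conversion above is repeated throughout this
# file and tracking_people_video.py; factoring it out would look like this
# sketch (the helper name is illustrative, not from the original code):
def hough_to_segment(rho, theta, length=1000):
    a = np.cos(theta)
    b = np.sin(theta)
    x0, y0 = a*rho, b*rho
    pt1 = (int(x0 + length*(-b)), int(y0 + length*(a)))
    pt2 = (int(x0 - length*(-b)), int(y0 - length*(a)))
    return pt1, pt2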
def get_y_intercept(line):
    for rho, theta in line:
        a = np.cos(theta)
        b = np.sin(theta)
        x0 = a*rho
        y0 = b*rho
        x1 = int(x0 + 1000*(-b))
        y1 = int(y0 + 1000*(a))
        x2 = int(x0 - 1000*(-b))
        y2 = int(y0 - 1000*(a))
        slope = float(y2-y1)/float(x2-x1)
        intercept = y1 - x1 * slope
        return intercept
if __name__ == "__main__":
img, minSlope, minB, maxSlope, maxB = getCourtLineInfo('assets/images/still_cropped.png')
for x in range(0, img.shape[1]-1):
for y in range(0, img.shape[0]-1):
if y < getY(minSlope, minB, x) or y < getY(maxSlope, maxB, x):
img[y][x][0] = 0
img[y][x][1] = 0
img[y][x][2] = 0
out.write(img)
j += 1
else:
break
cap.release()
out.release()
cv2.destroyAllWindows()
```
#### File: JonJonHuang/Hoops/tracking_people_video.py
```python
from __future__ import print_function
from imutils.object_detection import non_max_suppression
from imutils import paths
import numpy as np
import argparse
import imutils
import cv2
from matplotlib import pyplot as plt
def filterLargeBoxes(boxArr):
arr = boxArr[:]
for i in range(len(arr)-1, -1, -1):
x1, y1, w1, h1 = arr[i]
for j in range(len(arr)-1, -1, -1):
x2, y2, w2, h2 = arr[j]
if (x2 < x1 and y2 < y1 and x2+w2 > x1+w1 and y2+h2 > y1+h1):
arr = np.delete(arr, j, 0)
return arr
def getY(line, x):
if line is not None:
rho, theta = line[0]
slope = np.tan(theta-np.pi/2)
x0 = np.cos(theta)*rho
y0 = np.sin(theta)*rho
b = y0 - x0 * slope
return slope*x + b
return 0
# remove extraneous lines return tuple of (horizontal line, vertical line)
def trim_lines(lines, avg_vert, avg_horiz):
# y0 (perp line from origin) - x0 * slope
# slope = float(y2 - y1) / float(x2 - x1)
vert_lines = []
horiz_lines = []
# separate into horizontal and vertical lines
for line in lines:
for rho, theta in line:
a = np.cos(theta)
b = np.sin(theta)
x0 = a*rho
y0 = b*rho
x1 = int(x0 + 1000*(-b))
y1 = int(y0 + 1000*(a))
x2 = int(x0 - 1000*(-b))
y2 = int(y0 - 1000*(a))
slope = float(y2-y1)/float(x2-x1)
if abs(slope) < .1:
horiz_lines.append(line)
elif abs(slope) > .5:
vert_lines.append(line)
# grab the most extreme vertical line and highest horizontal line
horiz_lines.sort(key=lambda line: get_y_intercept(line))
horiz = horiz_lines[0] if len(horiz_lines) > 0 else avg_horiz
vert = vert_lines[0] if len(vert_lines) > 0 else avg_vert
return horiz, vert
def get_y_intercept(line):
for rho, theta in line:
a = np.cos(theta)
b = np.sin(theta)
x0 = a*rho
y0 = b*rho
x1 = int(x0 + 1000*(-b))
y1 = int(y0 + 1000*(a))
x2 = int(x0 - 1000*(-b))
y2 = int(y0 - 1000*(a))
slope = float(y2-y1)/float(x2-x1)
intercept = y1 - x1 * slope
return intercept
cap = cv2.VideoCapture('assets/video/trimmed_example.mp4')
i = 0
j = 0
fourcc = cv2.VideoWriter_fourcc(*'MJPG')
trim_upper_factor = .9
trim_lower_factor = .25
out = cv2.VideoWriter('output.avi',fourcc, 30, (960,int(540 * trim_upper_factor - 540 * trim_lower_factor)))
avg_horiz = None
avg_vert = None
while cap.isOpened():
i += 1
ret, img = cap.read()
if ret:
# find the middle of the image
        if img is None:
            print('Img is none')
            break
        img = imutils.resize(img, width=min(960, img.shape[1]))
        y_upper = int(img.shape[0] * trim_upper_factor)
        y_lower = int(img.shape[0] * trim_lower_factor)
        img = img[y_lower : y_upper, 0 : img.shape[1]]
#img = cv2.flip(img, 1)
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
blurred = cv2.GaussianBlur(gray, (3, 3), 255)
edges = cv2.Canny(blurred, 0, 150)
lines = cv2.HoughLines(edges, 1, np.pi/180, 235)
if lines is not None:
# for line in lines:
# for rho, theta in line:
# a = np.cos(theta)
# b = np.sin(theta)
# x0 = a*abs(rho)
# y0 = b*abs(rho)
# x1 = int(x0 + 1000*(-b))
# y1 = int(y0 + 1000*(a))
# x2 = int(x0 - 1000*(-b))
# y2 = int(y0 - 1000*(a))
# slope = (y2-y1)/(x2-x1)
# cv2.line(img,(x1,y1),(x2,y2),(0,0,255),1)
horiz, vert = trim_lines(lines, None, None)
if avg_horiz is not None and horiz is None:
horiz = avg_horiz
if horiz is not None:
if avg_horiz is None or (get_y_intercept(horiz) < 1.07 * get_y_intercept(avg_horiz) and get_y_intercept(horiz) > .93 * get_y_intercept(avg_horiz)):
avg_horiz = horiz
if (get_y_intercept(horiz) > 1.07 * get_y_intercept(avg_horiz) or get_y_intercept(horiz) <.93 * get_y_intercept(avg_horiz)):
horiz = avg_horiz
for rho, theta in horiz:
a = np.cos(theta)
b = np.sin(theta)
x0 = a*rho
y0 = b*rho
x1 = int(x0 + 1000*(-b))
y1 = int(y0 + 1000*(a))
x2 = int(x0 - 1000*(-b))
y2 = int(y0 - 1000*(a))
slope = (y2-y1)/(x2-x1)
cv2.line(img,(x1,y1),(x2,y2),(0,255,0),1)
if avg_vert is not None and vert is None:
vert = avg_vert
if vert is not None:
avg_vert = vert
for rho, theta in vert:
a = np.cos(theta)
b = np.sin(theta)
x0 = a*rho
y0 = b*rho
x1 = int(x0 + 1000*(-b))
y1 = int(y0 + 1000*(a))
x2 = int(x0 - 1000*(-b))
y2 = int(y0 - 1000*(a))
slope = (y2-y1)/(x2-x1)
cv2.line(img,(x1,y1),(x2,y2),(0,255,0),1)
hog = cv2.HOGDescriptor()
hog.setSVMDetector(cv2.HOGDescriptor_getDefaultPeopleDetector())
img = imutils.resize(img, width=min(960, img.shape[1]))
orig = img.copy()
(rects, weights) = hog.detectMultiScale(img, winStride=(4,4), padding=(8,8), scale=1.05)
rects = filterLargeBoxes(rects)
rects = np.array([[x,y,x+w,y+h] for (x,y,w,h) in rects])
pick = non_max_suppression(rects, probs=None, overlapThresh=0.65)
for (xA,yA,xB,yB) in pick:
if (yB > getY(avg_horiz, xA) and yB > getY(avg_horiz, xB)) and (yB > getY(avg_vert, xA) and yB > getY(avg_vert, xB)):
cv2.rectangle(img, (xA, yA), (xB, yB), (0, 255, 0), 2)
out.write(img)
j += 1
else:
break
cap.release()
out.release()
cv2.destroyAllWindows()
``` |
{
"source": "jonjonw/particles",
"score": 3
} |
#### File: book/resampling/compare_tv_distance_resampling.py
```python
from __future__ import division, print_function
from matplotlib import pyplot as plt
import numpy as np
import seaborn as sb
from scipy import stats
from particles import resampling as rs
N = 10**4 # number of samples
ntrials = 100
taus = np.linspace(0., 10, 500)
rs_schemes = ['multinomial', 'residual', 'stratified', 'systematic']
bias = 1.
def tv_distance(x, y):
""" TV distance between two discrete distributions.
x, y: the weights
"""
return 0.5 * sum(abs(x - y))
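# Quick sanity check (illustrative, not in the original script): two
# disjoint point masses are at the maximal TV distance of 1.
assert tv_distance(np.array([1., 0.]), np.array([0., 1.])) == 1.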
results = {key: np.zeros((ntrials, len(taus))) for key in rs_schemes}
for i in range(ntrials):
x = stats.norm.rvs(size=N)
for j, tau in enumerate(taus):
lw = -.5 * tau * (bias - x)**2
W = rs.exp_and_normalise(lw)
for scheme in rs_schemes:
A = rs.resampling(scheme, W)
counts = np.bincount(A, minlength=N)
# counts start at 0
results[scheme][i, j] = tv_distance(W, counts / N)
# PLOTS
# =====
savefigs = False
plt.style.use('ggplot')
sb.set_palette(sb.dark_palette("lightgray", n_colors=4, reverse=True))
# Actual figure
plt.figure()
for k, scheme in enumerate(rs_schemes):
plt.plot(taus, np.mean(results[scheme], axis=0), label=scheme,
linewidth=3)
plt.legend()
plt.xlabel('tau')
plt.ylabel('TV distance')
if savefigs:
plt.savefig('resampling_comparison.pdf')
# 80% confidence intervals (not very interesting, as the variance is very small)
plt.figure()
col = {'multinomial': 'red', 'residual': 'green', 'stratified': 'yellow',
'systematic': 'black'}
for k, scheme in enumerate(rs_schemes):
    plt.fill_between(taus, np.percentile(results[scheme], 90., axis=0),
                     np.percentile(results[scheme], 10., axis=0),
facecolor=col[scheme])
plt.show()
```
#### File: book/smoothing/online_smoothing.py
```python
from __future__ import division, print_function
from matplotlib import pyplot as plt
import numpy as np
import particles
from particles import distributions as dists
from particles import state_space_models
def psit(t, xp, x):
""" score of the model (gradient of log-likelihood at theta=theta_0)
"""
if t == 0:
return -0.5 / sigma0**2 + \
(0.5 * (1. - phi0**2) / sigma0**4) * (x - mu0)**2
else:
return -0.5 / sigma0**2 + (0.5 / sigma0**4) * \
((x - mu0) - phi0 * (xp - mu0))**2
class DiscreteCox_with_addf(state_space_models.DiscreteCox):
""" A discrete Cox model:
Y_t ~ Poisson(e^{X_t})
X_t - mu = phi(X_{t-1}-mu)+U_t, U_t ~ N(0,1)
X_0 ~ N(mu,sigma^2/(1-phi**2))
"""
def upper_bound_log_pt(self, t):
        return -0.5 * np.log(2 * np.pi) - np.log(self.sigma)
def add_func(self, t, xp, x):
return psit(t, xp, x)
# set up models, simulate data
nruns = 25 # how many runs for each algorithm
T = 10**4 # sample size
mu0 = 0. # true parameters
phi0 = 0.9
sigma0 = .5
ssm = DiscreteCox_with_addf(mu=mu0, phi=phi0, sigma=sigma0)
true_states, data = ssm.simulate(T)
fkmod = state_space_models.Bootstrap(ssm=ssm, data=data)
# plot data
plt.figure()
plt.plot(data)
plt.title('data')
methods = ['ON2', 'naive']
attr_names = {k: k + '_online_smooth' for k in methods}
long_names = {'ON2': r'$O(N^2)$ forward-only',
'naive': r'naive, $O(N)$ forward-only'}
runs = {}
avg_cpu = {}
Ns = {'ON2': 100, 'naive': 10**4} # for naive N is rescaled later
for method in methods:
N = Ns[method]
if method == 'naive':
# rescale N to match CPU time
pf = particles.SMC(fk=fkmod, N=N, naive_online_smooth=True)
pf.run()
Ns['naive'] = int(N * avg_cpu['ON2'] / pf.cpu_time)
print('rescaling N to %i to match CPU time' % Ns['naive'])
long_names[method] += r', N=%i' % Ns[method]
print(long_names[method])
outf = lambda pf: {'result': getattr(pf.summaries, attr_names[method]),
'cpu': pf.cpu_time}
args_smc = {'fk': fkmod, 'nruns': nruns, 'nprocs': 0, 'N': N,
attr_names[method]: True, 'out_func': outf}
runs[method] = particles.multiSMC(**args_smc)
avg_cpu[method] = np.mean([r['cpu'] for r in runs[method]])
print('average cpu time (across %i runs): %f' %(nruns, avg_cpu[method]))
# Plots
# =====
savefigs = False # toggle this to save the plots as PDFs
plt.style.use('ggplot')
colors = {'ON2':'gray', 'naive':'black'}
# IQR (inter-quartile ranges) as a function of time: Figure 11.3
plt.figure()
estimates = {method: np.array([r['result'] for r in results])
for method, results in runs.items()}
plt.xlabel(r'$t$')
plt.ylabel('IQR (smoothing estimate)')
plt.yscale('log')
plt.xscale('log')
for method in methods:
est = estimates[method]
delta = np.percentile(est, 75., axis=0) - np.percentile(est, 25., axis=0)
plt.plot(np.arange(T), delta, colors[method], label=long_names[method])
plt.legend(loc=4)
if savefigs:
plt.savefig('online_iqr_vs_t_logscale.pdf')
# actual estimates
plt.figure()
mint, maxt = 0, T
miny = np.min([est[:, mint:maxt].min() for est in estimates.values()])
maxy = np.max([est[:, mint:maxt].max() for est in estimates.values()])
inflat = 1.1
ax = [mint, maxt, maxy - inflat * (maxy - miny), miny + inflat * (maxy - miny)]
for i, method in enumerate(methods):
plt.subplot(1, len(methods), i + 1)
plt.axis(ax)
plt.xlabel(r'$t$')
plt.ylabel('smoothing estimate')
plt.title(long_names[method])
est = estimates[method]
for j in range(nruns):
plt.plot(est[j, :])
if savefigs:
plt.savefig('online_est_vs_t.pdf')
plt.show()
``` |
{
"source": "jonjozwiak/ansible-optools",
"score": 2
} |
#### File: plugins/ceph/base.py
```python
import collectd
import datetime
import traceback
class Base(object):
def __init__(self):
self.verbose = False
self.debug = False
self.prefix = ''
self.cluster = 'ceph'
self.testpool = 'test'
self.interval = 60.0
def config_callback(self, conf):
"""Takes a collectd conf object and fills in the local config."""
for node in conf.children:
if node.key == "Verbose":
if node.values[0] in ['True', 'true']:
self.verbose = True
elif node.key == "Debug":
if node.values[0] in ['True', 'true']:
self.debug = True
elif node.key == "Prefix":
self.prefix = node.values[0]
elif node.key == 'Cluster':
self.cluster = node.values[0]
elif node.key == 'TestPool':
self.testpool = node.values[0]
elif node.key == 'Interval':
self.interval = float(node.values[0])
else:
collectd.warning("%s: unknown config key: %s" % (self.prefix, node.key))
def dispatch(self, stats):
"""
Dispatches the given stats.
stats should be something like:
{'plugin': {'plugin_instance': {'type': {'type_instance': <value>, ...}}}}
"""
if not stats:
collectd.error("%s: failed to retrieve stats" % self.prefix)
return
self.logdebug("dispatching %d new stats :: %s" % (len(stats), stats))
try:
for plugin in stats.keys():
for plugin_instance in stats[plugin].keys():
for type in stats[plugin][plugin_instance].keys():
type_value = stats[plugin][plugin_instance][type]
if not isinstance(type_value, dict):
self.dispatch_value(plugin, plugin_instance, type, None, type_value)
else:
for type_instance in stats[plugin][plugin_instance][type].keys():
self.dispatch_value(plugin, plugin_instance,
type, type_instance,
stats[plugin][plugin_instance][type][type_instance])
except Exception as exc:
collectd.error("%s: failed to dispatch values :: %s :: %s"
% (self.prefix, exc, traceback.format_exc()))
def dispatch_value(self, plugin, plugin_instance, type, type_instance, value):
"""Looks for the given stat in stats, and dispatches it"""
self.logdebug("dispatching value %s.%s.%s.%s=%s"
% (plugin, plugin_instance, type, type_instance, value))
val = collectd.Values(type='gauge')
val.plugin=plugin
val.plugin_instance=plugin_instance
if type_instance is not None:
val.type_instance="%s-%s" % (type, type_instance)
else:
val.type_instance=type
val.values=[value]
val.interval = self.interval
val.dispatch()
self.logdebug("sent metric %s.%s.%s.%s.%s"
% (plugin, plugin_instance, type, type_instance, value))
def read_callback(self):
try:
start = datetime.datetime.now()
stats = self.get_stats()
self.logverbose("collectd new data from service :: took %d seconds"
% (datetime.datetime.now() - start).seconds)
except Exception as exc:
collectd.error("%s: failed to get stats :: %s :: %s"
% (self.prefix, exc, traceback.format_exc()))
self.dispatch(stats)
def get_stats(self):
collectd.error('Not implemented, should be subclassed')
def logverbose(self, msg):
if self.verbose:
collectd.info("%s: %s" % (self.prefix, msg))
def logdebug(self, msg):
if self.debug:
collectd.info("%s: %s" % (self.prefix, msg))
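# Minimal subclass sketch (illustrative, not part of the original file)
# showing the contract Base expects from plugins; 'example_stat' is a
# hypothetical metric. The real plugins that follow take exactly this shape.
class ExamplePlugin(Base):
    def get_stats(self):
        ceph_cluster = "%s-%s" % (self.prefix, self.cluster)
        return {ceph_cluster: {'cluster': {'example_stat': 1}}}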
```
#### File: plugins/ceph/ceph_latency_plugin.py
```python
import collectd
import traceback
import subprocess
import base
class CephLatencyPlugin(base.Base):
def __init__(self):
base.Base.__init__(self)
self.prefix = 'ceph'
def get_stats(self):
"""Retrieves stats regarding latency to write to a test pool"""
ceph_cluster = "%s-%s" % (self.prefix, self.cluster)
data = { ceph_cluster: {} }
output = None
try:
output = subprocess.check_output(
"timeout 30s rados --cluster "+ self.cluster +" -p " + self.testpool + " bench 10 write -t 1 -b 65536 2>/dev/null | grep -i latency | awk '{print 1000*$3}'", shell=True)
except Exception as exc:
collectd.error("ceph-latency: failed to run rados bench :: %s :: %s"
% (exc, traceback.format_exc()))
return
        if output is None:
            collectd.error('ceph-latency: failed to run rados bench :: output was None')
            return
results = output.split('\n')
# push values
data[ceph_cluster]['cluster'] = {}
data[ceph_cluster]['cluster']['avg_latency'] = results[0]
data[ceph_cluster]['cluster']['stddev_latency'] = results[1]
data[ceph_cluster]['cluster']['max_latency'] = results[2]
data[ceph_cluster]['cluster']['min_latency'] = results[3]
return data
try:
plugin = CephLatencyPlugin()
except Exception as exc:
collectd.error("ceph-latency: failed to initialize ceph latency plugin :: %s :: %s"
% (exc, traceback.format_exc()))
def configure_callback(conf):
"""Received configuration information"""
plugin.config_callback(conf)
def read_callback():
"""Callback triggerred by collectd on read"""
plugin.read_callback()
collectd.register_config(configure_callback)
collectd.register_read(read_callback, plugin.interval)
```
#### File: plugins/ceph/ceph_pg_plugin.py
```python
import collectd
import json
import traceback
import subprocess
import base
class CephPGPlugin(base.Base):
def __init__(self):
base.Base.__init__(self)
self.prefix = 'ceph'
def get_stats(self):
"""Retrieves stats from ceph pgs"""
ceph_cluster = "%s-%s" % (self.prefix, self.cluster)
data = { ceph_cluster: { 'pg': { } } }
output = None
try:
cephpg_cmdline='ceph pg dump --format json --cluster '+ self.cluster
output = subprocess.check_output(cephpg_cmdline, shell=True)
except Exception as exc:
collectd.error("ceph-pg: failed to ceph pg dump :: %s :: %s"
% (exc, traceback.format_exc()))
return
        if output is None:
            collectd.error('ceph-pg: failed to run ceph pg dump :: output was None')
            return
json_data = json.loads(output)
pg_data = data[ceph_cluster]['pg']
# number of pgs in each possible state
for pg in json_data['pg_stats']:
for state in pg['state'].split('+'):
                if state not in pg_data:
pg_data[state] = 0
pg_data[state] += 1
# osd perf data
for osd in json_data['osd_stats']:
osd_id = "osd-%s" % osd['osd']
data[ceph_cluster][osd_id] = {}
data[ceph_cluster][osd_id]['kb_used'] = osd['kb_used']
data[ceph_cluster][osd_id]['kb_total'] = osd['kb']
data[ceph_cluster][osd_id]['snap_trim_queue_len'] = osd['snap_trim_queue_len']
data[ceph_cluster][osd_id]['num_snap_trimming'] = osd['num_snap_trimming']
data[ceph_cluster][osd_id]['apply_latency_ms'] = osd['fs_perf_stat']['apply_latency_ms']
data[ceph_cluster][osd_id]['commit_latency_ms'] = osd['fs_perf_stat']['commit_latency_ms']
return data
try:
plugin = CephPGPlugin()
except Exception as exc:
collectd.error("ceph-pg: failed to initialize ceph pg plugin :: %s :: %s"
% (exc, traceback.format_exc()))
def configure_callback(conf):
"""Received configuration information"""
plugin.config_callback(conf)
def read_callback():
"""Callback triggerred by collectd on read"""
plugin.read_callback()
collectd.register_config(configure_callback)
collectd.register_read(read_callback, plugin.interval)
``` |
{
"source": "jonjozwiak/openstack",
"score": 2
} |
#### File: director-examples/sahara-osp8/utils.py
```python
from __future__ import print_function
import base64
import hashlib
import json
import logging
import os
import passlib.utils as passutils
import six
import struct
import subprocess
import time
from heatclient.common import event_utils
from heatclient.exc import HTTPNotFound
from openstackclient.i18n import _
from tripleoclient import exceptions
_MIN_PASSWORD_SIZE = 25
_PASSWORD_NAMES = (
"OVERCLOUD_ADMIN_PASSWORD",
"OVERCLOUD_ADMIN_TOKEN",
"OVERCLOUD_CEILOMETER_PASSWORD",
"OVERCLOUD_CEILOMETER_SECRET",
"OVERCLOUD_CINDER_PASSWORD",
"OVERCLOUD_DEMO_PASSWORD",
"OVERCLOUD_GLANCE_PASSWORD",
"OVERCLOUD_HAPROXY_STATS_PASSWORD",
"OVERCLOUD_HEAT_PASSWORD",
"OVERCLOUD_HEAT_STACK_DOMAIN_PASSWORD",
"OVERCLOUD_NEUTRON_PASSWORD",
"OVERCLOUD_NOVA_PASSWORD",
"OVERCLOUD_RABBITMQ_PASSWORD",
"OVERCLOUD_REDIS_PASSWORD",
"OVERCLOUD_SAHARA_PASSWORD",
"OVERCLOUD_SWIFT_HASH",
"OVERCLOUD_SWIFT_PASSWORD",
"NEUTRON_METADATA_PROXY_SHARED_SECRET",
)
def generate_overcloud_passwords(output_file="tripleo-overcloud-passwords"):
"""Create the passwords needed for the overcloud
This will create the set of passwords required by the overcloud, store
them in the output file path and return a dictionary of passwords. If the
file already exists the existing passwords will be returned instead,
"""
passwords = {}
if os.path.isfile(output_file):
with open(output_file) as f:
passwords = dict(line.split('=') for line in f.read().splitlines())
for name in _PASSWORD_NAMES:
if not passwords.get(name):
passwords[name] = passutils.generate_password(
size=_MIN_PASSWORD_SIZE)
with open(output_file, 'w') as f:
for name, password in passwords.items():
f.write("{0}={1}\n".format(name, password))
return passwords
def check_hypervisor_stats(compute_client, nodes=1, memory=0, vcpu=0):
"""Check the Hypervisor stats meet a minimum value
Check the hypervisor stats match the required counts. This is an
implementation of a command in TripleO with the same name.
:param compute_client: Instance of Nova client
:type compute_client: novaclient.client.v2.Client
:param nodes: The number of nodes to wait for, defaults to 1.
:type nodes: int
:param memory: The amount of memory to wait for in MB, defaults to 0.
:type memory: int
:param vcpu: The number of vcpus to wait for, defaults to 0.
:type vcpu: int
"""
statistics = compute_client.hypervisors.statistics().to_dict()
if all([statistics['count'] >= nodes,
statistics['memory_mb'] >= memory,
statistics['vcpus'] >= vcpu]):
return statistics
else:
return None
def wait_for_stack_ready(orchestration_client, stack_name, marker=None,
action='CREATE', verbose=False):
"""Check the status of an orchestration stack
Get the status of an orchestration stack and check whether it is complete
or failed.
:param orchestration_client: Instance of Orchestration client
:type orchestration_client: heatclient.v1.client.Client
:param stack_name: Name or UUID of stack to retrieve
:type stack_name: string
:param marker: UUID of the last stack event before the current action
:type marker: string
:param action: Current action to check the stack for COMPLETE
:type action: string
:param verbose: Whether to print events
:type verbose: boolean
"""
stack = get_stack(orchestration_client, stack_name)
if not stack:
return False
stack_name = stack.stack_name
while True:
events = event_utils.get_events(orchestration_client,
stack_id=stack_name, nested_depth=2,
event_args={'sort_dir': 'asc',
'marker': marker})
if len(events) >= 1:
# set marker to last event that was received.
marker = getattr(events[-1], 'id', None)
if verbose:
events_log = event_log_formatter(events)
print(events_log)
stack = get_stack(orchestration_client, stack_name)
stack_status = stack.stack_status
if stack_status == '%s_COMPLETE' % action:
print("Stack %(name)s %(status)s" % dict(
name=stack_name, status=stack_status))
return True
elif stack_status == '%s_FAILED' % action:
print("Stack %(name)s %(status)s" % dict(
name=stack_name, status=stack_status))
return False
time.sleep(5)
def event_log_formatter(events):
"""Return the events in log format."""
event_log = []
log_format = ("%(event_time)s "
"[%(rsrc_name)s]: %(rsrc_status)s %(rsrc_status_reason)s")
for event in events:
event_time = getattr(event, 'event_time', '')
log = log_format % {
'event_time': event_time.replace('T', ' '),
'rsrc_name': getattr(event, 'resource_name', ''),
'rsrc_status': getattr(event, 'resource_status', ''),
'rsrc_status_reason': getattr(event, 'resource_status_reason', '')
}
event_log.append(log)
return "\n".join(event_log)
def nodes_in_states(baremetal_client, states):
"""List the introspectable nodes with the right provision_states."""
nodes = baremetal_client.node.list(maintenance=False, associated=False)
return [node for node in nodes if node.provision_state in states]
def wait_for_provision_state(baremetal_client, node_uuid, provision_state,
loops=10, sleep=1):
"""Wait for a given Provisioning state in Ironic
Updating the provisioning state is an async operation, we
need to wait for it to be completed.
:param baremetal_client: Instance of Ironic client
:type baremetal_client: ironicclient.v1.client.Client
:param node_uuid: The Ironic node UUID
:type node_uuid: str
:param provision_state: The provisioning state name to wait for
:type provision_state: str
:param loops: How many times to loop
:type loops: int
:param sleep: How long to sleep between loops
:type sleep: int
:raises exceptions.StateTransitionFailed: if node.last_error is set
"""
for _l in range(0, loops):
node = baremetal_client.node.get(node_uuid)
if node is None:
# The node can't be found in ironic, so we don't need to wait for
# the provision state
return
if node.provision_state == provision_state:
return
# node.last_error should be None after any successful operation
if node.last_error:
raise exceptions.StateTransitionFailed(
"Error transitioning node %(uuid)s to provision state "
"%(state)s: %(error)s. Now in state %(actual)s." % {
'uuid': node_uuid,
'state': provision_state,
'error': node.last_error,
'actual': node.provision_state
}
)
time.sleep(sleep)
raise exceptions.Timeout(
"Node %(uuid)s did not reach provision state %(state)s. "
"Now in state %(actual)s." % {
'uuid': node_uuid,
'state': provision_state,
'actual': node.provision_state
}
)
def wait_for_node_introspection(inspector_client, auth_token, inspector_url,
node_uuids, loops=220, sleep=10):
"""Check the status of Node introspection in Ironic inspector
Gets the status and waits for them to complete.
:param inspector_client: Ironic inspector client
:type inspector_client: ironic_inspector_client
:param node_uuids: List of Node UUID's to wait for introspection
:type node_uuids: [string, ]
:param loops: How many times to loop
:type loops: int
:param sleep: How long to sleep between loops
:type sleep: int
"""
log = logging.getLogger(__name__ + ".wait_for_node_introspection")
node_uuids = node_uuids[:]
for _l in range(0, loops):
for node_uuid in node_uuids:
status = inspector_client.get_status(
node_uuid,
base_url=inspector_url,
auth_token=auth_token)
if status['finished']:
log.debug("Introspection finished for node {0} "
"(Error: {1})".format(node_uuid, status['error']))
node_uuids.remove(node_uuid)
yield node_uuid, status
if not len(node_uuids):
            return
time.sleep(sleep)
if len(node_uuids):
log.error("Introspection didn't finish for nodes {0}".format(
','.join(node_uuids)))
def create_environment_file(path="~/overcloud-env.json",
control_scale=1, compute_scale=1,
ceph_storage_scale=0, block_storage_scale=0,
swift_storage_scale=0):
"""Create a heat environment file
Create the heat environment file with the scale parameters.
:param control_scale: Scale value for control roles.
:type control_scale: int
:param compute_scale: Scale value for compute roles.
:type compute_scale: int
:param ceph_storage_scale: Scale value for ceph storage roles.
:type ceph_storage_scale: int
:param block_storage_scale: Scale value for block storage roles.
:type block_storage_scale: int
:param swift_storage_scale: Scale value for swift storage roles.
:type swift_storage_scale: int
"""
env_path = os.path.expanduser(path)
with open(env_path, 'w+') as f:
f.write(json.dumps({
"parameter_defaults": {
"ControllerCount": control_scale,
"ComputeCount": compute_scale,
"CephStorageCount": ceph_storage_scale,
"BlockStorageCount": block_storage_scale,
"ObjectStorageCount": swift_storage_scale}
}))
return env_path
def set_nodes_state(baremetal_client, nodes, transition, target_state,
skipped_states=()):
"""Make all nodes available in the baremetal service for a deployment
For each node, make it available unless it is already available or active.
Available nodes can be used for a deployment and an active node is already
in use.
:param baremetal_client: Instance of Ironic client
:type baremetal_client: ironicclient.v1.client.Client
:param nodes: List of Baremetal Nodes
:type nodes: [ironicclient.v1.node.Node]
:param transition: The state to set for a node. The full list of states
can be found in ironic.common.states.
:type transition: string
:param target_state: The expected result state for a node. For example when
transitioning to 'manage' the result is 'manageable'
:type target_state: string
:param skipped_states: A set of states to skip, for example 'active' nodes
are already deployed and the state can't always be
changed.
:type skipped_states: iterable of strings
"""
log = logging.getLogger(__name__ + ".set_nodes_state")
for node in nodes:
if node.provision_state in skipped_states:
continue
log.debug(
"Setting provision state from {0} to '{1} for Node {2}"
.format(node.provision_state, transition, node.uuid))
baremetal_client.node.set_provision_state(node.uuid, transition)
try:
wait_for_provision_state(baremetal_client, node.uuid, target_state)
except exceptions.StateTransitionFailed as e:
log.error("FAIL: State transition failed for Node {0}. {1}"
.format(node.uuid, e))
except exceptions.Timeout as e:
log.error("FAIL: Timeout waiting for Node {0}. {1}"
.format(node.uuid, e))
yield node.uuid
def get_hiera_key(key_name):
"""Retrieve a key from the hiera store
:param password_name: Name of the key to retrieve
:type password_name: type
"""
command = ["hiera", key_name]
p = subprocess.Popen(command, stdout=subprocess.PIPE)
out, err = p.communicate()
return out
def get_config_value(section, option):
p = six.moves.configparser.ConfigParser()
p.read(os.path.expanduser("~/undercloud-passwords.conf"))
return p.get(section, option)
def get_overcloud_endpoint(stack):
for output in stack.to_dict().get('outputs', {}):
if output['output_key'] == 'KeystoneURL':
return output['output_value']
def get_service_ips(stack):
service_ips = {}
for output in stack.to_dict().get('outputs', {}):
service_ips[output['output_key']] = output['output_value']
return service_ips
__password_cache = None
def get_password(pass_name):
"""Retrieve a password by name, such as 'OVERCLOUD_ADMIN_PASSWORD'.
Raises KeyError if password does not exist.
"""
global __password_cache
if __password_cache is None:
__password_cache = generate_overcloud_passwords()
return __password_cache[pass_name]
def get_stack(orchestration_client, stack_name):
"""Get the ID for the current deployed overcloud stack if it exists.
Caller is responsible for checking if return is None
"""
try:
stack = orchestration_client.stacks.get(stack_name)
return stack
except HTTPNotFound:
pass
def remove_known_hosts(overcloud_ip):
"""For a given IP address remove SSH keys from the known_hosts file"""
known_hosts = os.path.expanduser("~/.ssh/known_hosts")
if os.path.exists(known_hosts):
command = ['ssh-keygen', '-R', overcloud_ip, '-f', known_hosts]
subprocess.check_call(command)
def create_cephx_key():
# NOTE(gfidente): Taken from
# https://github.com/ceph/ceph-deploy/blob/master/ceph_deploy/new.py#L21
key = os.urandom(16)
header = struct.pack("<hiih", 1, int(time.time()), 0, len(key))
return base64.b64encode(header + key)
def run_shell(cmd):
return subprocess.call([cmd], shell=True)
def all_unique(x):
"""Return True if the collection has no duplications."""
return len(set(x)) == len(x)
def file_checksum(filepath):
"""Calculate md5 checksum on file
:param filepath: Full path to file (e.g. /home/stack/image.qcow2)
:type filepath: string
"""
if not os.path.isfile(filepath):
raise ValueError("The given file {0} is not a regular "
"file".format(filepath))
checksum = hashlib.md5()
with open(filepath, 'rb') as f:
while True:
fragment = f.read(65536)
if not fragment:
break
checksum.update(fragment)
return checksum.hexdigest()
def check_nodes_count(baremetal_client, stack, parameters, defaults):
"""Check if there are enough available nodes for creating/scaling stack"""
count = 0
if stack:
for param in defaults:
try:
current = int(stack.parameters[param])
except KeyError:
raise ValueError(
"Parameter '%s' was not found in existing stack" % param)
count += parameters.get(param, current)
else:
for param, default in defaults.items():
count += parameters.get(param, default)
# We get number of nodes usable for the stack by getting already
# used (associated) nodes and number of nodes which can be used
# (not in maintenance mode).
# Assumption is that associated nodes are part of the stack (only
# one overcloud is supported).
associated = len(baremetal_client.node.list(associated=True))
available = len(baremetal_client.node.list(associated=False,
maintenance=False))
ironic_nodes_count = associated + available
if count > ironic_nodes_count:
raise exceptions.DeploymentError(
"Not enough nodes - available: {0}, requested: {1}".format(
ironic_nodes_count, count))
else:
return True
def ensure_run_as_normal_user():
"""Check if the command runs under normal user (EUID!=0)"""
if os.geteuid() == 0:
raise exceptions.RootUserExecution(
'This command cannot run under root user.'
' Switch to a normal user.')
def capabilities_to_dict(caps):
"""Convert the Node's capabilities into a dictionary."""
if not caps:
return {}
return dict([key.split(':', 1) for key in caps.split(',')])
def dict_to_capabilities(caps_dict):
"""Convert a dictionary into a string with the capabilities syntax."""
return ','.join(["%s:%s" % (key, value)
for key, value in caps_dict.items()
if value is not None])
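# Round-trip sanity check for the two capability helpers above (illustrative,
# not part of the original module):
assert capabilities_to_dict('profile:control,boot_option:local') == \
    {'profile': 'control', 'boot_option': 'local'}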
def node_get_capabilities(node):
"""Get node capabilities."""
return capabilities_to_dict(node.properties.get('capabilities'))
def node_add_capabilities(bm_client, node, **updated):
"""Add or replace capabilities for a node."""
caps = node_get_capabilities(node)
caps.update(updated)
converted_caps = dict_to_capabilities(caps)
node.properties['capabilities'] = converted_caps
bm_client.node.update(node.uuid, [{'op': 'add',
'path': '/properties/capabilities',
'value': converted_caps}])
return caps
def assign_and_verify_profiles(bm_client, flavors,
assign_profiles=False, dry_run=False):
"""Assign and verify profiles for given flavors.
:param bm_client: ironic client instance
:param flavors: map flavor name -> (flavor object, required count)
:param assign_profiles: whether to allow assigning profiles to nodes
:param dry_run: whether to skip applying actual changes (only makes sense
if assign_profiles is True)
:returns: tuple (errors count, warnings count)
"""
log = logging.getLogger(__name__ + ".assign_and_verify_profiles")
predeploy_errors = 0
predeploy_warnings = 0
# nodes available for deployment and scaling (including active)
bm_nodes = {node.uuid: node
for node in bm_client.node.list(maintenance=False,
detail=True)
if node.provision_state in ('available', 'active')}
# create a pool of unprocessed nodes and record their capabilities
free_node_caps = {uu: node_get_capabilities(node)
for uu, node in bm_nodes.items()}
# TODO(dtantsur): use command-line arguments to specify the order in
# which profiles are processed (might matter for assigning profiles)
profile_flavor_used = False
for flavor_name, (flavor, scale) in flavors.items():
if not scale:
log.debug("Skipping verification of flavor %s because "
"none will be deployed", flavor_name)
continue
profile = flavor.get_keys().get('capabilities:profile')
# If there's only a single flavor, then it's expected for it to have
# no profile assigned.
if not profile and len(flavors) > 1:
predeploy_errors += 1
log.error(
'Error: The %s flavor has no profile associated', flavor_name)
log.error(
'Recommendation: assign a profile with openstack flavor '
'set --property "capabilities:profile"="PROFILE_NAME" %s',
flavor_name)
continue
profile_flavor_used = True
# first collect nodes with known profiles
assigned_nodes = [uu for uu, caps in free_node_caps.items()
if caps.get('profile') == profile]
required_count = scale - len(assigned_nodes)
if required_count < 0:
log.warning('%d nodes with profile %s won\'t be used '
'for deployment now', -required_count, profile)
predeploy_warnings += 1
required_count = 0
elif required_count > 0 and assign_profiles:
# find more nodes by checking XXX_profile capabilities that are
# set by ironic-inspector or manually
capability = '%s_profile' % profile
more_nodes = [
uu for uu, caps in free_node_caps.items()
            # use only nodes without a known profile
if not caps.get('profile') and
caps.get(capability, '').lower() in ('1', 'true') and
# do not assign profiles for active nodes
bm_nodes[uu].provision_state == 'available'
][:required_count]
assigned_nodes.extend(more_nodes)
required_count -= len(more_nodes)
for uu in assigned_nodes:
# make sure these nodes are not reused for other profiles
node_caps = free_node_caps.pop(uu)
# save profile for newly assigned nodes, but only if we
# succeeded in finding enough of them
if not required_count and not node_caps.get('profile'):
node = bm_nodes[uu]
if not dry_run:
node_add_capabilities(bm_client, node, profile=profile)
log.info('Node %s was assigned profile %s', uu, profile)
else:
log.debug('Node %s has profile %s', uu, profile)
if required_count > 0:
log.error(
"Error: only %s of %s requested ironic nodes are tagged "
"to profile %s (for flavor %s)",
scale - required_count, scale, profile, flavor_name
)
log.error(
"Recommendation: tag more nodes using ironic node-update "
"<NODE ID> replace properties/capabilities=profile:%s,"
"boot_option:local", profile)
predeploy_errors += 1
nodes_without_profile = [uu for uu, caps in free_node_caps.items()
if not caps.get('profile')]
if nodes_without_profile and profile_flavor_used:
predeploy_warnings += 1
log.warning(
"There are %d ironic nodes with no profile that will "
"not be used: %s", len(nodes_without_profile),
', '.join(nodes_without_profile)
)
return predeploy_errors, predeploy_warnings
def add_deployment_plan_arguments(parser):
"""Add deployment plan arguments (flavors and scales) to a parser"""
parser.add_argument('--control-scale', type=int,
help=_('New number of control nodes.'))
parser.add_argument('--compute-scale', type=int,
help=_('New number of compute nodes.'))
parser.add_argument('--ceph-storage-scale', type=int,
help=_('New number of ceph storage nodes.'))
parser.add_argument('--block-storage-scale', type=int,
help=_('New number of cinder storage nodes.'))
parser.add_argument('--swift-storage-scale', type=int,
help=_('New number of swift storage nodes.'))
parser.add_argument('--control-flavor',
help=_("Nova flavor to use for control nodes."))
parser.add_argument('--compute-flavor',
help=_("Nova flavor to use for compute nodes."))
parser.add_argument('--ceph-storage-flavor',
help=_("Nova flavor to use for ceph storage "
"nodes."))
parser.add_argument('--block-storage-flavor',
help=_("Nova flavor to use for cinder storage "
"nodes."))
parser.add_argument('--swift-storage-flavor',
help=_("Nova flavor to use for swift storage "
"nodes."))
def get_roles_info(parsed_args):
"""Get flavor name and scale for all deployment roles.
:returns: dict role name -> (flavor name, scale)
"""
return {
'control': (parsed_args.control_flavor, parsed_args.control_scale),
'compute': (parsed_args.compute_flavor, parsed_args.compute_scale),
'ceph-storage': (parsed_args.ceph_storage_flavor,
parsed_args.ceph_storage_scale),
'block-storage': (parsed_args.block_storage_flavor,
parsed_args.block_storage_scale),
'swift-storage': (parsed_args.swift_storage_flavor,
parsed_args.swift_storage_scale)
}
```
#### File: openstack/scripts/check_orphans.py
```python
import os
import sys
import prettytable
import keystoneclient.v2_0.client as ksclient
import neutronclient.v2_0.client as neutronclient
import novaclient.client as novaclient
import glanceclient.v2.client as glanceclient
import heatclient.exc as hc_exc
import heatclient.client as heatclient
import cinderclient.v2.client as cinderclient
def usage():
print "listorphans.py <object> where object is one or more of",
print "'all'"
print "Neutron: 'networks', 'routers', 'subnets', 'floatingips', 'security_groups', or 'ports'"
print "Nova: 'servers', 'keypairs'"
print "Glance: 'images'"
print "Heat: 'stacks'"
print "Cinder: 'volumes', 'snapshots'"
def get_credentials():
d = {}
d['username'] = os.environ['OS_USERNAME']
d['password'] = os.environ['OS_PASSWORD']
d['auth_url'] = os.environ['OS_AUTH_URL']
d['tenant_name'] = os.environ['OS_TENANT_NAME']
if 'OS_REGION_NAME' in os.environ:
d['region_name'] = os.environ['OS_REGION_NAME']
return d
def get_nova_credentials():
d = {}
d['username'] = os.environ['OS_USERNAME']
d['api_key'] = os.environ['OS_PASSWORD']
d['auth_url'] = os.environ['OS_AUTH_URL']
d['project_id'] = os.environ['OS_TENANT_NAME']
if 'OS_REGION_NAME' in os.environ:
d['region_name'] = os.environ['OS_REGION_NAME']
return d
credentials = get_credentials()
novacredentials = get_nova_credentials()
keystone = ksclient.Client(**credentials)
cinder = cinderclient.Client(**novacredentials)
neutron = neutronclient.Client(**credentials)
nova = novaclient.Client('2', **novacredentials)
glance_endpoint = keystone.service_catalog.url_for(service_type='image',endpoint_type='publicURL')
glance = glanceclient.Client(glance_endpoint,token=keystone.auth_token)
heat_endpoint = keystone.service_catalog.url_for(service_type='orchestration',endpoint_type='publicURL')
heat = heatclient.Client('1', endpoint=heat_endpoint, token=keystone.auth_token)
def get_tenantids():
return [tenant.id for tenant in keystone.tenants.list()]
def get_userids():
return [user.id for user in keystone.users.list()]
def get_orphaned_neutron_objects(object):
objects = getattr(neutron, 'list_' + object)()
tenantids = get_tenantids()
orphans = []
names_to_skip = ['HA network tenant', 'HA subnet tenant', 'HA port tenant']
device_owner_to_skip = ['network:floatingip', 'network:router_gateway', 'network:router_ha_interface']
for object in objects.get(object):
if object['tenant_id'] not in tenantids:
skip=False
for skipname in names_to_skip:
if skipname in object['name']:
skip=True
for key, value in object.items():
if key == 'device_owner':
for skipname in device_owner_to_skip:
if skipname in object['device_owner']:
skip=True
if not skip:
orphans.append([object['id'], object['name']])
return orphans
def get_orphaned_floatingips(object):
objects = getattr(neutron, 'list_' + object)()
tenantids = get_tenantids()
orphans = []
for object in objects.get(object):
if object['tenant_id'] not in tenantids:
orphans.append([object['id'], object['fixed_ip_address'], object['floating_ip_address']])
return orphans
#def get_orphaned_nova_objects(object):
def get_orphaned_nova_instances():
objects = nova.servers.list(search_opts={'all_tenants': 1})
tenantids = get_tenantids()
orphans = []
for object in objects:
if object.tenant_id not in tenantids:
orphans.append([object.id, object.name])
return orphans
def get_orphaned_keypairs():
objects = nova.keypairs.list()
userids = get_userids()
orphans = []
for object in objects:
kp = nova.keypairs.get(object)
#print kp.id, kp.name, kp.user_id
if kp.user_id not in userids:
orphans.append([kp.id, kp.user_id])
return orphans
def get_orphaned_images():
objects = glance.images.list()
tenantids = get_tenantids()
orphans = []
for object in objects:
if object.owner not in tenantids:
orphans.append([object.id, object.name])
return orphans
def get_orphaned_stacks():
# Note special policy is needed to allow listing global stacks
# in /etc/heat/policy.json: "stacks:global_index": "rule:deny_everybody",
# Needs to be: "stacks:global_index": "rule:context_is_admin",
### http://www.gossamer-threads.com/lists/openstack/dev/46973
kwargs = {'global_tenant': True}
#try:
# objects = heat.stacks.list(**kwargs)
#except Exception, e:
# raise e
# print "/etc/heat/policy.json must have the following line to allow global heat listing by admin"
# print 'stacks:global_index": "rule:context_is_admin",'
#except exc.HTTPForbidden:
# print "bla"
# raise 'bla'
#except:
# print 'stupid'
# raise
objects = heat.stacks.list(**kwargs)
tenantids = get_tenantids()
orphans = []
try:
for object in objects:
if object.project not in tenantids:
orphans.append([object.id, object.stack_name])
return orphans
except hc_exc.HTTPForbidden:
print "***** Listing heat stacks for all users is forbidden!!! *****"
print "Check /etc/heat/policy.json. The stacks:global_index should read:"
print ' "stacks:global_index": "rule:context_is_admin",'
print "\n"
print "Alternatively, get the data direct from the database with:"
print "mysql -e 'SELECT id, name FROM stack WHERE deleted_at IS NULL AND tenant NOT IN (SELECT id FROM keystone.project);' heat"
print "*************************************************************"
return orphans
def get_orphaned_cinder_objects(object):
if object == "volumes":
objects = cinder.volumes.list(search_opts={'all_tenants': 1})
elif object == "snapshots":
objects = cinder.volume_snapshots.list(search_opts={'all_tenants': 1})
elif object == "backups":
objects = cinder.backups.list(search_opts={'all_tenants': 1})
tenantids = get_tenantids()
orphans = []
for obj in objects:
if object == "volumes":
tenant_id = getattr(obj, 'os-vol-tenant-attr:tenant_id')
elif object == "snapshots":
tenant_id = obj.project_id
elif object == "backups":
#print dir(obj)
#print obj
tenant_id = obj.project_id
if tenant_id not in tenantids:
orphans.append([obj.id, obj.name])
return orphans
def print_result(objs, objtype, fields):
print len(objs), 'orphaned', objtype
if len(objs) != 0:
pt = prettytable.PrettyTable([f for f in fields], caching=False)
pt.align = 'l'
for obj in objs:
pt.add_row(obj)
print(pt.get_string())
print '\n'
else:
print '\n'
if __name__ == '__main__':
if len(sys.argv) > 1:
if sys.argv[1] == 'all':
objects = [ 'networks', 'routers', 'subnets', 'floatingips', 'security_groups', 'ports', 'keypairs', 'servers', 'images', 'stacks', "volumes", "snapshots" ]
else:
objects = sys.argv[1:]
for object in objects:
if object in [ 'networks', 'routers', 'subnets', 'security_groups', 'ports' ]:
orphans = get_orphaned_neutron_objects(object)
fields = ['ID', 'Name']
elif object == 'floatingips':
orphans = get_orphaned_floatingips(object)
fields = ['ID', 'Fixed IP Address', 'Floating IP Address']
elif object in [ 'volumes', 'snapshots' ]:
orphans = get_orphaned_cinder_objects(object)
fields = ['ID', 'Name']
elif object == 'servers':
orphans = get_orphaned_nova_instances()
fields = ['ID', 'Name']
elif object == 'keypairs':
print "NOTE: Check for Orphaned Keypairs does not work"
print "Do this instead: mysql -e 'SELECT name, user_id FROM key_pairs WHERE deleted_at IS NULL AND user_id NOT IN (SELECT id FROM keystone.user);' nova"
print ""
orphans = get_orphaned_keypairs()
fields = ['ID', 'User ID ']
elif object == 'images':
orphans = get_orphaned_images()
fields = ['ID', 'Name']
elif object == 'stacks':
orphans = get_orphaned_stacks()
fields = ['ID', 'Name']
else:
print 'object type (', object, ') not recognized'
sys.exit()
print_result(orphans, object, fields)
else:
usage()
sys.exit(1)
``` |
{
"source": "Jonjump/sdm",
"score": 3
} |
#### File: sdm/domain/donationType.py
```python
from enum import Enum, unique
@unique
class DonationType(Enum):
ONEOFF = 1
MONTHLY = 2
ANNUAL = 3
@classmethod
def fromString(cls, s):
s = s.strip().upper()
if s.find("MONTH") > -1:
return cls.MONTHLY
if s.find("YEAR") > -1:
return cls.ANNUAL
return cls[s]
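# Illustrative behaviour of the parser (not part of the original module):
assert DonationType.fromString(" Monthly Donation ") is DonationType.MONTHLY
assert DonationType.fromString("yearly") is DonationType.ANNUAL
assert DonationType.fromString("oneoff") is DonationType.ONEOFF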
```
#### File: sdm/domain/Types.py
```python
Amount = float
BenefitName = str
Email = str
Donor = Email
PaymentId = str
def isnotemptyinstance(value, type):
if not isinstance(value, type):
return False # None returns false
if isinstance(value, str):
return (len(value.strip()) != 0)
elif isinstance(value, int):
return (value != 0)
elif isinstance(value, float):
return (value != 0.0)
else:
raise NotImplementedError
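# Expected behaviour (illustrative, not part of the original module):
assert isnotemptyinstance("abc", str)
assert not isnotemptyinstance("   ", str)   # blank strings count as empty
assert not isnotemptyinstance(0, int)       # zero counts as empty
assert not isnotemptyinstance(None, float)  # wrong type is simply False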
```
#### File: sdm/infrastructure/transactionimporter.py
```python
import sys
from datetime import datetime
from abc import ABC, abstractmethod
from typing import Dict
from dataclasses import dataclass
from email.utils import parseaddr
from domain import PaymentProvider, Donation, DonationType, Money
import csv
def isEmail(s):
return '@' in parseaddr(s)[1]
class FilteredRow(Exception):
pass
@dataclass(frozen=True)
class CsvConfig():
delimiter: str
doublequote: bool
escapechar: bool
lineterminator: str
quotechar: str
quoting: int
skipinitialspace: bool
class TransactionImporter(ABC):
CSVCONFIG: CsvConfig
def __init__(self, csvfile):
"""
        csvfile must be an open file handle, opened with newline=''
"""
self._reader = csv.DictReader(
csvfile,
delimiter=self.CSVCONFIG.delimiter,
doublequote=self.CSVCONFIG.doublequote,
escapechar=self.CSVCONFIG.escapechar,
lineterminator=self.CSVCONFIG.lineterminator,
quotechar=self.CSVCONFIG.quotechar,
quoting=self.CSVCONFIG.quoting,
skipinitialspace=self.CSVCONFIG.skipinitialspace,
)
self.badRows = []
def __iter__(self):
return self
def __next__(self):
while True:
row = self._reader.__next__()
try:
return self.parser(self._filter(row))
except StopIteration as e:
raise e
except Exception:
self.badRows.append((self._reader.line_num, sys.exc_info()[0], row))
@abstractmethod
def parser(self, row):
pass
@abstractmethod
def _filter(self, row):
return row
class GocardlessTransactionImporter (TransactionImporter):
SOURCE = PaymentProvider.GOCARDLESS
HEADER = 'id,created_at,charge_date,amount,description,currency,status,amount_refunded,reference,transaction_fee,payout_date,app_fee,links.mandate,links.creditor,links.customer,links.payout,links.subscription,customers.id,customers.created_at,customers.email,customers.given_name,customers.family_name,customers.company_name,customers.address_line1,customers.address_line2,customers.address_line3,customers.city,customers.region,customers.postal_code,customers.country_code,customers.language,customers.swedish_identity_number,customers.active_mandates\n' # noqa: E501
CSVCONFIG = CsvConfig(
delimiter=',',
doublequote=True,
escapechar=None,
lineterminator='\r\n',
quotechar='"',
quoting=csv.QUOTE_MINIMAL,
skipinitialspace=False,
)
def parser(self, row):
return Donation(
source=self.SOURCE,
paymentId=row["id"],
donor=row["customers.email"],
paymentDate=datetime.strptime(row["charge_date"], '%d/%m/%Y').date(),
type=DonationType.MONTHLY,
money=Money.fromString(row["currency"]+row["amount"])
)
def _filter(self, row):
if float(row["amount"]) <= 0:
raise FilteredRow('gross le 0')
return row
class PaypalTransactionImporter (TransactionImporter):
SOURCE = PaymentProvider.PAYPAL
HEADER = '"Date","Time","Time zone","Name","Type","Status","Currency","Gross","Fee","Net","From Email Address","To Email Address","Transaction ID","Counterparty Status","Shipping Address","Address Status","Option 1 Name","Reference Txn ID","Quantity","Receipt ID","Country","Contact Phone Number","Subject","Balance Impact","Buyer Wallet"\n' # noqa: E501
CSVCONFIG = CsvConfig(
delimiter=',',
doublequote=True,
escapechar=None,
lineterminator='\r\n',
quotechar='"',
quoting=csv.QUOTE_MINIMAL,
skipinitialspace=False,
)
DONATIONTYPES: Dict[str, DonationType] = {
"General Payment": DonationType.ONEOFF,
"Donation Payment": DonationType.ONEOFF,
"Subscription Payment": DonationType.MONTHLY,
}
def parser(self, row) -> Donation:
return Donation(
source=self.SOURCE,
paymentId=row["Transaction ID"],
donor=row["From Email Address"],
paymentDate=datetime.strptime(row["Date"], '%d/%m/%Y').date(),
type=self.DONATIONTYPES[row["Type"]],
money=Money.fromString(row["Currency"]+row["Gross"])
)
def _filter(self, row):
if float(row["Gross"]) <= 0:
raise FilteredRow('gross le 0')
if row["Status"] != 'Completed':
raise FilteredRow('not completed status')
return row
class StripeTransactionImporter (TransactionImporter):
SOURCE = PaymentProvider.STRIPE
HEADER = "id,Description,Seller Message,Created (UTC),Amount,Amount Refunded,Currency,Converted Amount,Converted Amount Refunded,Fee,Tax,Converted Currency,Mode,Status,Statement Descriptor,Customer ID,Customer Description,Customer Email,Captured,Card ID,Card Last4,Card Brand,Card Funding,Card Exp Month,Card Exp Year,Card Name,Card Address Line1,Card Address Line2,Card Address City,Card Address State,Card Address Country,Card Address Zip,Card Issue Country,Card Fingerprint,Card CVC Status,Card AVS Zip Status,Card AVS Line1 Status,Card Tokenization Method,Disputed Amount,Dispute Status,Dispute Reason,Dispute Date (UTC),Dispute Evidence Due (UTC),Invoice ID,Payment Source Type,Destination,Transfer,Interchange Costs,Merchant Service Charge,Transfer Group,PaymentIntent ID\n"
CSVCONFIG = CsvConfig(
delimiter=',',
doublequote=True,
escapechar=None,
lineterminator='\r\n',
quotechar='"',
quoting=csv.QUOTE_MINIMAL,
skipinitialspace=False,
)
def __init__(self, csvfile):
super().__init__(csvfile)
def _getDonationType(self, row):
if row["Description"][:19].lower().strip() == "payment for invoice":
return DonationType.MONTHLY
return DonationType.ONEOFF
def _filter(self, row):
if float(row["Amount"]) <= 0:
raise FilteredRow('gross le 0')
if row["Status"].lower().strip() != "paid":
raise FilteredRow('not paid')
return row
def _getEmail(self, row):
if isEmail(row["Customer Email"]):
return row["Customer Email"]
if isEmail(row["Card Name"]):
return row["Card Name"]
raise Exception("No Email found")
def parser(self, row) -> Donation:
return Donation(
source=self.SOURCE,
paymentId=row["id"],
donor=self._getEmail(row),
paymentDate=datetime.strptime(row["Created (UTC)"], '%d/%m/%Y %H:%M').date(),
type=self._getDonationType(row),
money=Money.fromString(row["Currency"].upper()+row["Amount"])
)
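def load_paypal_donations(path):
    """Illustrative usage helper (not part of the original module): parse a
    PayPal CSV export and report the rows the importer rejected. The file
    must be opened with newline='' so csv handles embedded line breaks."""
    with open(path, newline='') as f:
        importer = PaypalTransactionImporter(f)
        donations = list(importer)
    for line_num, exc_type, row in importer.badRows:
        print("skipped line %d: %s" % (line_num, exc_type))
    return donations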
``` |
{
"source": "jonkb/phys.mtrl",
"score": 3
} |
#### File: phys.mtrl/src/toolbars.py
```python
import tkinter as tk
import math
from member import *
from region import *
from support import Support
num_e_wid = 8
#Toolbar to add a new member
class Add_mem:
def __init__(self, main_frm):
self.tb_frm = tk.Frame(main_frm)
self.tb_frm.config(highlightcolor="grey", highlightbackground="grey", highlightthickness=1)
self.tb_frm.pack(side=tk.TOP, fill=tk.X)
self.tb_frm.grid_rowconfigure(1,weight=1)
#First label
tb_lbl = tk.Label(self.tb_frm, text="Add New\nMember")
tb_lbl.grid(row=0, column=0, rowspan=2)
#Choose material label
matl_lbl = tk.Label(self.tb_frm, text="Material:")
matl_lbl.grid(row=0, column=1)
#Choose material pulldown
self.matl = tk.StringVar(self.tb_frm)
self.matl.set(Materials.materials[0])
matl_option = tk.OptionMenu(self.tb_frm, self.matl, *Materials.materials)
matl_option.config(width=12)
matl_option.grid(row=1, column=1)
#Choose xsection label
xsec_lbl = tk.Label(self.tb_frm, text="Cross Section Type:")
xsec_lbl.grid(row=0, column=2)
#Choose xsection pulldown
self.xsec = tk.StringVar(self.tb_frm)
self.xsec.set(Region.regions[0])
self.xsec.trace("w", self.update_xparam)
xsec_option = tk.OptionMenu(self.tb_frm, self.xsec, *Region.regions)
xsec_option.config(width=12)
xsec_option.grid(row=1, column=2)
#Frame that adjusts itself to the chosen xsection to have the needed parameters
self.xparam_frm = tk.Frame(self.tb_frm)
self.xparam_frm.config(borderwidth=2, relief=tk.SUNKEN)
self.xparam_frm.grid(row=0, column=3, rowspan=2, sticky=tk.N+tk.S)
self.xparam_frm.grid_rowconfigure(1,weight=1)
self.xparam_entries = []
self.update_xparam()
#Length label
L_lbl = tk.Label(self.tb_frm, text="Length (m):")
L_lbl.grid(row=0, column=4)
#Length entry
self.L_entry = tk.Entry(self.tb_frm)
self.L_entry.config(width=num_e_wid)
self.L_entry.grid(row=1, column=4)
#Angle label
th_lbl = tk.Label(self.tb_frm, text="Angle (\u00B0):")
th_lbl.grid(row=0, column=5)
#Angle entry
self.th_entry = tk.Entry(self.tb_frm)
self.th_entry.config(width=num_e_wid)
self.th_entry.grid(row=1, column=5)
self.th_entry.insert(0, "0")
#Button to add the new member
self.add_btn = tk.Button(self.tb_frm, text="Add")
#self.add_btn.config(command=self.toggle_add)
self.add_btn.grid(row=0, column=6, padx=2, pady=2, ipadx=8, rowspan=2, sticky=tk.N+tk.S)
def update_xparam(self, *args):
region = self.xsec.get()
self.xparam_entries.clear()
for widget in self.xparam_frm.winfo_children():
widget.destroy()
if(region == "circle"):
rad_lbl = tk.Label(self.xparam_frm, text="Radius (mm):")
rad_lbl.grid(row=0, column=0)
rad_entry = tk.Entry(self.xparam_frm, width=num_e_wid)
rad_entry.grid(row=1, column=0)
self.xparam_entries.append(rad_entry)
if(region == "rectangle"):
b_lbl = tk.Label(self.xparam_frm, text="Base (mm):")
b_lbl.grid(row=0, column=0)
b_entry = tk.Entry(self.xparam_frm, width=num_e_wid)
b_entry.grid(row=1, column=0)
self.xparam_entries.append(b_entry)
h_lbl = tk.Label(self.xparam_frm, text="Height (mm):")
h_lbl.grid(row=0, column=1)
h_entry = tk.Entry(self.xparam_frm, width=num_e_wid)
h_entry.grid(row=1, column=1)
self.xparam_entries.append(h_entry)
if(region == "I-beam"):
d_lbl = tk.Label(self.xparam_frm, text="Depth (mm):")
d_lbl.grid(row=0, column=0)
d_entry = tk.Entry(self.xparam_frm, width=num_e_wid)
d_entry.grid(row=1, column=0)
self.xparam_entries.append(d_entry)
w_lbl = tk.Label(self.xparam_frm, text="Width (mm):")
w_lbl.grid(row=0, column=1)
w_entry = tk.Entry(self.xparam_frm, width=num_e_wid)
w_entry.grid(row=1, column=1)
self.xparam_entries.append(w_entry)
tf_lbl = tk.Label(self.xparam_frm, text="Flange t (mm):")
tf_lbl.grid(row=0, column=2)
tf_entry = tk.Entry(self.xparam_frm, width=num_e_wid)
tf_entry.grid(row=1, column=2)
self.xparam_entries.append(tf_entry)
tw_lbl = tk.Label(self.xparam_frm, text="Web t (mm):")
tw_lbl.grid(row=0, column=3)
tw_entry = tk.Entry(self.xparam_frm, width=num_e_wid)
tw_entry.grid(row=1, column=3)
self.xparam_entries.append(tw_entry)
if(region == "annulus"):
ro_lbl = tk.Label(self.xparam_frm, text="Outer Radius (mm):")
ro_lbl.grid(row=0, column=0)
ro_entry = tk.Entry(self.xparam_frm, width=num_e_wid)
ro_entry.grid(row=1, column=0)
self.xparam_entries.append(ro_entry)
ri_lbl = tk.Label(self.xparam_frm, text="Inner Radius (mm):")
ri_lbl.grid(row=0, column=1)
ri_entry = tk.Entry(self.xparam_frm, width=num_e_wid)
ri_entry.grid(row=1, column=1)
self.xparam_entries.append(ri_entry)
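# Note: the "w" trace registered on self.xsec in __init__ calls this method
# automatically whenever the cross-section dropdown changes, so the
# parameter frame above is rebuilt to match the selected region.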
#Return half of the height of the beam being added (in m)
def half_h(self):
xsec = self.xsec.get()
return Region.half_h(xsec, self.get_xparams())
#Return the parameters for the cross section
def get_xparams(self):
params = []
for param in self.xparam_entries:
params.append(float(param.get()))
return params
def get_L(self):
return float(self.L_entry.get())
def get_matl(self):
return self.matl.get()
def get_xsec(self):
return self.xsec.get()
def get_th(self):
return float(self.th_entry.get())
#Return True if all fields contain numbers and every dimension is > 0.
def has_float_vals(self):
try:
if float(self.L_entry.get()) <= 0:
return False
for v in self.xparam_entries:
if float(v.get()) <= 0:
return False
float(self.th_entry.get())
except ValueError:
#Flash fields red here?
return False
else:
return True
#Add support toolbar
class Add_sup:
add_sp_txt = "Add New\nSupport"
add_jt_txt = "Add New\nJoint"
def __init__(self, main_frm):
self.tb_frm = tk.Frame(main_frm)
self.tb_frm.config(highlightcolor="grey", highlightbackground="grey", highlightthickness=1)
self.tb_frm.pack(side=tk.TOP, fill=tk.X)
#Add Support or Joint
self.sp_jt = tk.IntVar(self.tb_frm)
self.sp_jt.set(0)
sp_btn = tk.Radiobutton(self.tb_frm, variable=self.sp_jt, value=0, command=self.setsp)
sp_btn.config(indicatoron=0, text="Support")
sp_btn.grid(row=0, column=0, sticky=tk.W+tk.E)
jt_btn = tk.Radiobutton(self.tb_frm, variable=self.sp_jt, value=1, command=self.setjt)
jt_btn.config(indicatoron=0, text="Joint")
jt_btn.grid(row=1, column=0, sticky=tk.W+tk.E)
#First label
self.tb_lbl = tk.Label(self.tb_frm, text=self.add_sp_txt)
self.tb_lbl.grid(row=0, column=1, rowspan=2)
next_col = 2
#Radio buttons for support type
self.sup_type = tk.IntVar(self.tb_frm)
self.sup_type.set(0)
for val, txt in Support.sup_types.items():
s_btn = tk.Radiobutton(self.tb_frm, variable=self.sup_type, value=val)
s_btn.config(indicatoron=0, text=txt, width=8)
s_btn.grid(row=0, column=next_col, rowspan=2)
next_col += 1
#Angle label
th_lbl = tk.Label(self.tb_frm, text="Angle (\u00B0):")
th_lbl.grid(row=0, column=next_col)
#Angle entry
self.th_entry = tk.Entry(self.tb_frm)
self.th_entry.config(width=num_e_wid)
self.th_entry.grid(row=1, column=next_col)
self.th_entry.insert(0, "0")
next_col += 1
#TO DO: Add option to snap to axis angle of member
# - Angle: [ --Auto-- ] : darkened out - not accepting input
#Button to add the new support
self.add_btn = tk.Button(self.tb_frm, text="Add")
#self.add_btn.config(command=self.toggle_add)
self.add_btn.grid(row=0, column=next_col, rowspan=2, padx=2, pady=2, ipadx=8, sticky=tk.N+tk.S)
def get_sup_type(self):
return self.sup_type.get()
def get_th(self):
if self.th_entry["state"] == "disabled":
return "[auto]"
try:
th = float(self.th_entry.get())
except ValueError:
return None
return th
def is_jt(self):
return self.sp_jt.get()
def setsp(self):
self.tb_lbl.config(text=self.add_sp_txt)
def setjt(self):
self.tb_lbl.config(text=self.add_jt_txt)
#Turn on or off auto angle mode
# onoff - boolean to say if it should be turned on or off
def auto_th(self, onoff):
if onoff:
self.th_entry.delete(0, "end")
self.th_entry.insert(0, "[auto]")
self.th_entry.config(state="disabled")
else:
self.th_entry.config(state="normal")
self.th_entry.delete(0, "end")
self.th_entry.insert(0, "0")
#Add load toolbar
class Add_load:
xctext = "x-comp. (kN):"
yctext = "y-comp. (kN):"
rtext = "load (kN):"
thtext = "angle (deg):"
qxtext = "x-comp. (kN/m)" # Append q0 or q1 to the start for these four
qytext = "y-comp. (kN/m)"
qrtext = "mag (kN/m)"
qthtext = "angle (deg)"
def __init__(self, main_frm):
self.tb_frm = tk.Frame(main_frm)
self.tb_frm.config(highlightcolor="grey", highlightbackground="grey", highlightthickness=1)
self.tb_frm.pack(side=tk.TOP, fill=tk.X)
#First label
tb_lbl = tk.Label(self.tb_frm, text="Add New\nLoad")
tb_lbl.grid(row=0, column=0, rowspan=2)
#Choose point or distributed
self.pt_ds = tk.IntVar(self.tb_frm)
self.pt_ds.set(0)
pt_btn = tk.Radiobutton(self.tb_frm, variable=self.pt_ds, value=0, command=self.setpt)
pt_btn.config(indicatoron=0, text="Point")
pt_btn.grid(row=0, column=1, sticky=tk.W+tk.E)
ds_btn = tk.Radiobutton(self.tb_frm, variable=self.pt_ds, value=1, command=self.setds)
ds_btn.config(indicatoron=0, text="Distributed")
ds_btn.grid(row=1, column=1, sticky=tk.W+tk.E)
#Choose components or r,theta
self.c_p = tk.IntVar(self.tb_frm)
self.c_p.set(0)
c_btn = tk.Radiobutton(self.tb_frm, variable=self.c_p, value=0, command=self.setcomp)
c_btn.config(indicatoron=0, text="Components")
c_btn.grid(row=0, column=2, sticky=tk.W+tk.E)
p_btn = tk.Radiobutton(self.tb_frm, variable=self.c_p, value=1, command=self.setpol)
p_btn.config(indicatoron=0, text="Polar")
p_btn.grid(row=1, column=2, sticky=tk.W+tk.E)
#Frame that adjusts itself to have the needed fields
self.comp_frm = tk.Frame(self.tb_frm)
self.comp_frm.config(borderwidth=2, relief=tk.SUNKEN)
self.comp_frm.grid(row=0, column=3, rowspan=2, sticky=tk.N+tk.S)
self.comp_frm.grid_rowconfigure(1,weight=1)
self.setpt()
#Button to add the new load
self.add_btn = tk.Button(self.tb_frm, text="Add")
self.add_btn.grid(row=0, column=5, padx=2, pady=2, ipadx=8, rowspan=2, sticky=tk.N+tk.S)
def is_ds(self):
return self.pt_ds.get()
def setcomp(self):
if self.pt_ds.get() == 0: #Pt. load comp. labels
self.Pc1_lbl.config(text=self.xctext)
self.Pc2_lbl.config(text=self.yctext)
elif self.pt_ds.get() == 1: #Distr. load comp. labels
self.Pc1_lbl.config(text="q0 "+self.qxtext)
self.Pc2_lbl.config(text="q0 "+self.qytext)
self.Pc3_lbl.config(text="q1 "+self.qxtext)
self.Pc4_lbl.config(text="q1 "+self.qytext)
def setpol(self):
if self.pt_ds.get() == 0: #Pt. load polar labels
self.Pc1_lbl.config(text=self.rtext)
self.Pc2_lbl.config(text=self.thtext)
if self.pt_ds.get() == 1: #Distr. load polar labels
self.Pc1_lbl.config(text="q0 "+self.rtext)
self.Pc2_lbl.config(text="q0 "+self.thtext)
self.Pc3_lbl.config(text="q1 "+self.qrtext)
self.Pc4_lbl.config(text="q1 "+self.qthtext)
def setpt(self):
for widget in self.comp_frm.winfo_children():
widget.destroy()
#Load comp. 1 label
self.Pc1_lbl = tk.Label(self.comp_frm, text=self.xctext)
self.Pc1_lbl.grid(row=0, column=0)
#Load comp. 1 entry
self.Pc1_entry = tk.Entry(self.comp_frm)
self.Pc1_entry.config(width=num_e_wid)
self.Pc1_entry.grid(row=1, column=0)
#Load comp. 2 label
self.Pc2_lbl = tk.Label(self.comp_frm, text=self.yctext)
self.Pc2_lbl.grid(row=0, column=1)
#Load comp. 2 entry
self.Pc2_entry = tk.Entry(self.comp_frm)
self.Pc2_entry.config(width=num_e_wid)
self.Pc2_entry.grid(row=1, column=1)
#If in polar mode, fix the labels.
if self.c_p.get():
self.setpol()
def setds(self):
for widget in self.comp_frm.winfo_children():
widget.destroy()
#Q 0 comp. 1 label
self.Pc1_lbl = tk.Label(self.comp_frm, text="q0 "+self.qxtext)
self.Pc1_lbl.grid(row=0, column=0)
#Q 0 comp. 1 entry
self.Pc1_entry = tk.Entry(self.comp_frm)
self.Pc1_entry.config(width=num_e_wid)
self.Pc1_entry.grid(row=1, column=0)
#Q 0 comp. 2 label
self.Pc2_lbl = tk.Label(self.comp_frm, text="q0 "+self.qytext)
self.Pc2_lbl.grid(row=0, column=1)
#Q 0 comp. 2 entry
self.Pc2_entry = tk.Entry(self.comp_frm)
self.Pc2_entry.config(width=num_e_wid)
self.Pc2_entry.grid(row=1, column=1)
#Q 1 comp. 1 label
self.Pc3_lbl = tk.Label(self.comp_frm, text="q1 "+self.qxtext)
self.Pc3_lbl.grid(row=0, column=2)
#Q 1 comp. 1 entry
self.Pc3_entry = tk.Entry(self.comp_frm)
self.Pc3_entry.config(width=num_e_wid)
self.Pc3_entry.grid(row=1, column=2)
#Q 1 comp. 2 label
self.Pc4_lbl = tk.Label(self.comp_frm, text="q1 "+self.qytext)
self.Pc4_lbl.grid(row=0, column=3)
#Q 1 comp. 2 entry
self.Pc4_entry = tk.Entry(self.comp_frm)
self.Pc4_entry.config(width=num_e_wid)
self.Pc4_entry.grid(row=1, column=3)
#If in polar mode, fix the labels.
if self.c_p.get():
self.setpol()
#Returns the components of the load in N
def get_P(self):
try:
Pc1 = float(self.Pc1_entry.get())
Pc2 = float(self.Pc2_entry.get())
except ValueError:
return ("NaN","NaN")
if self.pt_ds.get() == 0:
if self.c_p.get() == 0: #"Components"
return (Pc1*1000, Pc2*1000)
if self.c_p.get() == 1: #"Polar"
return self.p_to_c(Pc1*1000, Pc2)
elif self.pt_ds.get() == 1:
try:
Pc3 = float(self.Pc3_entry.get())
Pc4 = float(self.Pc4_entry.get())
except ValueError:
return ("NaN","NaN")
if self.c_p.get() == 0:
return ( (Pc1*1000, Pc2*1000), (Pc3*1000, Pc4*1000) )
if self.c_p.get() == 1:
return ( self.p_to_c(Pc1*1000, Pc2), self.p_to_c(Pc3*1000, Pc4))
def mag(self):
if self.pt_ds.get() == 0:
x,y = self.get_P()
if self.c_p.get() == 0:
return math.sqrt(x**2+y**2)
if self.c_p.get() == 1:
return x
if self.pt_ds.get() == 1:
(x0,y0), (x1,y1) = self.get_P()
if self.c_p.get() == 0:
return ( math.sqrt(x0**2+y0**2), math.sqrt(x1**2+y1**2) )
if self.c_p.get() == 1:
return ( x0, x1 )
#Polar to Coords. th in deg
@staticmethod
def p_to_c(r, th):
x = r*math.cos(math.radians(th))
y = r*math.sin(math.radians(th))
return (x,y)
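# Worked example: p_to_c(1000, 90) returns (~0.0, 1000.0), i.e. a 1 kN
# load at 90 degrees is purely vertical (the tiny x component is float
# rounding from cos(pi/2)).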
#Return True if all fields contain numbers and the magnitude is nonzero.
def has_float_vals(self):
try:
#Validate the entries first so mag() never operates on the "NaN" sentinels from get_P().
float(self.Pc1_entry.get())
float(self.Pc2_entry.get())
if self.pt_ds.get() == 1:
float(self.Pc3_entry.get())
float(self.Pc4_entry.get())
if self.mag() == 0:
return False
except ValueError:
return False
return True
``` |
{
"source": "JonKDahl/physical_constants_and_units",
"score": 3
} |
#### File: physical_constants_and_units/constants/constants.py
```python
from numpy import pi, e, array
import codecs
import urllib.request
import os.path
import os
import multiprocessing
from urllib.error import HTTPError
def download_html(data_directory, which="all"):
"""
Download the html files with mass data.
Parameters
----------
data_directory : string
Name of the folder of downloaded data.
which : list, string
A list of element names by their abbreviations, or a single
element as a string.
"""
element_names_list = ["H", "He", "Li", "Be", "B", "C", "N", "O", "F",
"Ne", "Na", "Mg", "Al", "Si", "P", "S", "Cl", "Ar", "K", "Ca",
"Sc", "Ti", "V", "Cr", "Mn", "Fe", "Co", "Ni", "Cu", "Zn", "Ga",
"Ge", "As", "Se", "Br", "Kr", "Rb", "Sr", "Y", "Zr", "Nb", "Mo",
"Tc", "Ru", "Rh", "Pd", "Ag", "Cd", "In", "Sn", "Sb", "Te", "I",
"Xe", "Cs", "Ba", "La", "Ce", "Pr", "Nd", "Pm", "Sm", "Eu",
"Gd", "Tb", "Dy", "Ho", "Er", "Tm", "Yb", "Lu", "Hf", "Ta", "W",
"Re", "Os", "Ir", "Pt", "Au", "Hg", "Tl", "Pb", "Bi", "Po",
"At", "Rn", "Fr", "Ra", "Ac", "Th", "Pa", "U", "Np", "Pu", "Am",
"Cm", "Bk", "Cf", "Es", "Fm", "Md", "No", "Lr", "Rf", "Db",
"Sg", "Bh", "Hs", "Mt", "Ds", "Rg", "Cn", "Nh", "Fl", "Mc",
"Lv", "Ts", "Og", "all"]
element_names_dict = {"H":1, "He":2, "Li":3, "Be":4, "B":5, "C":6, "N":7,
"O":8, "F":9, "Ne":10, "Na":11, "Mg":12, "Al":13, "Si":14, "P":15,
"S":16, "Cl":17, "Ar":18, "K":19, "Ca":20, "Sc":21, "Ti":22, "V":23,
"Cr":24, "Mn":25, "Fe":26, "Co":27, "Ni":28, "Cu":29, "Zn":30, "Ga":31,
"Ge":32, "As":33, "Se":34, "Br":35, "Kr":36, "Rb":37, "Sr":38, "Y":39,
"Zr":40, "Nb":41, "Mo":42, "Tc":43, "Ru":44, "Rh":45, "Pd":46, "Ag":47,
"Cd":48, "In":49, "Sn":50, "Sb":51, "Te":52, "I":53, "Xe":54, "Cs":55,
"Ba":56, "La":57, "Ce":58, "Pr":59, "Nd":60, "Pm":61, "Sm":62, "Eu":63,
"Gd":64, "Tb":65, "Dy":66, "Ho":67, "Er":68, "Tm":69, "Yb":70, "Lu":71,
"Hf":72, "Ta":73, "W":74, "Re":75, "Os":76, "Ir":77, "Pt":78, "Au":79,
"Hg":80, "Tl":81, "Pb":82, "Bi":83, "Po":84, "At":85, "Rn":86, "Fr":87,
"Ra":88, "Ac":89, "Th":90, "Pa":91, "U":92, "Np":93, "Pu":94, "Am":95,
"Cm":96, "Bk":97, "Cf":98, "Es":99, "Fm":100, "Md":101, "No":102,
"Lr":103, "Rf":104, "Db":105, "Sg":106, "Bh":107, "Hs":108, "Mt":109,
"Ds":110, "Rg":111, "Cn":112, "Nh":113, "Fl":114, "Mc":115, "Lv":116,
"Ts":117, "Og":118}
if which == "all": which = element_names_list[:-1]
if isinstance(which, str): which = [which]
elif isinstance(which, list):
if not set(which) < set(element_names_list):
print("You have specified invalid elements. " +
" Please use the two letter abbreviations, either in a list" +
" or a single element as a string.")
print(element_names_list)
return
else:
print(f"Valid input types are {list} and {str}, got {type(which)}.")
return
if not os.path.isdir(data_directory): os.mkdir(data_directory)
pool = multiprocessing.Pool()
pool.map(fetch_data_in_parallel,
((i, which, element_names_dict, data_directory) for i in range(len(which))))
pool.close() # No more work will be submitted; let the workers exit.
pool.join() # Wait for all downloads to finish before returning.
def fetch_data_in_parallel(index_list_dict_folder):
"""
The delay between request and receive makes fetching data for many
elements slow. This function is parallelised with the built-in
multiprocessing module to fetch mass data in parallel.
Parameters
----------
index_list_dict_folder : tuple
Since pool.map only supports a single function argument, all
objects needed inside this function are passed as a tuple.
i : int
Current index of the input list of elements to fetch.
which : list
A list of elements to fetch.
element_names_dict : dictionary
A dictionary where key = element name, value = element
number.
data_directory : string
Name of the sub-directory where the mass data are stored.
"""
i, which, element_names_dict, data_directory = index_list_dict_folder
element_number = f"{element_names_dict[which[i]]:03d}" # Three-digit id for the local filename.
filepath = data_directory + "/" + element_number + "-" + which[i] + "-masses.html"
element_number = f"{element_names_dict[which[i]]:02d}" # The JAEA query URL expects a two-digit id.
if not os.path.isfile(filepath):
urlname = "https://wwwndc.jaea.go.jp/cgi-bin/nucltab14?" + element_number
try:
urllib.request.urlretrieve(urlname, filepath)
print(filepath + " downloaded")
except HTTPError:
print(f"Unable to download {urlname}. Invalid URL.")
def extract_mass_from_html(filename):
"""
Read (pre-downloaded) html files from 'wwwndc.jaea.go.jp' with mass
data.
"""
data = {}
names = []
with codecs.open(filename, "r") as infile:
for i in range(3): infile.readline() # Skip non-interesting lines.
infile.readline() # Line containing the element name (not used here).
infile.readline()
categories = infile.readline() # Extract the titles of the table.
infile.readline()
max_iterations = 1000 # Avoid infinite loop.
iterations = 0
while True: # Iterate through the file, line by line.
content = infile.readline()
content = content.split()
if (not content) or (content[0][0:4] == "----"): break # End of table (or unexpected blank line).
if (iterations > max_iterations):
print("Reached maximum number of iterations. The program did"
+ " most likely not find the end of the file.")
break
name = ""
reference_point = 0 # Index into 'content' used as the reference point for locating the value and name.
name_location_1 = 0 # Substring index of (partial or entire) name of the isotope.
for elem in content:
"""
Use '</a>' as a point of reference. Mass value is
always the next element in the list 'content' from
'</a>'.
"""
if (elem[-4:] == "</a>"):
if (len(elem) > 4):
"""
The name of the element is sometimes (partially
or entirely) in the same string as '</a>', eg:
'98</a>', so we extract all characters before
'<', which has index -4, if the length of elem
is greater than 4.
"""
name = elem[0:-4]
for char in elem[:-1]:
"""
Sometimes the entire name is in the same
string as '</a>', eg: '...>Tc-102</a>', and
the name is always preceded by '>'. We
look for '>' to extract only the name of the
isotope. If the name is only partially
contained within '</a>' like '98</a>', then
there is nothing to strip away in front,
but we must make sure that the final '>' is
not mistaken for the character preceding the
name, hence 'elem[:-1]'.
"""
name_location_1 += 1
if (char == ">"):
name = elem[name_location_1:-4]
break
break # Break the loop if we find '</a>'.
reference_point += 1
try:
"""
If the next element of 'content' can be converted to
float, then it is the mass of the isotope.
"""
value = float(content[reference_point+1])
except ValueError:
"""
If the next element of 'content' cannot be converted to
float, then the line is not interesting and can be
skipped entirely.
"""
continue
name_location_2 = 0 # Substring index of (partial or entire) name of the isotope.
try:
"""
If the previous element of 'content' can be converted to
int, then that element is a part of the name of the
isotope, ex. [..., ...H-, 1, </a>], and the rest of the
name is located 2 steps back in the list.
"""
name = int(content[reference_point-1])
name = str(name)
reference_point -= 2
except ValueError:
"""
If the previous element of 'content' cannot be converted
to int, then the rest of the name is located 1 step back
in the list.
"""
reference_point -= 1
for char in content[reference_point]:
"""
Using '</a>' to find the start of the name of the
isotope.
"""
name_location_2 += 1
if (char == ">"): break
name = content[reference_point][name_location_2:] + name
name = name.replace("-", "")
names.append(name)
data[name] = value
iterations += 1
return data, names
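# Example return shape (illustrative isotope and value):
# ({"Tc99": 98.9062547, ...}, ["Tc99", ...])
# i.e. a dict of isotope name -> mass in amu, plus the list of names.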
class MassesClass(dict):
def __init__(self, which="all", unit="kg", suppress_print=False):
"""
Parameters
----------
which : string, list
A single string, or a list of strings, containing the
abbreviated names of the elements you wish to download and
load the mass data for. Note that the download only happens
the first time you run the program, so it is probably best
to leave this at the default value of 'all'. Input
is case sensitive. Arbitrary ordering of input elements is
allowed.
Examples:
which = 'all' - download and load all elements.
which = 'U' - download and load mass data for uranium.
which = ['H', 'He', 'Au'] - download and load mass data
for hydrogen, helium, and gold.
"""
self.amu_to_kg = 1.66053904e-27 # 1 amu in kg.
self.u_to_kg = self.amu_to_kg # Alias.
self.kg_to_amu = 1/self.amu_to_kg # 1 kg in amu.
self.kg_to_u = self.kg_to_amu # Alias.
self.amu_to_eV = 931.49410242e6 # 1 amu in eV/c**2.
self.amu_to_ev = self.amu_to_eV # Alias.
self.u_to_eV = self.amu_to_eV # Alias.
self.u_to_ev = self.amu_to_eV # Alias.
self.eV_to_amu = 1/self.amu_to_eV # 1 eV/c**2 in amu.
self.ev_to_amu = self.eV_to_amu # Alias.
self.eV_to_u = self.eV_to_amu # Alias.
self.ev_to_u = self.eV_to_amu # Alias.
self.eV_to_kg = 1.782662e-36 # 1 eV/c**2 in kg.
self.ev_to_kg = self.eV_to_kg # Alias.
self.kg_to_eV = 1/self.eV_to_kg # 1 kg in eV/c**2.
self.kg_to_ev = self.kg_to_eV # Alias.
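# Round-trip sanity check for the factors above: 1.0*amu_to_kg*kg_to_amu == 1.0
# by construction, and 9.10938356e-31*kg_to_eV is ~0.511e6, the electron
# rest energy in eV/c**2.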
self.mass_data_in_amu = {}
self.mass_data_in_eV = {}
self.mass_data_in_kg = {}
self.names = []
self.current_unit = ""
# NUCLEAR DATA PART IS CURRENTLY NOT WORKING. MUST BE DEBUGGED.
# mass_data_tmp = {}
# names_tmp = []
# data_directory = "nuclear_data/"
# if __name__ == "__main__":
# """
# Does not work in an interactive session. Run the script
# directly before interactive usage.
# """
# download_html(data_directory, which)
# else:
# if not os.path.isdir(data_directory):
# print("Mass data files from https://wwwndc.jaea.go.jp/NuC/"
# + " must be downloaded once first by directly running this script.")
# print(f"Script located at {os.path.dirname(os.path.realpath(__file__))}")
# return
# elif (len(os.listdir(data_directory)) == 0):
# print("Mass data files from https://wwwndc.jaea.go.jp/NuC/"
# + " must be downloaded once first by directly running this script.")
# print(f"Script located at {os.path.dirname(os.path.realpath(__file__))}")
# return
# for filename in os.listdir(data_directory):
# if filename.endswith(".html"):
# mass_data_tmp, names_tmp = extract_mass_from_html(data_directory + filename)
# self.mass_data_in_amu.update(mass_data_tmp)
# self.names += names_tmp
# for key in self.mass_data_in_amu:
# self.mass_data_in_eV[key] = self.mass_data_in_amu[key]*self.amu_to_eV
# self.mass_data_in_kg[key] = self.mass_data_in_amu[key]*self.amu_to_kg
# self.names = sorted(self.names)
self.set_unit(unit, suppress_print=True)
def set_unit(self, unit, suppress_print=False):
"""
Set the mass unit.
Parameters
----------
unit : string
Valid inputs are: 'ev', 'eV', 'ev/c^2', 'ev/c**2', 'eV/c^2',
'eV/c**2', 'amu', 'u', 'kg'. The class constructor defaults to 'kg'.
"""
if (unit == "ev") or (unit == "eV") or (unit == "ev/c^2") or (unit == "ev/c**2") or (unit == "eV/c^2") or (unit == "eV/c**2"):
self.current_unit = "eV"
self.set_attributes(self.mass_data_in_eV)
fac = self.kg_to_eV # Base unit of all other masses is kg.
if not suppress_print:
print(f"Unit set to 'eV/c**2'.")
elif (unit == "amu") or (unit == "u"):
self.current_unit = "amu"
self.set_attributes(self.mass_data_in_amu)
fac = self.kg_to_amu # Base unit of all other masses is kg.
if not suppress_print:
print(f"Unit set to 'amu'.")
elif (unit == "kg"):
self.current_unit = "kg"
self.set_attributes(self.mass_data_in_kg)
fac = 1 # Base unit of all other masses is kg.
if not suppress_print:
print(f"Unit set to 'kg'.")
else:
print(f"Invalid unit. Use 'eV', 'amu', or 'kg'. Got {unit}.")
return
# Nuclear elements (mass of nuclei).
self.alpha = 6.644657230e-27*fac # Mass of alpha particle, base [kg].
# Hadrons.
self.p = 1.6726219e-27*fac # mass of proton, base [kg].
self.proton = self.p
self.n = 1.67492749804e-27*fac # mass of neutron, base [kg].
self.neutron = self.n
# Leptons.
self.e = 9.10938356e-31*fac # mass of electron, base [kg].
self.electron = self.e
self.mu = 1.8835315972221714e-28*fac # mass of muon, base [kg].
self.muon = self.mu
self.tau = 3.16754080132e-27*fac # mass of tau, base [kg].
self.tauon = self.tau
# Mesons.
self.pion = array([2.4880613534e-28, 2.406176557092e-28, 2.4880613534e-28])*fac # pi minus, pi zero, pi plus masses, base [kg].
self.pi = self.pion
self.pi_m = self.pion[0] # Alias.
self.pim = self.pion[0] # Alias.
self.pi_z = self.pion[1] # Alias.
self.piz = self.pion[1] # Alias.
self.pi_0 = self.pion[1] # Alias.
self.pi0 = self.pion[1] # Alias.
self.pi_n = self.pion[1] # Alias.
self.pin = self.pion[1] # Alias.
self.pi_p = self.pion[2] # Alias.
self.pip = self.pion[2] # Alias.
self.kaon = array([8.80059228174e-28, 8.87138178976e-28, 8.80059228174e-28])*fac # K minus, K zero, K plus masses, base [kg].
self.K_m = self.kaon[0] # Alias.
self.Km = self.kaon[0] # Alias.
self.K_z = self.kaon[1] # Alias.
self.Kz = self.kaon[1] # Alias.
self.K_0 = self.kaon[1] # Alias.
self.K0 = self.kaon[1] # Alias.
self.K_n = self.kaon[1] # Alias.
self.Kn = self.kaon[1] # Alias.
self.K_p = self.kaon[2] # Alias.
self.Kp = self.kaon[2] # Alias.
## Astronomy.
self.sol = 1.9891e30*fac # mass of sun, base [kg]
self.sun = self.sol
self.earth = 5.972e24*fac # mass of earth, base [kg]
def print_isotopes(self, which="all"):
"""
Print a given set of available isotopes.
Parameters
----------
which : string
Choose element for which to list all available isotopes and
masses. Defaults to all.
"""
if (self.current_unit == "eV"):
current_mass_data = self.mass_data_in_eV
elif (self.current_unit == "amu"):
current_mass_data = self.mass_data_in_amu
elif (self.current_unit == "kg"):
current_mass_data = self.mass_data_in_kg
for name in self.names:
if (name.startswith(which)) or (which == "all"):
print(f"{name}: {current_mass_data[name]}")
def set_attributes(self, *args, **kwargs):
"""
Update class attributes with entries in input dictionary. Each
key will be the name of the attribute and each value will be
the value of the attribute.
"""
super(MassesClass, self).__init__(*args, **kwargs)
self.__dict__.update(self)
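# Because the class subclasses dict and mirrors entries as attributes here,
# a loaded isotope mass is reachable both ways, e.g. m["U235"] and m.U235
# (assuming the currently disabled nuclear-data block has populated it).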
class LifetimeClass:
# Leptons.
mu = 2.1969811e-6 # Muon lifetime, [s].
muon = mu
tau = 2.903e-13 # Tau lifetime, [s].
tauon = tau
# Mesons.
pion = array([2.6e-8, 8.4e-17, 2.6e-8]) # pi minus, pi zero, pi plus lifetimes, [s].
pi = pion
pi_m = pion[0]
pi_z = pion[1]
pi_n = pi_z
pi_p = pion[2]
def __init__(self):
pass
class HalfLifeClass:
Mo99 = 237513.6 # Half-life of molybdenum-99, [s].
mo99 = Mo99 # Alias.
Tc99 = 6661667078600 # Half-life of technetium-99, [s].
tc99 = Tc99 # Alias.
Tc99m = 21624.12 # Half-life of the metastable state of technetium-99 (~6.0 h), [s].
tc99m = Tc99m # Alias.
def __init__(self):
pass
class UnitsClass:
"""
Class of non-SI units converted to the base SI equivalent.
"""
curie = 3.7e10 # Unit of radioactivity, [1/s].
ci = curie
Ci = curie
barn = 1e-28 # Unit for expressing the cross-sectional area of nuclear reactions, [m**2].
b = barn
def __init__(self):
pass
m = MassesClass()
_m_internal = MassesClass(suppress_print=True) # For use inside this file.
t = LifetimeClass()
hl = HalfLifeClass()
u = UnitsClass()
man = "constants.m for masses. constants.t for lifetimes. constants.u for units. constants.hl for half-lives."
manual = man
# constants
c = 299792458 # Speed of light in vacuum, [m/s]
h = 6.62607004e-34 # Planck's constant, [m^2*kg/s]
hbar = h/(2*pi) # reduced Planck's constant, [m^2*kg/s]
h_ev = 4.135667696e-15 # Planck's constant, [eV*s]
hbar_ev = h_ev/(2*pi) # Reduced Planck's constant, [eV*s]
kb = 1.38064852e-23 # Boltzmann's constant, [m^2*kg/(k*s^2)]
kb_ev = 8.6173303e-5 # Boltzmann's constant, [ev/K]
G = 6.67408e-11 # gravitational constant, [m^3/(kg*s^2)]
g = 9.807 # gravitational acceleration, [m/s**2]
R = 8.3144598 # gas constant, [J/(K mol)]
pi = pi # Ratio between circumference and diameter of a circle, [unitless].
e = e # Eulers number, [unitless].
sigma = 2*pi**5*kb**4/(15*c**2*h**3) # Stefan-Boltzmanns constant, [W/(m^2*K^4)].
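# Sanity check: with the values above, sigma evaluates to ~5.6704e-8
# W/(m^2*K^4), matching the accepted Stefan-Boltzmann constant.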
# constants, units
c_unit = "speed of light m/s"
h_unit = "planck constant m^2*kg/s"
hbar_unit = "reduced planck constant m^2*kg/s"
kb_unit = "boltzmann constant m^2*kg/(K*s^2) = J/K"
kb_ev_unit = "boltzmann constant ev/K"
G_unit = "universal gravitational constant m^3/(kg*s^2)"
g_unit = "gravitational acceleration m*s^-2"
R_unit = "gas constant J/(K mol)"
pi_unit = "ratio between circumference and diameter of a circle unitless"
e_unit = "eulers number unitless"
sigma_unit = "stefan-boltzmann constant W/(m^2*K^4)"
# lengths
AU = 149597871 # astronomical unit, [km]
ly = 9.4605284e15 # light year, [m]
pc = 3.08567758e16 # parsec, [m]
r_sol = 695508e3 # solar radius, [m]
r_earth = 6371e3 # earth radius, [m]
# lengths, units
AU_unit = "km"
ly_unit = "m"
pc_unit = "m"
r_sol_unit = "m"
r_earth_unit = "m"
# energies
eV_J = 1.60217662e-19 # electron volt to joule, [J]
ev_j = eV_J # Alias.
ev_J = eV_J # Alias.
ev_joule = eV_J # Alias.
# energies, units
eV_J_unit = "J"
# unsorted
atm = 101325 # atmospheric pressure, [Pa]
mol = 6.0221415e23 # Avogadro constant, [1/mol]
T_sol = 1.57e7 # core temp of the sun, [K]
L_sol = 3.828e26 # solar luminosity, [W]
yr = 31556926 # seconds in a year, [s]
ec = 1.60217662e-19 # elementary charge, [coulomb]
alpha = 0.0072973525693 # fine structure constant
mu_b = ec*hbar/(2*_m_internal.electron)
# unsorted, units
atm_unit = "atmospheric pressure [Pa]"
mol_unit = "mole [unitless]"
T_sol_unit = "core temperature of the sun [K]"
L_sol_unit = "solar luminosity [W]"
yr_unit = "seconds in a year [s]"
ec_unit = "elementary charge [C]"
alpha_unit = "fine structure constant [unitless]"
mu_b_unit = "bohr magneton [J/T]"
``` |
{
"source": "jonkeane/conbench",
"score": 2
} |
#### File: conbench/api/contexts.py
```python
from ..api import rule
from ..api._endpoint import ApiEndpoint, maybe_login_required
from ..entities._entity import NotFound
from ..entities.context import Context, ContextSerializer
class ContextListAPI(ApiEndpoint):
serializer = ContextSerializer()
@maybe_login_required
def get(self):
"""
---
description: Get a list of contexts.
responses:
"200": "ContextList"
"401": "401"
tags:
- Contexts
"""
contexts = Context.all(order_by=Context.id.asc(), limit=500)
return self.serializer.many.dump(contexts)
class ContextEntityAPI(ApiEndpoint):
serializer = ContextSerializer()
def _get(self, context_id):
try:
context = Context.one(id=context_id)
except NotFound:
self.abort_404_not_found()
return context
@maybe_login_required
def get(self, context_id):
"""
---
description: Get a context.
responses:
"200": "ContextEntity"
"401": "401"
"404": "404"
parameters:
- name: context_id
in: path
schema:
type: string
tags:
- Contexts
"""
context = self._get(context_id)
return self.serializer.one.dump(context)
context_entity_view = ContextEntityAPI.as_view("context")
context_list_view = ContextListAPI.as_view("contexts")
rule(
"/contexts/<context_id>/",
view_func=context_entity_view,
methods=["GET"],
)
rule(
"/contexts/",
view_func=context_list_view,
methods=["GET"],
)
```
#### File: conbench/api/_google.py
```python
import json
import os
import flask as f
import requests
def get_google_config():
client_id = os.environ.get("GOOGLE_CLIENT_ID", None)
client_secret = os.environ.get("GOOGLE_CLIENT_SECRET", None)
discovery_url = "https://accounts.google.com/.well-known/openid-configuration"
return discovery_url, client_id, client_secret
def get_google_client():
from oauthlib.oauth2 import WebApplicationClient
discovery_url, client_id, _ = get_google_config()
google = requests.get(discovery_url).json()
client = WebApplicationClient(client_id)
return client, google
def auth_google_user():
client, google = get_google_client()
redirect_uri = f.url_for("api.callback", _external=True, _scheme="https")
return client.prepare_request_uri(
google["authorization_endpoint"],
redirect_uri=redirect_uri,
scope=["openid", "email", "profile"],
)
def get_google_user():
client, google = get_google_client()
_, client_id, client_secret = get_google_config()
token_url, headers, body = client.prepare_token_request(
google["token_endpoint"],
authorization_response=f.request.url.replace("http://", "https://"),
redirect_url=f.request.base_url.replace("http://", "https://"),
code=f.request.args.get("code"),
)
token_response = requests.post(
token_url,
headers=headers,
data=body,
auth=(client_id, client_secret),
)
client.parse_request_body_response(json.dumps(token_response.json()))
uri, headers, body = client.add_token(google["userinfo_endpoint"])
return requests.get(
uri,
headers=headers,
data=body,
).json()
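# Flow sketch: auth_google_user() builds the consent-screen redirect URI;
# Google calls back with ?code=..., and get_google_user() exchanges that
# code for tokens at the token endpoint, then fetches the userinfo payload
# (email, name, ...) with the bearer token added by client.add_token().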
```
#### File: conbench/api/users.py
```python
import flask_login
import marshmallow
from ..api import rule
from ..api._docs import spec
from ..api._endpoint import ApiEndpoint
from ..config import Config
from ..entities._entity import NotFound
from ..entities.user import User, UserSchema, UserSerializer
class UserValidationMixin:
def validate_user(self, schema, user=None):
data = self.validate(schema)
email = data.get("email")
# user update case (no change to email)
if user and user.email == email:
return data
other = User.first(email=email)
if other:
message = "Email address already in use."
self.abort_400_bad_request({"email": [message]})
return data
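# e.g. a PUT that keeps the user's current email passes straight through
# (the early return above), while submitting another user's email triggers
# the 400 "Email address already in use." response.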
class UserEntityAPI(ApiEndpoint, UserValidationMixin):
serializer = UserSerializer()
schema = UserSchema()
def _get(self, user_id):
try:
user = User.one(id=user_id)
except NotFound:
self.abort_404_not_found()
return user
@flask_login.login_required
def get(self, user_id):
"""
---
description: Get a user.
responses:
"200": "UserEntity"
"401": "401"
"404": "404"
parameters:
- name: user_id
in: path
schema:
type: string
tags:
- Users
"""
user = self._get(user_id)
return self.serializer.one.dump(user)
@flask_login.login_required
def delete(self, user_id):
"""
---
description: Delete a user.
responses:
"204": "204"
"401": "401"
"404": "404"
parameters:
- name: user_id
in: path
schema:
type: string
tags:
- Users
"""
user = self._get(user_id)
user.delete()
return self.response_204_no_content()
@flask_login.login_required
def put(self, user_id):
"""
---
description: Edit a user.
responses:
"200": "UserEntity"
"401": "401"
"404": "404"
parameters:
- name: user_id
in: path
schema:
type: string
requestBody:
content:
application/json:
schema: UserUpdate
tags:
- Users
"""
user = self._get(user_id)
data = self.validate_user(self.schema.update, user)
password = data.pop("password", None)
if password:
user.set_password(password)
user.update(data)
return self.serializer.one.dump(user)
class UserListAPI(ApiEndpoint, UserValidationMixin):
serializer = UserSerializer()
schema = UserSchema()
@flask_login.login_required
def get(self):
"""
---
description: Get a list of users.
responses:
"200": "UserList"
"401": "401"
tags:
- Users
"""
users = User.all()
return self.serializer.many.dump(users)
@flask_login.login_required
def post(self):
"""
---
description: Create a user.
responses:
"201": "UserCreated"
"400": "400"
"401": "401"
requestBody:
content:
application/json:
schema: UserCreate
tags:
- Users
"""
data = self.validate_user(self.schema.create)
user = User.create(data)
return self.response_201_created(self.serializer.one.dump(user))
class RegisterSchema(marshmallow.Schema):
email = marshmallow.fields.Email(required=True)
password = marshmallow.fields.String(required=True)
name = marshmallow.fields.String(required=True)
secret = marshmallow.fields.String(required=True)
class RegisterAPI(ApiEndpoint, UserValidationMixin):
schema = RegisterSchema()
serializer = UserSerializer()
def post(self):
"""
---
description: Sign up for a user account.
responses:
"201": "UserCreated"
"400": "400"
requestBody:
content:
application/json:
schema: Register
tags:
- Authentication
"""
data = self.validate_user(self.schema)
if data.get("secret") != Config.REGISTRATION_KEY:
message = "Invalid registration key."
self.abort_400_bad_request({"secret": [message]})
user = User.create(data)
return self.response_201_created(self.serializer.one.dump(user))
user_entity_view = UserEntityAPI.as_view("user")
user_list_view = UserListAPI.as_view("users")
rule(
"/users/",
view_func=user_list_view,
methods=["GET", "POST"],
)
rule(
"/users/<user_id>/",
view_func=user_entity_view,
methods=["GET", "DELETE", "PUT"],
)
rule(
"/register/",
view_func=RegisterAPI.as_view("register"),
methods=["POST"],
)
spec.components.schema("UserCreate", schema=UserSchema.create)
spec.components.schema("UserUpdate", schema=UserSchema.update)
spec.components.schema("Register", schema=RegisterSchema)
```
#### File: conbench/app/_util.py
```python
import re
from ..hacks import set_display_batch, set_display_name
from ..units import formatter_for_unit
def augment(benchmark, contexts=None):
set_display_name(benchmark)
set_display_time(benchmark)
set_display_batch(benchmark)
set_display_mean(benchmark)
set_display_language(benchmark, contexts)
set_display_error(benchmark)
tags = benchmark["tags"]
if "dataset" in tags:
tags["dataset"] = dataset_name(tags["dataset"])
def dataset_name(name):
return name.replace("_", " ")
def display_time(t):
return t.split(".")[0].replace("T", " ").rsplit(":", 1)[0] if t else ""
def set_display_language(benchmark, contexts):
if contexts is not None and benchmark["links"]["context"] in contexts:
url = benchmark["links"]["context"]
benchmark["display_language"] = contexts[url]["benchmark_language"]
else:
benchmark["display_language"] = "unknown"
def set_display_time(benchmark):
benchmark["display_timestamp"] = display_time(benchmark["timestamp"])
def set_display_mean(benchmark):
if not benchmark["stats"]["mean"]:
return ""
unit = benchmark["stats"]["unit"]
mean = float(benchmark["stats"]["mean"])
fmt = formatter_for_unit(unit)
benchmark["display_mean"] = fmt(mean, unit)
def set_display_error(benchmark):
if not benchmark["error"]:
benchmark["error"] = ""
def display_message(message):
# truncate git shas in commit message
for m in re.findall(r"\b[0-9a-f]{40}\b", message):
message = message.replace(m, m[:7])
return message
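# Example:
# display_message("Merge " + "a" * 40) -> "Merge aaaaaaa"
# (any standalone 40-hex-digit token is shortened to its first 7 chars).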
```
#### File: conbench/entities/context.py
```python
import flask as f
import sqlalchemy as s
from sqlalchemy.dialects import postgresql
from ..entities._entity import (
Base,
EntityMixin,
EntitySerializer,
NotNull,
generate_uuid,
)
class Context(Base, EntityMixin):
__tablename__ = "context"
id = NotNull(s.String(50), primary_key=True, default=generate_uuid)
tags = NotNull(postgresql.JSONB)
s.Index("context_index", Context.tags, unique=True)
class _Serializer(EntitySerializer):
def _dump(self, context):
result = {
"id": context.id,
"links": {
"list": f.url_for("api.contexts", _external=True),
"self": f.url_for("api.context", context_id=context.id, _external=True),
},
}
result.update(context.tags)
return result
class ContextSerializer:
one = _Serializer()
many = _Serializer(many=True)
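# Example serialized shape (illustrative tag values): the context's tags
# are flattened into the top level by result.update(context.tags), e.g.
# {"id": "...", "links": {...}, "benchmark_language": "Python", ...}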
```
#### File: conbench/entities/summary.py
```python
import decimal
import flask as f
import marshmallow
import sqlalchemy as s
from sqlalchemy import CheckConstraint as check
from sqlalchemy.dialects import postgresql
from sqlalchemy.orm import relationship
from ..entities._comparator import z_improvement, z_regression
from ..entities._entity import (
Base,
EntityMixin,
EntitySerializer,
NotNull,
Nullable,
generate_uuid,
)
from ..entities.case import Case
from ..entities.commit import Commit, get_github_commit, repository_to_url
from ..entities.context import Context
from ..entities.data import Data
from ..entities.distribution import update_distribution
from ..entities.hardware import Cluster, ClusterSchema, Machine, MachineSchema
from ..entities.info import Info
from ..entities.run import Run
from ..entities.time import Time
class Summary(Base, EntityMixin):
__tablename__ = "summary"
id = NotNull(s.String(50), primary_key=True, default=generate_uuid)
case_id = NotNull(s.String(50), s.ForeignKey("case.id"))
info_id = NotNull(s.String(50), s.ForeignKey("info.id"))
context_id = NotNull(s.String(50), s.ForeignKey("context.id"))
run_id = NotNull(s.Text, s.ForeignKey("run.id"))
case = relationship("Case", lazy="joined")
info = relationship("Info", lazy="joined")
context = relationship("Context", lazy="joined")
run = relationship("Run", lazy="select")
data = relationship(
"Data",
lazy="joined",
cascade="all, delete",
passive_deletes=True,
)
times = relationship(
"Time",
lazy="joined",
cascade="all, delete",
passive_deletes=True,
)
unit = Nullable(s.Text)
time_unit = Nullable(s.Text)
batch_id = Nullable(s.Text)
timestamp = NotNull(s.DateTime(timezone=False))
iterations = Nullable(s.Integer)
min = Nullable(s.Numeric, check("min>=0"))
max = Nullable(s.Numeric, check("max>=0"))
mean = Nullable(s.Numeric, check("mean>=0"))
median = Nullable(s.Numeric, check("median>=0"))
stdev = Nullable(s.Numeric, check("stdev>=0"))
q1 = Nullable(s.Numeric, check("q1>=0"))
q3 = Nullable(s.Numeric, check("q3>=0"))
iqr = Nullable(s.Numeric, check("iqr>=0"))
error = Nullable(postgresql.JSONB)
@staticmethod
def create(data):
tags = data["tags"]
has_error = "error" in data
if has_error:
summary_data = {"error": data["error"]}
else:
summary_data = data["stats"]
values = summary_data.pop("data")
times = summary_data.pop("times")
name = tags.pop("name")
# create if not exists
c = {"name": name, "tags": tags}
case = Case.first(**c)
if not case:
case = Case.create(c)
# create if not exists
hardware_type, field_name = (
(Machine, "machine_info")
if "machine_info" in data
else (Cluster, "cluster_info")
)
hardware = hardware_type.upsert(**data[field_name])
# create if not exists
if "context" not in data:
data["context"] = {}
context = Context.first(tags=data["context"])
if not context:
context = Context.create({"tags": data["context"]})
# create if not exists
if "info" not in data:
data["info"] = {}
info = Info.first(tags=data["info"])
if not info:
info = Info.create({"tags": data["info"]})
sha, repository = None, None
if "github" in data:
sha = data["github"]["commit"]
repository = repository_to_url(data["github"]["repository"])
# create if not exists
commit = Commit.first(sha=sha, repository=repository)
if not commit:
github = get_github_commit(repository, sha)
if github:
commit = Commit.create_github_context(sha, repository, github)
elif sha or repository:
commit = Commit.create_unknown_context(sha, repository)
else:
commit = Commit.create_no_context()
# create if not exists
run_id = data["run_id"]
run_name = data.pop("run_name", None)
run = Run.first(id=run_id)
if run:
if has_error:
run.has_errors = True
run.save()
else:
run = Run.create(
{
"id": run_id,
"name": run_name,
"commit_id": commit.id,
"hardware_id": hardware.id,
"has_errors": has_error,
}
)
summary_data["run_id"] = data["run_id"]
summary_data["batch_id"] = data["batch_id"]
summary_data["timestamp"] = data["timestamp"]
summary_data["case_id"] = case.id
summary_data["info_id"] = info.id
summary_data["context_id"] = context.id
summary = Summary(**summary_data)
summary.save()
if "error" in data:
return summary
values = [decimal.Decimal(x) for x in values]
bulk = []
for i, x in enumerate(values):
bulk.append(Data(result=x, summary_id=summary.id, iteration=i + 1))
Data.bulk_save_objects(bulk)
times = [decimal.Decimal(x) for x in times]
bulk = []
for i, x in enumerate(times):
bulk.append(Time(result=x, summary_id=summary.id, iteration=i + 1))
Time.bulk_save_objects(bulk)
update_distribution(summary, 100)
return summary
s.Index("summary_run_id_index", Summary.run_id)
s.Index("summary_case_id_index", Summary.case_id)
s.Index("summary_batch_id_index", Summary.batch_id)
s.Index("summary_info_id_index", Summary.info_id)
s.Index("summary_context_id_index", Summary.context_id)
class SummaryCreate(marshmallow.Schema):
data = marshmallow.fields.List(marshmallow.fields.Decimal, required=True)
times = marshmallow.fields.List(marshmallow.fields.Decimal, required=True)
unit = marshmallow.fields.String(required=True)
time_unit = marshmallow.fields.String(required=True)
iterations = marshmallow.fields.Integer(required=True)
min = marshmallow.fields.Decimal(required=False)
max = marshmallow.fields.Decimal(required=False)
mean = marshmallow.fields.Decimal(required=False)
median = marshmallow.fields.Decimal(required=False)
stdev = marshmallow.fields.Decimal(required=False)
q1 = marshmallow.fields.Decimal(required=False)
q3 = marshmallow.fields.Decimal(required=False)
iqr = marshmallow.fields.Decimal(required=False)
class SummarySchema:
create = SummaryCreate()
def to_float(value):
return float(value) if value else None
class _Serializer(EntitySerializer):
def _dump(self, summary):
by_iteration_data = sorted([(x.iteration, x.result) for x in summary.data])
data = [result for _, result in by_iteration_data]
by_iteration_times = sorted([(x.iteration, x.result) for x in summary.times])
times = [result for _, result in by_iteration_times]
z_score = float(summary.z_score) if summary.z_score else None
case = summary.case
tags = {"id": case.id, "name": case.name}
tags.update(case.tags)
return {
"id": summary.id,
"run_id": summary.run_id,
"batch_id": summary.batch_id,
"timestamp": summary.timestamp.isoformat(),
"tags": tags,
"stats": {
"data": [float(x) for x in data],
"times": [float(x) for x in times],
"unit": summary.unit,
"time_unit": summary.time_unit,
"iterations": summary.iterations,
"min": to_float(summary.min),
"max": to_float(summary.max),
"mean": to_float(summary.mean),
"median": to_float(summary.median),
"stdev": to_float(summary.stdev),
"q1": to_float(summary.q1),
"q3": to_float(summary.q3),
"iqr": to_float(summary.iqr),
"z_score": z_score,
"z_regression": z_regression(summary.z_score),
"z_improvement": z_improvement(summary.z_score),
},
"error": summary.error,
"links": {
"list": f.url_for("api.benchmarks", _external=True),
"self": f.url_for(
"api.benchmark", benchmark_id=summary.id, _external=True
),
"info": f.url_for("api.info", info_id=summary.info_id, _external=True),
"context": f.url_for(
"api.context", context_id=summary.context_id, _external=True
),
"run": f.url_for("api.run", run_id=summary.run_id, _external=True),
},
}
class SummarySerializer:
one = _Serializer()
many = _Serializer(many=True)
class GitHubCreate(marshmallow.Schema):
commit = marshmallow.fields.String(required=True)
repository = marshmallow.fields.String(required=True)
class _BenchmarkFacadeSchemaCreate(marshmallow.Schema):
run_id = marshmallow.fields.String(required=True)
run_name = marshmallow.fields.String(required=False)
batch_id = marshmallow.fields.String(required=True)
timestamp = marshmallow.fields.DateTime(required=True)
machine_info = marshmallow.fields.Nested(MachineSchema().create, required=False)
cluster_info = marshmallow.fields.Nested(ClusterSchema().create, required=False)
stats = marshmallow.fields.Nested(SummarySchema().create, required=False)
error = marshmallow.fields.Dict(required=False)
tags = marshmallow.fields.Dict(required=True)
info = marshmallow.fields.Dict(required=True)
context = marshmallow.fields.Dict(required=True)
github = marshmallow.fields.Nested(GitHubCreate(), required=False)
@marshmallow.validates_schema
def validate_hardware_info_fields(self, data, **kwargs):
if "machine_info" not in data and "cluster_info" not in data:
raise marshmallow.ValidationError(
"Either machine_info or cluster_info field is required"
)
if "machine_info" in data and "cluster_info" in data:
raise marshmallow.ValidationError(
"machine_info and cluster_info fields can not be used at the same time"
)
@marshmallow.validates_schema
def validate_stats_or_error_field_is_present(self, data, **kwargs):
if "stats" not in data and "error" not in data:
raise marshmallow.ValidationError("Either stats or error field is required")
if "stats" in data and "error" in data:
raise marshmallow.ValidationError(
"stats and error fields can not be used at the same time"
)
class BenchmarkFacadeSchema:
create = _BenchmarkFacadeSchemaCreate()
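# A minimal submission sketch accepted by this facade schema (values are
# illustrative; exactly one of machine_info/cluster_info and one of
# stats/error must be present, per the validators above):
#
# {
# "run_id": "abc123", "batch_id": "b1", "timestamp": "2021-01-01T00:00:00",
# "tags": {"name": "file-read"}, "info": {}, "context": {},
# "machine_info": {...}, "stats": {"data": ["1.1"], "times": ["0.9"], ...}
# }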
```
#### File: tests/api/test_history.py
```python
from ...api._examples import _api_history_entity
from ...tests.api import _asserts, _fixtures
def _expected_entity(summary):
return _api_history_entity(
summary.id,
summary.case_id,
summary.context_id,
summary.run.name,
)
class TestHistoryGet(_asserts.GetEnforcer):
url = "/api/history/{}/"
public = True
def _create(self):
return _fixtures.summary()
def test_get_history(self, client):
self.authenticate(client)
summary = self._create()
response = client.get(f"/api/history/{summary.id}/")
self.assert_200_ok(response, contains=_expected_entity(summary))
```
#### File: tests/api/test_info.py
```python
from ...api._examples import _api_info_entity
from ...tests.api import _asserts, _fixtures
def _expected_entity(info):
return _api_info_entity(info.id)
def create_info():
summary = _fixtures.summary()
return summary.info
class TestInfoGet(_asserts.GetEnforcer):
url = "/api/info/{}/"
public = True
def _create(self):
return create_info()
def test_get_info(self, client):
self.authenticate(client)
info = self._create()
response = client.get(f"/api/info/{info.id}/")
self.assert_200_ok(response, _expected_entity(info))
class TestInfoList(_asserts.ListEnforcer):
url = "/api/info/"
public = True
def _create(self):
return create_info()
def test_info_list(self, client):
self.authenticate(client)
info = self._create()
response = client.get("/api/info/")
self.assert_200_ok(response, contains=_expected_entity(info))
```
#### File: tests/api/test_users.py
```python
import copy
import pytest
from ...api._examples import _api_user_entity
from ...config import TestConfig
from ...entities._entity import NotFound
from ...entities.user import User
from ...tests.api import _asserts
def _expected_entity(user):
return _api_user_entity(user)
class TestUserGet(_asserts.GetEnforcer):
url = "/api/users/{}/"
def test_get_user(self, client):
self.authenticate(client)
other = self.create_random_user()
response = client.get(f"/api/users/{other.id}/")
self.assert_200_ok(response, _expected_entity(other))
class TestUserDelete(_asserts.DeleteEnforcer):
url = "/api/users/{}/"
def test_delete_user(self, client):
self.authenticate(client)
other = self.create_random_user()
# can get before delete
User.one(id=other.id)
# delete
response = client.delete(f"/api/users/{other.id}/")
self.assert_204_no_content(response)
# cannot get after delete
with pytest.raises(NotFound):
User.one(id=other.id)
class TestUserList(_asserts.ListEnforcer):
url = "/api/users/"
def test_user_list(self, client):
self.authenticate(client)
response = client.get("/api/users/")
self.assert_200_ok(response, contains=_expected_entity(self.fixture_user))
class TestUserPost(_asserts.PostEnforcer):
url = "/api/users/"
required_fields = ["email", "password", "name"]
valid_payload = {
"email": "<EMAIL>",
"password": "<PASSWORD>",
"name": "New name",
}
def setup(self):
User.delete_all()
def test_create_user(self, client):
self.authenticate(client)
response = client.post("/api/users/", json=self.valid_payload)
new_id = response.json["id"]
user = User.one(id=new_id)
location = "http://localhost/api/users/%s/" % new_id
self.assert_201_created(response, _expected_entity(user), location)
def test_invalid_email_address(self, client):
self.authenticate(client)
data = copy.deepcopy(self.valid_payload)
data["email"] = "not an email address"
response = client.post("/api/users/", json=data)
message = {"email": ["Not a valid email address."]}
self.assert_400_bad_request(response, message)
def test_duplicate_email_address(self, client):
self.authenticate(client)
client.post("/api/users/", json=self.valid_payload)
response = client.post("/api/users/", json=self.valid_payload)
message = {"email": ["Email address already in use."]}
self.assert_400_bad_request(response, message)
class TestUserPut(_asserts.PutEnforcer):
url = "/api/users/{}/"
valid_payload = {
"email": "<EMAIL>",
"password": "<PASSWORD>",
"name": "Updated name",
}
def setup(self):
User.delete_all()
def _create_entity_to_update(self):
user = User(name="Bryce", email="<EMAIL>")
user.set_password("<PASSWORD>")
user.save()
return user
def test_update_one_field(self, client):
self.authenticate(client)
# before
before = User.one(id=self.fixture_user.id)
assert before.name == "Fixture name"
assert before.email == "<EMAIL>"
assert before.check_password("fixture")
# update name
data = {"name": "Updated name"}
response = client.put(f"/api/users/{self.fixture_user.id}/", json=data)
# after
after = User.one(id=self.fixture_user.id)
self.assert_200_ok(response, _expected_entity(after))
assert after.name == "<NAME>"
assert after.email == "<EMAIL>"
assert after.check_password("<PASSWORD>")
def test_update_all_fields(self, client):
self.authenticate(client)
# before
before = User.one(id=self.fixture_user.id)
assert before.name == "<NAME>"
assert before.email == "<EMAIL>"
assert before.check_password("<PASSWORD>")
# update
data = self.valid_payload
response = client.put(f"/api/users/{self.fixture_user.id}/", json=data)
# after
after = User.one(id=self.fixture_user.id)
self.assert_200_ok(response, _expected_entity(after))
assert after.name == "<NAME>"
assert after.email == "<EMAIL>"
assert after.check_password("<PASSWORD>")
def test_invalid_email_address(self, client):
self.authenticate(client)
other = self.create_random_user()
data = copy.deepcopy(self.valid_payload)
data["email"] = "not an email address"
response = client.put(f"/api/users/{other.id}/", json=data)
message = {"email": ["Not a valid email address."]}
self.assert_400_bad_request(response, message)
def test_duplicate_email_address(self, client):
self.authenticate(client)
other = self.create_random_user()
data = {"email": self.fixture_user.email}
response = client.put(f"/api/users/{other.id}/", json=data)
message = {"email": ["Email address already in use."]}
self.assert_400_bad_request(response, message)
def test_unchanged_email_address(self, client):
self.authenticate(client)
other = self._create_entity_to_update()
data = {"email": other.email}
response = client.put(f"/api/users/{other.id}/", json=data)
self.assert_200_ok(response, _expected_entity(other))
class TestRegisterPost(_asserts.PostEnforcer):
url = "/api/register/"
required_fields = ["email", "password", "name", "secret"]
valid_payload = {
"email": "<EMAIL>",
"password": "<PASSWORD>",
"name": "Casey",
"secret": TestConfig.REGISTRATION_KEY,
}
def setup(self):
User.delete_all()
def test_register(self, client):
self.authenticate(client)
response = client.post("/api/register/", json=self.valid_payload)
new_id = response.json["id"]
user = User.one(id=new_id)
location = "http://localhost/api/users/%s/" % new_id
self.assert_201_created(response, _expected_entity(user), location)
def test_unauthenticated(self, client):
response = client.post("/api/register/", json=self.valid_payload)
new_id = response.json["id"]
user = User.one(id=new_id)
location = "http://localhost/api/users/%s/" % new_id
self.assert_201_created(response, _expected_entity(user), location)
def test_invalid_secret(self, client):
data = copy.deepcopy(self.valid_payload)
data["secret"] = "not the right registration code"
response = client.post("/api/register/", json=data)
message = {"secret": ["Invalid registration key."]}
self.assert_400_bad_request(response, message)
def test_invalid_email_address(self, client):
data = copy.deepcopy(self.valid_payload)
data["email"] = "not an email address"
response = client.post("/api/register/", json=data)
message = {"email": ["Not a valid email address."]}
self.assert_400_bad_request(response, message)
def test_duplicate_email_address(self, client):
client.post("/api/register/", json=self.valid_payload)
response = client.post("/api/register/", json=self.valid_payload)
message = {"email": ["Email address already in use."]}
self.assert_400_bad_request(response, message)
```
#### File: tests/app/test_auth.py
```python
from ...config import TestConfig
from ...tests.app import _asserts
class TestRegister(_asserts.AppEndpointTest):
def test_get_register_page_authenticated(self, client):
self.authenticate(client)
response = client.get("/register/", follow_redirects=True)
self.assert_index_page(response)
def test_register(self, client):
# go to register page
response = client.get("/register/")
self.assert_registration_page(response)
# register
data = {
"email": "<EMAIL>",
"name": "Register",
"password": "<PASSWORD>",
"password2": "<PASSWORD>",
"secret": TestConfig.REGISTRATION_KEY,
"csrf_token": self.get_csrf_token(response),
}
response = client.post("/register/", data=data, follow_redirects=True)
self.assert_login_page(response)
# make sure you can login with this new user
data = {
"email": "<EMAIL>",
"password": "<PASSWORD>",
"remember_me": True,
"csrf_token": self.get_csrf_token(response),
}
response = client.post("/login/", data=data, follow_redirects=True)
self.assert_index_page(response)
def test_email_address_already_in_use(self, client):
other = self.create_random_user()
# go to register page
response = client.get("/register/")
self.assert_registration_page(response)
# register
data = {
"email": other.email,
"name": "Register",
"password": "<PASSWORD>",
"password2": "<PASSWORD>",
"secret": TestConfig.REGISTRATION_KEY,
"remember_me": True,
"csrf_token": self.get_csrf_token(response),
}
response = client.post("/register/", data=data, follow_redirects=True)
# registration failed (still on the registration page)
self.assert_registration_page(response)
assert b"Email address already in use." in response.data
class TestLogin(_asserts.AppEndpointTest):
def test_get_login_page_authenticated(self, client):
self.authenticate(client)
response = client.get("/login/", follow_redirects=True)
self.assert_index_page(response)
def test_login(self, client):
self._create_fixture_user()
# go to login page
response = client.get("/login/")
self.assert_login_page(response)
# login submit
data = {
"email": "<EMAIL>",
"password": "<PASSWORD>",
"csrf_token": self.get_csrf_token(response),
}
response = client.post("/login/", data=data, follow_redirects=True)
self.assert_index_page(response)
def test_login_failed(self, client):
other = self.create_random_user()
# go to login page
response = client.get("/login/")
self.assert_login_page(response)
# login submit
data = {
"email": other.email,
"password": "<PASSWORD>",
"csrf_token": self.get_csrf_token(response),
}
response = client.post("/login/", data=data, follow_redirects=True)
# login failed (still on the login page)
self.assert_login_page(response)
assert b"Invalid email or password." in response.data
class TestLogout(_asserts.AppEndpointTest):
def test_logout_authenticated(self, client):
self.authenticate(client)
# can get users page before
response = client.get("/users/", follow_redirects=True)
self.assert_page(response, "Users")
# logout
response = client.get("/logout/", follow_redirects=True)
self.assert_index_page(response)
# cannot get users page after
response = client.get("/users/", follow_redirects=True)
self.assert_login_page(response)
def test_logout_unauthenticated(self, client):
# cannot get users page before
response = client.get("/users/", follow_redirects=True)
self.assert_login_page(response)
# logout
response = client.get("/logout/", follow_redirects=True)
self.assert_index_page(response)
# cannot get users page after
response = client.get("/users/", follow_redirects=True)
self.assert_login_page(response)
```
#### File: tests/app/test_runs.py
```python
from ...tests.api import _fixtures
from ...tests.app import _asserts
class TestRunGet(_asserts.GetEnforcer):
url = "/runs/{}/"
title = "Run"
redirect_on_unknown = False
def _create(self, client):
self.create_benchmark(client)
return _fixtures.VALID_PAYLOAD["run_id"]
class TestRunDelete(_asserts.DeleteEnforcer):
def test_authenticated(self, client):
self.create_benchmark(client)
run_id = _fixtures.VALID_PAYLOAD["run_id"]
self.authenticate(client)
response = client.get(f"/runs/{run_id}/")
self.assert_page(response, "Run")
assert f"{run_id}</li>".encode() in response.data
data = {"delete": ["Delete"], "csrf_token": self.get_csrf_token(response)}
response = client.post(f"/runs/{run_id}/", data=data, follow_redirects=True)
self.assert_page(response, "Home")
assert b"Run deleted." in response.data
response = client.get(f"/runs/{run_id}/", follow_redirects=True)
self.assert_page(response, "Run")
assert b"Error getting run." in response.data
def test_unauthenticated(self, client):
self.create_benchmark(client)
run_id = _fixtures.VALID_PAYLOAD["run_id"]
self.logout(client)
data = {"delete": ["Delete"]}
response = client.post(f"/runs/{run_id}/", data=data, follow_redirects=True)
self.assert_login_page(response)
def test_no_csrf_token(self, client):
self.create_benchmark(client)
run_id = _fixtures.VALID_PAYLOAD["run_id"]
self.authenticate(client)
data = {"delete": ["Delete"]}
response = client.post(f"/runs/{run_id}/", data=data, follow_redirects=True)
self.assert_page(response, "Home")
assert b"The CSRF token is missing." in response.data
```
#### File: tests/app/test_users.py
```python
from ...tests.app import _asserts
class TestUsers(_asserts.AppEndpointTest):
def test_user_list_authenticated(self, client):
self.authenticate(client)
other = self.create_random_user()
response = client.get("/users/")
self.assert_page(response, "Users")
assert "{}</td>".format(other.email).encode() in response.data
def test_user_list_unauthenticated(self, client):
response = client.get("/users/", follow_redirects=True)
self.assert_login_page(response)
class TestUser(_asserts.AppEndpointTest):
def test_user_get_authenticated(self, client):
self.authenticate(client)
other = self.create_random_user()
response = client.get(f"/users/{other.id}/")
self.assert_page(response, "User")
assert 'value="{}"'.format(other.name).encode() in response.data
def test_user_get_unauthenticated(self, client):
other = self.create_random_user()
response = client.get(f"/users/{other.id}/", follow_redirects=True)
self.assert_login_page(response)
def test_user_get_unknown(self, client):
self.authenticate(client)
response = client.get("/users/unknown/", follow_redirects=True)
self.assert_index_page(response)
assert b"Error getting user." in response.data
def test_user_update_authenticated(self, client):
self.authenticate(client)
other = self.create_random_user()
# go to user page
response = client.get(f"/users/{other.id}/")
self.assert_page(response, "User")
# update user
data = {
"name": "<NAME>",
"email": other.email,
"csrf_token": self.get_csrf_token(response),
}
response = client.post(f"/users/{other.id}/", data=data, follow_redirects=True)
self.assert_page(response, "User")
assert b"User updated." in response.data
assert b'value="New Name"' in response.data
def test_user_update_unauthenticated(self, client):
other = self.create_random_user()
response = client.post(f"/users/{other.id}/", data={}, follow_redirects=True)
self.assert_login_page(response)
def test_user_update_no_csrf_token(self, client):
self.authenticate(client)
other = self.create_random_user()
response = client.post(f"/users/{other.id}/", data={})
self.assert_page(response, "User")
assert b"The CSRF token is missing." in response.data
# TODO: assert name not updated?
def test_user_update_failed(self, client):
self.authenticate(client)
other = self.create_random_user()
response = client.post(f"/users/{other.id}/", data={"email": "Not an email"})
self.assert_page(response, "User")
assert b"Invalid email address." in response.data
def test_user_delete_authenticated(self, client):
self.authenticate(client)
other = self.create_random_user()
# can get user before
response = client.get(f"/users/{other.id}/")
self.assert_page(response, "User")
assert 'value="{}"'.format(other.name).encode() in response.data
# delete user
data = {"delete": ["Delete"], "csrf_token": self.get_csrf_token(response)}
response = client.post(f"/users/{other.id}/", data=data, follow_redirects=True)
self.assert_page(response, "Users")
assert b"User deleted." in response.data
# cannot get user after
response = client.get(f"/users/{other.id}/", follow_redirects=True)
self.assert_index_page(response)
assert b"Error getting user." in response.data
def test_user_delete_unauthenticated(self, client):
other = self.create_random_user()
data = {"delete": ["Delete"]}
response = client.post(f"/users/{other.id}/", data=data, follow_redirects=True)
self.assert_login_page(response)
def test_user_delete_no_csrf_token(self, client):
self.authenticate(client)
other = self.create_random_user()
data = {"delete": ["Delete"]}
response = client.post(f"/users/{other.id}/", data=data, follow_redirects=True)
self.assert_page(response, "User")
assert b"The CSRF token is missing." in response.data
# TODO: test user not deleted?
class TestUserCreate(_asserts.AppEndpointTest):
def test_user_create_get_authenticated(self, client):
self.authenticate(client)
response = client.get("/users/create/")
self.assert_page(response, "User Create")
def test_user_create_get_unauthenticated(self, client):
response = client.get("/users/create/", follow_redirects=True)
self.assert_login_page(response)
def test_user_create_post_authenticated(self, client):
self.authenticate(client)
# go to user create page
response = client.get("/users/create/")
self.assert_page(response, "User Create")
# create user
data = {
"email": "<EMAIL>",
"name": "<NAME>",
"password": "password",
"csrf_token": self.get_csrf_token(response),
}
response = client.post("/users/create/", data=data, follow_redirects=True)
self.assert_page(response, "Users")
assert b"User created." in response.data
def test_user_create_post_unauthenticated(self, client):
response = client.post("/users/create/", data={}, follow_redirects=True)
self.assert_login_page(response)
def test_user_create_post_no_csrf_token(self, client):
self.authenticate(client)
response = client.post("/users/create/", data={})
self.assert_page(response, "User Create")
assert b"The CSRF token is missing." in response.data
```
#### File: migrations/versions/847b0850ea81_drop_sha_and_repository.py
```python
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "847b0850ea81"
down_revision = "c181484ce40f"
branch_labels = None
depends_on = None
def upgrade():
op.alter_column(
"distribution", "commit_id", existing_type=sa.VARCHAR(length=50), nullable=False
)
op.drop_index("distribution_repository_index", table_name="distribution")
op.drop_index("distribution_sha_index", table_name="distribution")
op.drop_index("distribution_index", table_name="distribution")
op.create_index(
"distribution_index",
"distribution",
["case_id", "context_id", "commit_id", "machine_hash"],
unique=True,
)
op.drop_column("distribution", "sha")
op.drop_column("distribution", "repository")
def downgrade():
op.add_column(
"distribution",
sa.Column(
"repository", sa.VARCHAR(length=100), autoincrement=False, nullable=False
),
)
op.add_column(
"distribution",
sa.Column("sha", sa.VARCHAR(length=50), autoincrement=False, nullable=False),
)
op.drop_index("distribution_index", table_name="distribution")
op.create_index(
"distribution_index",
"distribution",
["sha", "case_id", "context_id", "machine_hash"],
unique=False,
)
op.create_index("distribution_sha_index", "distribution", ["sha"], unique=False)
op.create_index(
"distribution_repository_index", "distribution", ["repository"], unique=False
)
op.alter_column(
"distribution", "commit_id", existing_type=sa.VARCHAR(length=50), nullable=True
)
```
#### File: migrations/versions/8a72866f991c_gpu_info_not_null.py
```python
import sqlalchemy as sa
from alembic import op
from sqlalchemy import MetaData
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision = "<KEY>"
down_revision = "064a0de5f947"
branch_labels = None
depends_on = None
def upgrade():
connection = op.get_bind()
meta = MetaData()
meta.reflect(bind=connection)
machine_table = meta.tables["machine"]
connection.execute(
machine_table.update()
.where(machine_table.c.gpu_count == None) # noqa
.values(gpu_count=0)
)
connection.execute(
machine_table.update()
.where(machine_table.c.gpu_product_names == None) # noqa
.values(gpu_product_names=[])
)
op.alter_column("machine", "gpu_count", existing_type=sa.INTEGER(), nullable=False)
op.alter_column(
"machine",
"gpu_product_names",
existing_type=postgresql.ARRAY(sa.TEXT()),
nullable=False,
)
def downgrade():
op.alter_column(
"machine",
"gpu_product_names",
existing_type=postgresql.ARRAY(sa.TEXT()),
nullable=True,
)
op.alter_column("machine", "gpu_count", existing_type=sa.INTEGER(), nullable=True)
```
#### File: migrations/versions/f542b3646629_info_data.py
```python
import logging
import uuid
from alembic import op
from sqlalchemy import MetaData, distinct, select
# revision identifiers, used by Alembic.
revision = "<KEY>"
down_revision = "3ddd66ca34f2"
branch_labels = None
depends_on = None
def upgrade():
connection = op.get_bind()
meta = MetaData()
meta.reflect(bind=connection)
info_table = meta.tables["info"]
context_table = meta.tables["context"]
summary_table = meta.tables["summary"]
contexts = list(connection.execute(context_table.select()))
num = len(contexts)
for i, context in enumerate(contexts):
logging.info(f"f542b3646629: Migrating context {i + 1} of {num}")
info_tags, context_tags = {}, {}
if "arrow_compiler_flags" in context.tags:
info_tags = dict(context.tags)
context_tags = {
"benchmark_language": info_tags.pop("benchmark_language"),
"arrow_compiler_flags": info_tags.pop("arrow_compiler_flags"),
}
else:
context_tags = dict(context.tags)
keys = [
"description",
"benchmark_language_version",
"data_path",
]
for key in keys:
if key in context_tags:
value = context_tags.pop(key)
if value:
info_tags[key] = value
keys = list(context_tags.keys())
for key in keys:
if key.endswith("_version"):
value = context_tags.pop(key)
if value:
info_tags[key] = value
if not info_tags:
continue
other_info = connection.execute(
info_table.select().where(
info_table.c.tags == info_tags,
)
).fetchone()
if other_info:
connection.execute(
summary_table.update()
.where(summary_table.c.context_id == context.id)
.values(info_id=other_info.id)
)
else:
new_info_id = uuid.uuid4().hex
connection.execute(
info_table.insert().values(
id=new_info_id,
tags=info_tags,
)
)
connection.execute(
summary_table.update()
.where(summary_table.c.context_id == context.id)
.values(info_id=new_info_id)
)
other_context = connection.execute(
context_table.select().where(
context_table.c.tags == context_tags,
)
).fetchone()
if other_context:
connection.execute(
summary_table.update()
.where(summary_table.c.context_id == context.id)
.values(context_id=other_context.id)
)
else:
new_context_id = uuid.uuid4().hex
connection.execute(
context_table.insert().values(
id=new_context_id,
tags=context_tags,
)
)
connection.execute(
summary_table.update()
.where(summary_table.c.context_id == context.id)
.values(context_id=new_context_id)
)
context_ids = []
logging.info("f542b3646629: Getting distinct contexts")
result = connection.execute(select(distinct(summary_table.c.context_id)))
for row in result:
context_id = row[0]
context_ids.append(context_id)
info_ids = []
logging.info("f542b3646629: Getting distinct info")
result = connection.execute(select(distinct(summary_table.c.info_id)))
for row in result:
info_id = row[0]
info_ids.append(info_id)
logging.info("f542b3646629: Deleting unused contexts")
for context in connection.execute(context_table.select()):
if context.id not in context_ids:
connection.execute(
context_table.delete().where(context_table.c.id == context.id)
)
logging.info("f542b3646629: Deleting unused info")
for info in connection.execute(info_table.select()):
if info.id not in info_ids:
connection.execute(info_table.delete().where(info_table.c.id == info.id))
def downgrade():
pass
``` |
{
"source": "jonkeane/makePDF",
"score": 3
} |
#### File: makePDF/tests/test_utils.py
```python
import pytest
from unittest import TestCase
from PIL import Image
import sys, os, shutil, uuid
myPath = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, myPath + "/../src/makePDF/") # add the src directory to the import path
import utils
class TestUtils(TestCase):
def test_has_alpha(self):
assert utils.has_alpha('tests/images/travis-pride.png')
assert not utils.has_alpha('tests/images/travis-pride-sans-alpha.png')
def test_remove_alpha(self):
new_image = utils.remove_alpha('tests/images/travis-pride.png', 'tests/images/travis-pride-temp.png')
img = Image.open(new_image, 'r')
assert img.mode != 'RGBA'
os.unlink(new_image)
def test_which(self):
eless = utils.which("ls")
assert eless.endswith("ls")
not_a_thing = utils.which("thisisntathing")
assert not_a_thing is None
``` |
{
"source": "jonkeane/skeleton",
"score": 3
} |
#### File: skeleton/tests/test_add_one.py
```python
import pytest
from unittest import TestCase
from skeleton import addition
class TestOne(TestCase):
def test_add(self):
assert addition.add_one(1) == 2
assert addition.add_one(2) == 3
def test_add_too_big(self):
with pytest.raises(ValueError):
addition.add_one(100)
``` |
{
"source": "jonkeane/vatic-checker",
"score": 3
} |
#### File: src/vatic_checker/handler.py
```python
import json
from database import session
handlers = {}
import logging
logger = logging.getLogger("vatic.server")
try:
from wsgilog import log as wsgilog
except ImportError:
def wsgilog(*args, **kwargs):
return lambda x: x
def handler(type = "json", jsonify = None, post = False, environ = False):
"""
Decorator to bind a function as a handler in the server software.
type specifies the Content-Type header
jsonify dumps data in json format if true
    environ gives handler full control of environ if true
"""
type = type.lower()
if type == "json" and jsonify is None:
jsonify = True
        type = "text/json"
def decorator(func):
handlers[func.__name__] = (func, type, jsonify, post, environ)
return func
return decorator
@wsgilog(tostream=True)
def application(environ, start_response):
"""
Dispatches the server application through a handler. Specify a handler
with the 'handler' decorator.
"""
path = environ.get("PATH_INFO", "").lstrip("/").split("/")
logger.info("Got HTTP request: {0}".format("/".join(path)))
try:
action = path[0]
except IndexError:
raise Error404("Missing action.")
try:
handler, type, jsonify, post, passenviron = handlers[action]
except KeyError:
start_response("200 OK", [("Content-Type", "text/plain")])
return ["Error 404\n", "Action {0} undefined.".format(action)]
try:
args = path[1:]
if post:
postdata = environ["wsgi.input"].read()
if post == "json":
args.append(json.loads(postdata))
else:
args.append(postdata)
if passenviron:
args.append(environ)
try:
response = handler(*args)
finally:
session.remove()
except Error404 as e:
start_response("404 Not Found", [("Content-Type", "text/plain")])
return ["Error 404\n", str(e)]
except Error500 as e:
start_response("500 Internal Serve Error", [("Content-Type", "text/plain")])
return [str(e)]
else:
start_response("200 OK", [("Content-Type", type)])
if jsonify:
logger.debug("Response to " + str("/".join(path)) + ": " +
str(response)[0:100])
return [json.dumps(response)]
else:
return response
class Error404(Exception):
"""
    Exception indicating that a 404 error occurred.
"""
def __init__(self, message):
Exception.__init__(self, message)
class Error500(Exception):
"""
    Exception indicating that a 500 error occurred.
"""
def __init__(self, message):
Exception.__init__(self, message)
```
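For orientation, here is a minimal sketch (not part of the repo) of how a function registered with `@handler` gets served. The `version` endpoint and the wsgiref server are assumptions for illustration, run under Python 2, which this project targets:
```python
# Hypothetical usage of the handler module above: register an action
# and serve the dispatching WSGI application with the stdlib server.
from wsgiref.simple_server import make_server

from vatic_checker.handler import handler, application

@handler()
def version():
    # dispatched for GET /version; application() serializes the dict with json.dumps
    return {"name": "vatic-checker", "version": "0.1"}

if __name__ == "__main__":
    make_server("localhost", 8000, application).serve_forever()
```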
#### File: src/vatic_checker/server.py
```python
import os.path, sys
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
from future.standard_library import install_aliases
install_aliases()
from urllib.parse import urlparse, urlencode
from urllib.request import urlopen, Request
from urllib.error import HTTPError
import json, re, datetime, uuid, logging, urllib
from sqlalchemy import and_, func, distinct, desc
from sqlalchemy.orm import aliased
from sqlalchemy.orm.exc import NoResultFound, MultipleResultsFound
# import from vatic_checker though this will make testing more difficult
from vatic_checker.handler import handler, application, Error500
from vatic_checker.database import session
import vatic_checker.model as model
import config
# setup handler
ch = logging.StreamHandler(sys.stdout)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
ch.setFormatter(formatter)
# setup the vatic parent logger
logger_checker = logging.getLogger("checker-server")
logger_checker.setLevel(logging.ERROR)
logger_checker.addHandler(ch)
# instantiate the server logger
logger = logging.getLogger("checker-server")
# emit test messages to confirm which log level is active.
logger.error("This is a 40 (error)")
logger.debug("This is a 10 (debug)")
@handler()
def get_next(userid):
"""
Returns the next video to annotate for a user if the user has completed
training. If they have not completed the training, return a training video
instead.
"""
status = {}
# TODO: error gracefully if not a UUID?
user = session.query(model.User)
user = user.filter(model.User.guid == userid)
# TODO: error gracefully if less than one or more than one returned
user = user.one()
# retrieve training status, defaults to false
status["trained"] = user.completed_training
# check if there are at least N training videos annotated
if status["trained"] == False:
training_check = check_training_completion(user, config.min_training)
if training_check:
# update the database so we don't have to check each time
user.completed_training = training_check
session.commit()
status["trained"] = training_check
if hasVideosLeft(user):
if status["trained"]:
status.update(getNextLeastAnnoedVideo(user))
else:
status.update(getNextTrainingVideo(user))
else:
status["no_videos_left"] = True
return status
def hasVideosLeft(user):
"""
Returns True if the user has videos left to annotate. This allows us to
restrict the annotations to one per video
"""
# check the setting in config, default to true. If we allow duplicate
# annotations (that is duplicate_annotations is True), return True
# quickly without checking if the user has already annotated some.
if config.duplicate_annotations:
return True
current_annotations = session.query(model.Annotation).filter(model.Annotation.user_guid == user.guid)
subq = session.query(model.Annotation).\
filter(model.Annotation.user_guid == user.guid).\
subquery('sub')
videos_annoed = session.query(model.Video, subq.c.text).\
outerjoin(subq, subq.c.video_id == model.Video.id)
if videos_annoed.filter(subq.c.text == None).count() > 0:
return True
else:
return False
def getNextLeastAnnoedVideo(user):
"""
Returns the next video to annotate for a given user. This video will be the
least annotated video by the user or everyone else.
"""
# get the user id
user_guid = user.guid
return_value = {}
# make aliases for the joins later
annotations_current_user = aliased(model.Annotation)
annotations_all = aliased(model.Annotation)
videos = session.query(model.Video)
# add the number of annotations for the current user
videos = videos.add_column(
func.count(distinct(annotations_current_user.user_guid).label('annos_current_user'))).\
outerjoin(annotations_current_user,
and_(annotations_current_user.video_id == model.Video.id,
annotations_current_user.user_guid == user_guid)).\
group_by(model.Video.id)
# add the number of annotations for all users
videos = videos.add_column(
func.count(annotations_all.user_guid.label('total_annos'))).\
outerjoin(annotations_all, annotations_all.video_id == model.Video.id).\
group_by(model.Video.id)
# order so that if the current user hasn't annotated, that is preferred, but
    # if there is a tie, then get the next least annotated video.
# count_1 = annos_current_user
# count_2 = total_annos
videos = videos.order_by('count_1', 'count_2', func.rand())
try:
video = videos.first()
except NoResultFound:
logger.error("Found no videos available for annotations for user: %s" % (user_guid))
return None
return_value["video_id"] = video[0].id
return_value["name"] = video[0].name
return_value["start"] = 0 # the start frame is always 0
return_value["end"] = video[0].num_frames - 1 # bc zero-indexing, need to subtract one
return_value["width"] = video[0].width
return_value["height"] = video[0].height
return return_value
def getNextTrainingVideo(user):
"""
Returns the next training video to annotate for a given user.
"""
# get the user id
user_guid = user.guid
return_value = {}
training_videos = session.query(model.TrainingVideo, model.Training)
# add the number of annotations for the current user
training_videos = training_videos.\
outerjoin(model.Training,
and_(model.Training.video_id == model.TrainingVideo.id,
model.Training.user_guid == user_guid)).\
filter(model.Training.success == None)
# randomize
training_videos = training_videos.order_by(func.rand())
try:
training_videos = training_videos.first()
except NoResultFound:
# TODO: don't try to get the next training video if none are left?
logger.error("Found no training videos available for annotations for user: %s" % (user_guid))
return None
return_value["video_id"] = training_videos[0].id
return_value["name"] = training_videos[0].name
return_value["start"] = 0 # the start frame is always 0
return_value["end"] = training_videos[0].num_frames - 1 # bc zero-indexing, need to subtract one
return_value["width"] = training_videos[0].width
return_value["height"] = training_videos[0].height
return return_value
def parse_annotation(postdata):
out = {}
# TODO: add try/excepts throughout here
    # strip any characters that are not in our annotation set
    # SQL alchemy should quote special characters, but this is a good defense as well.
    # This allows all letters, numbers, ?, *, [, ], :, #, !, -
    out["anno"] = re.sub("[^A-Za-z0-9 \?\*\[\]\:\#\!\\-]", "", postdata.get('anno_value', ''))
out["video_id"] = int(postdata.get('video_id', None))
out["user_guid"] = uuid.UUID(postdata.get('user_guid', None))
return out
def check_training_completion(user, n):
"""
Check if a user has trained enough
"""
# confirm that n is less than or equal to available training videos
training_videos = session.query(model.TrainingVideo)
if training_videos.count() < n:
logger.error("There are fewer training videos than the setting min_training. Change the min_training value in the config file to fix this.")
raise Error500("Server misconfiguration: There are fewer training videos than the setting min_training. Please try reconfiguring and trying again.")
# get the user id
user_guid = user.guid
trainings = session.query(model.Training.success, model.Training.video_id).\
filter(model.Training.user_guid == user_guid).\
group_by(model.Training.video_id).\
filter(model.Training.success == True)
if trainings.count() >= n:
print("setting training to true")
user.trained = True
session.commit()
return True
# return false otherwise
return False
@handler(post = "json")
def save_annotation(video_id, postdata):
"""
Saves annotation for a regular video
"""
data = parse_annotation(postdata)
logger.debug("Saving annotation: annotation {0}; video_id {1}; user id {2}".format(
data["anno"], data["video_id"], data["user_guid"])
)
# TODO: try/catch in case there is an issue with guid or video_id
new_anno = model.Annotation(
text = data["anno"],
user_guid = data["user_guid"],
video_id = data["video_id"],
timestamp = datetime.datetime.utcnow()
)
session.add(new_anno)
session.commit()
logger.debug("Saved")
return True
@handler(post = "json")
def save_training(video_id, postdata):
"""
Saves annotation for a training video
"""
data = parse_annotation(postdata)
# start off skeptical
success = False
# check if the label matches
training_video = session.query(model.TrainingVideo).\
filter(model.TrainingVideo.id == data["video_id"])
# TODO: catch if the video doesn't match anything
training_video = training_video.one()
if data["anno"] == training_video.gold_standard_label:
success = True
if success:
logger.debug(
"Saving training: annotation {0}; video_id {1}; user id {2}".format(
data["anno"], data["video_id"], data["user_guid"])
)
# TODO: try/catch in case there is an issue with guid or video_id
new_anno = model.Training(
text = data["anno"],
user_guid = data["user_guid"],
video_id = data["video_id"],
timestamp = datetime.datetime.utcnow(),
success = success
)
session.add(new_anno)
session.commit()
logger.debug("Saved")
return "all good"
else:
return "borked"
@handler(post = "json")
def login(params, postdata):
"""
Returns the guid of the user if they exist
"""
out = {}
print(postdata)
    # verify the recaptcha is correct
recaptcha_url = "https://www.google.com/recaptcha/api/siteverify"
data = {
"secret": config.recaptcha_secret,
"response": postdata.get('recaptcha', '')
}
    request = Request(recaptcha_url, data=urlencode(data).encode("ascii"))
resp = urlopen(request)
resp = json.loads(resp.read())
    # if the captcha wasn't verified return false.
if resp['success'] is not True:
logger.debug("The recaptcha has failed, returning an error.")
out['success'] = False
out['reason'] = "Could not login"
return out
    # strip all non-alphanumeric characters
    username = re.sub("[^A-Za-z0-9]", "", postdata.get('username', ''))
user = session.query(model.User)
user = user.filter(model.User.username == username)
# TODO: error gracefully if less than one or more than one returned
try:
user = user.one()
except NoResultFound:
out['success'] = False
out['reason'] = "Incorrect username"
return out
except MultipleResultsFound:
logger.error("There is more than one user with that username. You must delete one from the database.")
raise Error500("More than one user with that name. Please contact the researcher.")
out['success'] = True
out['user_guid'] = str(user.guid)
out['can_see_status'] = str(user.can_see_status)
return out
@handler()
def status(userid):
"""
Returns the information needed for status
"""
status = {}
# TODO: error gracefully if not a UUID?
user = session.query(model.User)
user = user.filter(model.User.guid == userid)
# TODO: error gracefully if less than one or more than one returned
user = user.one()
# retrieve training status, defaults to false
status["can_see_status"] = user.can_see_status
# check if there are at least N training videos annotated
# TODO: should this return a status and have the UI handle this nicely?
if status["can_see_status"] == False:
logger.error("This user isn't authorized to see the status page.")
raise Error500("No access. Please contact the researcher.")
# find the status information for annotations (similar to cli annotationstats)
videos = session.query(model.Video)
total_videos = videos.count()
# grab the users' annotations
current_annotations = session.\
query(model.User.username, model.User.completed_training).\
outerjoin(model.Annotation, model.User.guid == model.Annotation.user_guid).\
group_by(model.User.guid).\
add_column(
func.count(distinct(model.Annotation.video_id).label('annos_current_user')))
anno_results = session.execute(current_annotations)
anno_results_dict = [dict(zip(["username", "completed_training", "annotations_completed"], row)) for row in anno_results]
status["annotation_results"] = anno_results_dict
status["total_videos"] = total_videos
# find the status information for videos (similar to cli videostats)
# grab the users' annotations
users = session.query(model.User)
total_users = users.count()
# grab the videos' annotations
current_videos = session.\
query(model.Video.id, model.Video.name).\
outerjoin(model.Annotation, model.Video.id == model.Annotation.video_id).\
group_by(model.Video.name).\
add_column(
func.count(distinct(model.Annotation.id).label('annos_current_user')))
video_results = session.execute(current_videos)
video_results_dict = [dict(zip(["video_id", "video_name", "annotations_completed"], row)) for row in video_results]
status["video_results"] = video_results_dict
status["total_users"] = total_users
return status
```
#### File: tests/unit/test_server.py
```python
import pytest
import sys, os, shutil, uuid
myPath = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, myPath + "/../../src/vatic_checker/") # add the src directory to the import path
import vatic_checker.model as model
import server
from vatic_checker.handler import Error500
from sqlalchemy import and_, func, distinct, desc
from sqlalchemy.orm import aliased
from alchemy_mock.mocking import UnifiedAlchemyMagicMock, AlchemyMagicMock
import argparse
from unittest import TestCase
try:
from unittest.mock import patch, call # python 3.3+
except ImportError:
from mock import patch, call # python 2.6-3.2
# current working directory
cwd = os.getcwd()
# mock the session object from cli so that the database isn't harmed.
untrained_user = model.User(
guid=uuid.UUID("87599456d52b48b78a6b1b5b0ded7be1"),
username="untrained",
completed_training=False)
trained_user = model.User(
guid=uuid.UUID("ea3f15f611084c3ba84b8068f5234928"),
username="trained",
completed_training=True)
finished_user = model.User(
guid=uuid.UUID("465db3c5171a4ddc82ee35de3d31ab40"),
username="finished",
completed_training=True)
annotations_current_user = aliased(model.Annotation)
annotations_all = aliased(model.Annotation)
session = UnifiedAlchemyMagicMock(data = [
(
[call.query(model.TrainingVideo)],
[model.TrainingVideo(id = 1),model.TrainingVideo(id = 2)]
),
(
[call.query(model.Video)],
[model.Video(id = 1),model.Video(id = 2)]
),
(
[call.query(model.User), call.filter(model.User.guid == untrained_user.guid)],
[untrained_user]
),
(
[call.query(model.User), call.filter(model.User.guid == trained_user.guid)],
[trained_user]
),
# for test_enough_annoed_videos
(
[call.query(model.Training.success, model.Training.video_id),
call.group_by(model.Training.video_id),
call.filter(model.Training.success == True, model.Training.user_guid == untrained_user.guid)],
[model.TrainingVideo(id = 1)]
),
(
[call.query(model.Annotation),
call.filter(model.Annotation.user_guid == trained_user.guid),
call.subquery('sub')],
[model.Annotation(video_id = 1)]
),
])
class TestHandler(TestCase):
@patch('server.session', new=session)
def test_training_warns_if_not_enough_training_videos(self):
session.reset_mock()
with pytest.raises(Error500) as e_info:
status = server.check_training_completion(untrained_user, 100)
@patch('server.session', new=session)
def test_not_enough_annoed_videos(self):
session.reset_mock()
status = server.check_training_completion(untrained_user, 2)
self.assertFalse(status)
@patch('server.session', new=session)
def test_enough_annoed_videos(self):
session.reset_mock()
status = server.check_training_completion(untrained_user, 1)
self.assertTrue(status)
# and there's a commit that is updating the user
session.commit.assert_called_once()
# this needs to be tested with get_next() not with check_training_completion
# @patch('server.session', new=session)
# def test_already_verified(self):
# session.reset_mock()
# status = server.check_training_completion(trained_user, 2)
# self.assertTrue(status)
def test_parse_anno_sql_cleansing(self):
new_guid = uuid.uuid4()
postdata = {'anno_value': "foo;bar", "video_id": 1, "user_guid": str(new_guid)}
out = server.parse_annotation(postdata)
self.assertEqual(out['anno'], "foobar")
self.assertEqual(out['video_id'], 1)
self.assertEqual(out['user_guid'], new_guid)
    def test_parse_anno_ok_with_allowed_characters(self):
new_guid = uuid.uuid4()
postdata = {'anno_value': "foo?bar", "video_id": 1, "user_guid": str(new_guid)}
out = server.parse_annotation(postdata)
self.assertEqual(out['anno'], "foo?bar")
self.assertEqual(out['video_id'], 1)
self.assertEqual(out['user_guid'], new_guid)
``` |
{
"source": "jonkelleyatrackspace/canakit1104-api",
"score": 2
} |
#### File: canakit1104-api/relayserver/app.py
```python
from flask import Flask # Everything kinda builds from here.
from flask import request # Required to access request.method etc
from flask import Response # Lets us get fancy with responses.
########################################################################
# converts http errors to json, nice.
from flask import jsonify
from werkzeug.exceptions import default_exceptions
from werkzeug.exceptions import HTTPException
def make_json_error(ex):
response = jsonify(message=str(ex))
response.status_code = (ex.code
if isinstance(ex, HTTPException)
else 500)
return response
########################################################################
# logging
from classes.logsystem import LogClass # Logging is fanatical.
logclass = LogClass() # Instantiate or whatever you call it.
########################################################################
# Begin app
app = Flask('RelayServer')
# This ensures errors return as json.
for code in default_exceptions.iterkeys():
app.error_handler_spec[None][code] = make_json_error
# Yay, we survived instantiation.
logclass.logger.info('Relayserver starting!')
# Test route.
@app.route('/',methods=['GET'])
def root():
message = "{\n \"message\": \"\033[1;34mACK -> %s, I am \033[1;31mmodula\033[0m \033[1;34m a modular flask based API\033[0m\" \n}" %( str(request.remote_addr) )
return Response(message,200,mimetype='application/json')
import os, imp
def load_blueprints():
"""
This code looks for any modules or packages in the given directory, loads them
and then registers a blueprint - blueprints must be created with the name 'module'
Implemented directory scan
Bulk of the code taken from:
https://github.com/smartboyathome/Cheshire-Engine/blob/master/ScoringServer/utils.py
"""
logclass.logger.info('Registering blueprints!')
path = 'blueprints'
dir_list = os.listdir(path)
mods = {}
for fname in dir_list:
if os.path.isdir(os.path.join(path, fname)) and os.path.exists(os.path.join(path, fname, '__init__.py')):
try:
logclass.logger.info('Registering blueprint (DIRECTORY) ... %s', fname)
f, filename, descr = imp.find_module(fname, [path])
mods[fname] = imp.load_module(fname, f, filename, descr)
app.register_blueprint(getattr(mods[fname], 'module'))
except:
logclass.logger.critical('Blueprint registration in subdir (' + str(fname) +
                    ') failed. Part of your API did not load. Recovering...', exc_info=True)
elif os.path.isfile(os.path.join(path, fname)):
name, ext = os.path.splitext(fname)
if ext == '.py' and not name == '__init__':
try:
logclass.logger.info('Registering blueprint ... %s', fname)
f, filename, descr = imp.find_module(name, [path])
mods[fname] = imp.load_module(name, f, filename, descr)
app.register_blueprint(getattr(mods[fname], 'module'))
except:
logclass.logger.critical('Blueprint registration (' + str(fname) +
') failed. Part of your API did not load. Skipping module...', exc_info=True)
load_blueprints()
# def load(app):
# from simple_page import simple_page
# app.register_blueprint(simple_page.module)
if __name__ == '__main__':
app.run(debug=True)
``` |
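Since `load_blueprints()` fetches the attribute named `module` from each discovered file, a minimal blueprint might look like this hypothetical sketch (file name and route are invented):
```python
# blueprints/hello.py -- a hypothetical blueprint that load_blueprints()
# can discover; the attribute MUST be named 'module', because the loader
# calls getattr(mod, 'module') when registering.
from flask import Blueprint, Response

module = Blueprint('hello', __name__)

@module.route('/hello', methods=['GET'])
def hello():
    return Response('{"message": "hello"}', 200, mimetype='application/json')
```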
{
"source": "jonkelleyatrackspace/ops_api",
"score": 2
} |
#### File: ops_api/opsapi/config.py
```python
import copy
import logging
import yaml
log = logging.getLogger(__name__)
# Contains default mappings before yaml loads
default_mappings = {}
default_mappings['disable_debug_console'] = False
default_mappings['output_highlighter'] = False
class Config(dict):
""" Configuration dictionary """
def __init__(self, *args, **kwargs):
dict.__init__(self, *args, **kwargs)
def load_file(self, file_name):
log.info("loading config singleton from file {n}".format(n=file_name))
        with open(file_name, 'r') as config_file:
            data = yaml.safe_load(config_file)
if not isinstance(data, dict):
raise Exception("config file not parsed correctly")
log.info("objects in config {objs}".format(objs=data))
deep_merge(self, data)
def deep_merge(orig, other):
""" Modify orig, overlaying information from other """
for key, value in other.items():
if key in orig and isinstance(orig[key], dict) and isinstance(value, dict):
deep_merge(orig[key], value)
else:
orig[key] = value
#
# Singleton Instance
#
config = Config()
``` |
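A short illustration of the `deep_merge` semantics above; the config values are hypothetical:
```python
# deep_merge overlays nested dicts key-by-key instead of replacing whole
# subtrees; scalar values from `other` win on conflicts.
base = {"server": {"host": "0.0.0.0", "port": 8080}, "debug": False}
override = {"server": {"port": 9090}}
deep_merge(base, override)
assert base == {"server": {"host": "0.0.0.0", "port": 9090}, "debug": False}
```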
{
"source": "jonkensta/inmate-providers",
"score": 3
} |
#### File: inmate-providers/pymates/fbop.py
```python
import json
import urllib
import logging
from datetime import date, datetime
LOGGER = logging.getLogger("PROVIDERS.FBOP")
URL = "https://www.bop.gov/PublicInfo/execute/inmateloc"
TEXAS_UNITS = {
"BAS",
"BML",
"BMM",
"BMP",
"BSC",
"BIG",
"BRY",
"CRW",
"EDN",
"FTW",
"DAL",
"HOU",
"LAT",
"REE",
"RVS",
"SEA",
"TEX",
"TRV",
}
SPECIAL_UNITS = {"TEMP RELEASE", "IN TRANSIT"}
def query_by_name(first, last, timeout=None):
"""Query the FBOP database with an inmate name."""
LOGGER.debug("Querying with name %s, %s", last, first)
matches = _query_helper(nameFirst=first, nameLast=last, timeout=timeout)
if not matches:
LOGGER.debug("No results were returned")
return []
LOGGER.debug("%d result(s) returned", len(matches))
return matches
def query_by_inmate_id(inmate_id, timeout=None):
"""Query the FBOP database with an inmate ID."""
try:
inmate_id = format_inmate_id(inmate_id)
except ValueError as exc:
msg = f"'{inmate_id}' is not a valid Federal inmate number"
raise ValueError(msg) from exc
LOGGER.debug("Querying with ID %s", inmate_id)
matches = _query_helper(inmateNum=inmate_id, timeout=timeout)
if not matches:
LOGGER.debug("No results were returned")
return []
if len(matches) > 1:
LOGGER.error("Multiple results were returned for an ID query")
return matches
LOGGER.debug("A single result was returned")
return matches
def format_inmate_id(inmate_id):
"""Format FBOP inmate IDs."""
try:
inmate_id = int(str(inmate_id).replace("-", ""))
except ValueError as exc:
raise ValueError("inmate ID must be a number (dashes are okay)") from exc
inmate_id = "{:08d}".format(inmate_id)
if len(inmate_id) != 8:
raise ValueError("inmate ID must be less than 8 digits")
return inmate_id[0:5] + "-" + inmate_id[5:8]
def _query_helper(timeout=None, **kwargs):
"""Private helper for querying FBOP."""
params = {
"age": "",
"inmateNum": "",
"nameFirst": "",
"nameLast": "",
"nameMiddle": "",
"output": "json",
"race": "",
"sex": "",
"todo": "query",
}
params.update(kwargs)
params = urllib.parse.urlencode(params).encode("ascii")
try:
response = urllib.request.urlopen(URL, params, timeout)
except urllib.error.URLError as exc:
exc_class_name = exc.__class__.__name__
LOGGER.error("Query returned %s request exception", exc_class_name)
raise
try:
data = json.loads(response.read())["InmateLocator"]
except KeyError:
return []
inmates = map(_data_to_inmate, data)
inmates = filter(_is_in_texas, inmates)
inmates = filter(_has_not_been_released, inmates)
inmates = list(inmates)
for inmate in inmates:
last, first = inmate["last_name"], inmate["first_name"]
id_ = inmate["id"]
LOGGER.debug("%s, %s #%s: MATCHES", last, first, id_)
return inmates
def _has_not_been_released(inmate):
"""Private helper for checking if an inmate has been released."""
try:
released = date.today() >= inmate["release"]
except TypeError:
# release can be a string for life sentence, etc
released = False
return not released
def _is_in_texas(inmate):
"""Private helper for checking if an inmate is in Texas."""
return inmate["unit"] in set.union(TEXAS_UNITS, SPECIAL_UNITS)
def _data_to_inmate(entry):
"""Private helper for formatting the FBOP JSON output."""
inmate = dict()
inmate["id"] = entry["inmateNum"]
inmate["jurisdiction"] = "Federal"
inmate["first_name"] = entry["nameFirst"]
inmate["last_name"] = entry["nameLast"]
inmate["unit"] = entry["faclCode"] or None
inmate["race"] = entry.get("race")
inmate["sex"] = entry.get("sex")
inmate["url"] = None
def parse_date(datestr):
"""Parse an FBOP date."""
return datetime.strptime(datestr, "%m/%d/%Y").date()
try:
release = parse_date(entry["actRelDate"])
except ValueError:
try:
release = parse_date(entry["projRelDate"])
except ValueError:
release = entry["projRelDate"]
inmate["release"] = release
inmate["datetime_fetched"] = datetime.now()
return inmate
``` |
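A quick usage sketch for the provider above; the inmate number is invented, and a live query assumes network access to bop.gov:
```python
# Hypothetical call into the FBOP provider; the ID below is made up.
from pymates import fbop

print(fbop.format_inmate_id("12345678"))  # -> "12345-678"

try:
    matches = fbop.query_by_inmate_id("12345-678", timeout=10)
except ValueError as exc:
    print("bad ID:", exc)
else:
    for inmate in matches:
        print(inmate["first_name"], inmate["last_name"], inmate["unit"])
```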
{
"source": "jonkiky/ppdc-OT-data-pipeline",
"score": 3
} |
#### File: plugins/gene/safety.py
```python
from yapsy.IPlugin import IPlugin
from opentargets_urlzsource import URLZSource
import logging
import simplejson as json
import csv
class Safety(IPlugin):
def __init__(self):
self._logger = logging.getLogger(__name__)
self.safety = {}
self.experimental_toxicity = {}
def merge_data(self, genes, es, r_server, data_config, es_config):
# Read adverse_effects and risk_info
self.build_json_safety(filename=data_config.safety)
# Read experimental toxicity
self.build_json_experimental_toxicity(uri=data_config.experimental_toxicity)
for gene_id, gene in genes.iterate():
# extend gene with related safety data
if gene.approved_symbol in self.safety:
                gene.safety = self.safety[gene.approved_symbol]
if gene.id in self.experimental_toxicity:
if hasattr(gene, 'safety'):
gene.safety['experimental_toxicity'] = self.experimental_toxicity[gene.id]
else:
gene.safety = {'experimental_toxicity' : self.experimental_toxicity[gene.id]}
def build_json_safety(self, filename):
with URLZSource(filename).open() as r_file:
safety_data = json.load(r_file)
for genekey in safety_data:
if genekey not in self.safety:
self.safety[genekey] = safety_data[genekey]
else:
self._logger.info("Safety gene id duplicated: " + genekey)
# Create a dict using gene_id as key and an array to collapse the common info.
def build_json_experimental_toxicity(self, uri):
with URLZSource(uri).open() as f_obj:
for row in csv.DictReader(f_obj, dialect='excel-tab'):
toxicity_json = self.exp_toxicity_json_format(row)
genekey = row["ensembl_gene_id"].strip()
if genekey not in self.experimental_toxicity:
self.experimental_toxicity[genekey]= []
self.experimental_toxicity[genekey].append(toxicity_json)
# Shape the info as the user requested
def exp_toxicity_json_format(self, row):
exp_toxicity_dict = dict()
for key, value in row.items():
if key not in "ensembl_gene_id":
if key in ("data_source","data_source_reference_link"):
exp_toxicity_dict[key] = value
else:
if "experiment_details" not in exp_toxicity_dict:
exp_toxicity_dict["experiment_details"] = {}
exp_toxicity_dict["experiment_details"][key] = value
return exp_toxicity_dict
``` |
{
"source": "JonKim-bot/gplan",
"score": 3
} |
#### File: Libraries/AItocreate_file/generateControllerCi4New.py
```python
from difflib import SequenceMatcher
import mysql.connector
import os
# arr = os.listdir("C:\\xampp\\htdocs\\galaeats\\application\\controllers")
import datetime
print(datetime.datetime.now())
conn = mysql.connector.connect(
user='root',
password='password',
host='127.0.0.1',
database='carlink')
mycursor = conn.cursor()
# mycursor.execute("SHOW TABLES LIKE '%_log%'")
mycursor.execute("SHOW TABLES")  # use execute to list the tables
resultAll = [item[0] for item in mycursor.fetchall()]
resultWithoutlog = []
def cap(s):
    # capitalize the first letter (translated from the original
    # JavaScript-style body; this helper is unused below)
    return s[0].upper() + s[1:]
for tables in resultAll:
if (tables.endswith("_log")):
# print("All log tables")
print()
else:
# run the create file function here
tablesCap = str(tables.capitalize())
# print(tablesCap)
small_letter_name = tablesCap.lower()
if ("_" in tablesCap):
# Booking_request
tableNameArr = tablesCap.replace("_", " ")
tableNameClean = tableNameArr.title()
tableNameClean = tableNameClean.replace(" ", "")
print(tableNameClean + "tablename calan")
else:
tableNameClean = tablesCap
# Booking[0]
# request[0]
print("create file")
with open('sample_controller.txt') as f:
text = f.read()
# print(text)
# print(tableNameClean)
new_file_text = text.replace('brand',small_letter_name)
new_file_text = new_file_text.replace('Brand',tableNameClean)
print(new_file_text)
controllerName = str(tableNameClean) + ".php"
# if file not created
print(controllerName + " not in arr")
# print(text)
f = open("controllerCi4/" + controllerName, "w+")
f.write(new_file_text)
f.close()
# print(text)
# f = open("controller/"+controllerName, "w+")
# f.write(text)
# f.close()
```
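The generator assumes a `sample_controller.txt` template that uses `brand`/`Brand` as placeholders. A tiny Python demonstration of the substitution it performs (the template text here is hypothetical, not the repo's actual file):
```python
# Demonstrates the brand/Brand placeholder swap the generator performs;
# the template string is an invented stand-in for sample_controller.txt.
template = "class Brand extends BaseController { /* list all brand rows */ }"
print(template.replace("brand", "booking_request").replace("Brand", "BookingRequest"))
# -> class BookingRequest extends BaseController { /* list all booking_request rows */ }
```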
#### File: Libraries/AItocreate_file/generateView.py
```python
from difflib import SequenceMatcher
import mysql.connector
import os
# arr = os.listdir("C:\\xampp\\htdocs\\galaeats\\application\\controllers")
import datetime
print(datetime.datetime.now())
conn = mysql.connector.connect(
user='root',
password='password',
host='127.0.0.1',
database='carlink')
mycursor = conn.cursor()
# mycursor.execute("SHOW TABLES LIKE '%_log%'")
mycursor.execute("SHOW TABLES")  # use execute to list the tables
resultAll = [item[0] for item in mycursor.fetchall()]
resultWithoutlog = []
def cap(s):
    # capitalize the first letter (translated from the original
    # JavaScript-style body; this helper is unused below)
    return s[0].upper() + s[1:]
def generate_all_file(tableSmall,tableBig):
with open('view_sample/all_sample.txt') as f:
text = f.read()
new_file_text = text.replace('brand', tableSmall)
new_file_text = new_file_text.replace('Brand', tableBig)
print(new_file_text)
path = "viewCi4/" + tableSmall
os.makedirs(path, exist_ok=True)
f = open(path+ "/all.php", "w+")
f.write(new_file_text)
f.close()
def generate_add_file(tableSmall,tableBig):
with open('view_sample/add_sample.txt') as f:
text = f.read()
new_file_text = text.replace('brand', tableSmall)
new_file_text = new_file_text.replace('Brand', tableBig)
print(new_file_text)
path = "viewCi4/" + tableSmall
os.makedirs(path, exist_ok=True)
f = open(path+ "/add.php", "w+")
f.write(new_file_text)
f.close()
def generate_edit_file(tableSmall, tableBig):
with open('view_sample/edit_sample.txt') as f:
text = f.read()
new_file_text = text.replace('brand', tableSmall)
new_file_text = new_file_text.replace('Brand', tableBig)
print(new_file_text)
path = "viewCi4/" + tableSmall
os.makedirs(path, exist_ok=True)
f = open(path + "/edit.php", "w+")
f.write(new_file_text)
f.close()
def generate_detail(tableSmall, tableBig):
with open('view_sample/detail_sample.txt') as f:
text = f.read()
new_file_text = text.replace('brand', tableSmall)
new_file_text = new_file_text.replace('Brand', tableBig)
print(new_file_text)
path = "viewCi4/" + tableSmall
os.makedirs(path, exist_ok=True)
f = open(path + "/detail.php", "w+")
f.write(new_file_text)
f.close()
for tables in resultAll:
if (tables.endswith("_log")):
# print("All log tables")
print()
else:
# run the create file function here
tablesCap = str(tables.capitalize())
# print(tablesCap)
small_letter_name = tablesCap.lower()
if ("_" in tablesCap):
# Booking_request
tableNameArr = tablesCap.replace("_", " ")
tableNameClean = tableNameArr.title()
tableNameClean = tableNameClean.replace(" ", "")
print(tableNameClean + "tablename calan")
else:
tableNameClean = tablesCap
# Booking[0]
# request[0]
print("create file")
generate_all_file(small_letter_name,tableNameClean)
generate_add_file(small_letter_name,tableNameClean)
generate_edit_file(small_letter_name,tableNameClean)
generate_detail(small_letter_name,tableNameClean)
# print(text)
# f = open("controller/"+controllerName, "w+")
# f.write(text)
# f.close()
``` |
{
"source": "Jonkimi/BuildingMachineLearningSystemsWithPython",
"score": 3
} |
#### File: BuildingMachineLearningSystemsWithPython/ch08/chapter.py
```python
import numpy as np # NOT IN BOOK
from matplotlib import pyplot as plt # NOT IN BOOK
def load():
import numpy as np
from scipy import sparse
data = np.loadtxt('data/ml-100k/u.data')
ij = data[:, :2]
ij -= 1 # original data is in 1-based system
values = data[:, 2]
reviews = sparse.csc_matrix((values, ij.T)).astype(float)
return reviews.toarray()
reviews = load()
U,M = np.where(reviews)
import random
test_idxs = np.array(random.sample(range(len(U)), len(U)//10))
train = reviews.copy()
train[U[test_idxs], M[test_idxs]] = 0
test = np.zeros_like(reviews)
test[U[test_idxs], M[test_idxs]] = reviews[U[test_idxs], M[test_idxs]]
class NormalizePositive(object):
def __init__(self, axis=0):
self.axis = axis
def fit(self, features, y=None):
if self.axis == 1:
features = features.T
# count features that are greater than zero in axis 0:
binary = (features > 0)
count0 = binary.sum(axis=0)
# to avoid division by zero, set zero counts to one:
count0[count0 == 0] = 1.
# computing the mean is easy:
self.mean = features.sum(axis=0)/count0
# only consider differences where binary is True:
diff = (features - self.mean) * binary
diff **= 2
# regularize the estimate of std by adding 0.1
self.std = np.sqrt(0.1 + diff.sum(axis=0)/count0)
return self
def transform(self, features):
if self.axis == 1:
features = features.T
binary = (features > 0)
features = features - self.mean
features /= self.std
features *= binary
if self.axis == 1:
features = features.T
return features
def inverse_transform(self, features, copy=True):
if copy:
features = features.copy()
if self.axis == 1:
features = features.T
features *= self.std
features += self.mean
if self.axis == 1:
features = features.T
return features
def fit_transform(self, features):
return self.fit(features).transform(features)
norm = NormalizePositive(axis=1)
binary = (train > 0)
train = norm.fit_transform(train)
# plot just 200x200 area for space reasons
plt.imshow(binary[:200, :200], interpolation='nearest')
from scipy.spatial import distance
# compute all pair-wise distances:
dists = distance.pdist(binary, 'correlation')
# Convert to square form, so that dists[i,j]
# is distance between binary[i] and binary[j]:
dists = distance.squareform(dists)
neighbors = dists.argsort(axis=1)
# We are going to fill this matrix with results
filled = train.copy()
for u in range(filled.shape[0]):
# n_u is neighbors of user
n_u = neighbors[u, 1:]
for m in range(filled.shape[1]):
# get relevant reviews in order!
revs = [train[neigh, m]
for neigh in n_u
if binary [neigh, m]]
if len(revs):
# n is the number of reviews for this movie
n = len(revs)
# take half of the reviews plus one into consideration:
n //= 2
n += 1
revs = revs[:n]
filled[u,m] = np.mean(revs)
predicted = norm.inverse_transform(filled)
from sklearn import metrics
r2 = metrics.r2_score(test[test > 0], predicted[test > 0])
print('R2 score (binary neighbors): {:.1%}'.format(r2))
reviews = reviews.T
# use same code as before
r2 = metrics.r2_score(test[test > 0], predicted[test > 0])
print('R2 score (binary movie neighbors): {:.1%}'.format(r2))
from sklearn.linear_model import ElasticNetCV # NOT IN BOOK
reg = ElasticNetCV(alphas=[
0.0125, 0.025, 0.05, .125, .25, .5, 1., 2., 4.])
filled = train.copy()
# iterate over all users:
for u in range(train.shape[0]):
curtrain = np.delete(train, u, axis=0)
bu = binary[u]
reg.fit(curtrain[:,bu].T, train[u, bu])
filled[u, ~bu] = reg.predict(curtrain[:,~bu].T)
predicted = norm.inverse_transform(filled)
r2 = metrics.r2_score(test[test > 0], predicted[test > 0])
print('R2 score (user regression): {:.1%}'.format(r2))
# SHOPPING BASKET ANALYSIS
# This is the slow version of the code, which will take a long time to
# complete.
from collections import defaultdict
from itertools import chain
# File is downloaded as a compressed file
import gzip
# file format is a line per transaction
# of the form '12 34 342 5...'
dataset = [[int(tok) for tok in line.strip().split()]
for line in gzip.open('data/retail.dat.gz')]
dataset = [set(d) for d in dataset]
# count how often each product was purchased:
counts = defaultdict(int)
for elem in chain(*dataset):
counts[elem] += 1
minsupport = 80
valid = set(k for k,v in counts.items() if (v >= minsupport))
itemsets = [frozenset([v]) for v in valid]
freqsets = []
for i in range(16):
nextsets = []
tested = set()
for it in itemsets:
for v in valid:
if v not in it:
# Create a new candidate set by adding v to it
c = (it | frozenset([v]))
# check If we have tested it already
if c in tested:
continue
tested.add(c)
# Count support by looping over dataset
# This step is slow.
# Check `apriori.py` for a better implementation.
support_c = sum(1 for d in dataset if d.issuperset(c))
if support_c > minsupport:
nextsets.append(c)
freqsets.extend(nextsets)
itemsets = nextsets
if not len(itemsets):
break
print("Finished!")
minlift = 5.0
nr_transactions = float(len(dataset))
for itemset in freqsets:
for item in itemset:
consequent = frozenset([item])
antecedent = itemset-consequent
base = 0.0
# acount: antecedent count
acount = 0.0
# ccount : consequent count
ccount = 0.0
for d in dataset:
if item in d: base += 1
if d.issuperset(itemset): ccount += 1
if d.issuperset(antecedent): acount += 1
base /= nr_transactions
p_y_given_x = ccount/acount
lift = p_y_given_x / base
if lift > minlift:
print('Rule {0} -> {1} has lift {2}'
.format(antecedent, consequent,lift))
``` |
{
"source": "Jonkimi/rap2-websocket",
"score": 2
} |
#### File: Jonkimi/rap2-websocket/mock.py
```python
from __future__ import print_function
# from builtins import input
from sys import stdout
import json
import os.path
import urllib2
import urlparse
import shutil
import argparse
import shlex
import ConfigParser
# test cookie 'koa.sid=7QC71YKGHTaOZUbeMHirZjicr0MN8-fX; koa.sid.sig=h0Q0zcHLVyJ_qQVxfwD_9pvXSLE'
server = 'https://127.0.0.1:8088'
repo_url = '{0}/repository/get?id={1}'
mock_url_prefix = '{0}/app/mock/{1}/'
# directory containing this script
mock_dir = os.path.split(os.path.realpath(__file__))[0]
example_script = u'test'
example_json = u'test.json'
def send_req(url, cookie):
'''
    Send a request to the rap2-delos server.
:param url:
:param cookie:
:return:
'''
print('send request: ', url)
req = urllib2.Request(url)
req.add_header('cookie', cookie)
try:
response = urllib2.urlopen(req, timeout=60)
except urllib2.HTTPError as err:
print("HTTP error({0}): {1}".format(err.errno, err.strerror))
response_str = None
except IOError as err:
print("IO error({0}): {1}, please check your repo id and cookie.".format(err.errno, err.strerror))
response_str = None
else:
response_str = response.read()
return response_str
def mock_repo(repo_id, cookie):
repo_str = send_req(repo_url.format(server, repo_id), cookie)
if repo_str is None:
return
parsed_json = json.loads(repo_str)
data = parsed_json['data']
if data is not None:
interfaces = [y for x in data['modules'] for y in x['interfaces']]
# print json.dumps(interfaces)
ws_interfaces = filter(lambda i: i['name'].startswith('ws'), interfaces)
# print json.dumps(ws_interfaces)
for ws in ws_interfaces:
print('----------------------------------------')
ws_url = ws['url']
if ws_url.endswith('/'):
ws_url = ws_url[:len(ws_url) - 1]
name = os.path.basename(ws_url)
if ws_url.startswith('/'):
ws_url = ws_url[1: len(ws_url)]
path = os.path.dirname(os.path.join(mock_dir, ws_url))
print('path:', ws_url)
            # Create the directory
try:
if not os.path.exists(path):
os.makedirs(path)
except OSError as err:
print("OSError ({0}): {1}".format(err.errno, err.strerror))
else:
                # Copy the script
shutil.copy(os.path.join(mock_dir, example_script), os.path.join(path, name))
                # Write the script configuration
with open(os.path.join(path, name + '.conf'), 'w') as mock_url:
mock_url.write(urlparse.urljoin(mock_url_prefix.format(server, repo_id), ws_url))
print('mock {0} OK '.format(ws_url))
def handle():
while True:
stdout.flush()
command = raw_input()
if command is None or not command.startswith('mock '):
print(APP_DESC)
else:
try:
args = parser.parse_args(shlex.split(command[5:]))
            except SystemExit:
                # Ignore the SystemExit raised when argument parsing fails or help is printed
pass
else:
# print(args)
if args.cookie is None or args.repo_id is None:
print(APP_DESC)
else:
mock_repo(args.repo_id, args.cookie)
if __name__ == '__main__':
APP_DESC = """usage: mock [-h] [-i repo-id] [-c cookie]
optional arguments:
-h, --help show this help message and exit
-i, --repo-id set repository id
-c, --cookie set auth cookie
"""
parser = argparse.ArgumentParser()
parser.add_argument('-i', '--repo-id', type=int, help="")
parser.add_argument('-c', '--cookie', type=str, help="")
    # Read the server configuration
config_path = os.path.join(mock_dir, "mock.conf")
if not os.path.exists(config_path):
print("config file mock.conf not exists.")
else:
cf = ConfigParser.ConfigParser()
cf.read(config_path)
server = cf.get('main', 'server')
print(APP_DESC)
handle()
``` |
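mock.py reads its server address from a `mock.conf` file next to the script; below is a minimal hedged sketch of writing that file. The `[main]`/`server` layout mirrors the `cf.get('main', 'server')` call above, and the URL is only a placeholder:

```python
# Writes the minimal mock.conf that mock.py expects; run from the script's directory.
with open('mock.conf', 'w') as f:
    f.write('[main]\n')
    f.write('server = https://127.0.0.1:8088\n')
```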
{
"source": "jonklo/django-deprecated-field",
"score": 3
} |
#### File: django-deprecated-field/deprecated_field/__init__.py
```python
import logging
from django.conf import settings # type: ignore
from django.db import models
logger = logging.getLogger(__name__)
class DeprecatedFieldAccessError(Exception):
"""
Raised if a deprecated field is accessed in strict mode
"""
def log_or_raise(message_format: str, *args) -> None:
"""
    Either log an error message or, if in strict mode, raise an exception.
"""
if getattr(settings, "STRICT_DEPRECATED_FIELD", None) is True:
message = message_format % args
raise DeprecatedFieldAccessError(message)
logger.error(message_format, *args, stack_info=True)
class DeprecatedFieldDescriptor:
"""
A descriptor for a deprecated field. Logs an error whenever it's accessed
and always returns None.
"""
def __init__(self, field):
self.field = field
def __get__(self, instance, cls=None):
if instance:
log_or_raise(
'Accessed deprecated field "%s" on instance of "%s.%s"',
self.field.name,
instance.__class__.__module__,
instance.__class__.__qualname__,
)
elif cls:
log_or_raise(
'Accessed deprecated field "%s" on model class "%s.%s"',
self.field.name,
cls.__module__,
cls.__qualname__,
)
def __set__(self, instance, value) -> None:
log_or_raise(
'Tried to set deprecated field "%s" on instance of "%s.%s"',
self.field.name,
instance.__class__.__module__,
instance.__class__.__qualname__,
)
class Null(models.Expression):
"""
An expression that always returns None.
"""
def as_sql(self, compiler, connection):
return "NULL", []
class DeprecatedField(models.Field):
"""
A field that ensures a column can safely be removed from the database in
a later deploy.
This ensures that Django does not reference the field in queries by default,
and if the field is explicitly referenced either an exception is raised or
    an error is logged. The column will still be referenced in the database if
used in an .update() query, but in all other queries any reference to the
column is replaced with a NULL literal.
"""
concrete: bool
descriptor_class = DeprecatedFieldDescriptor
def __init__(self, original_field: models.Field) -> None:
super().__init__()
self.original_field = original_field
def contribute_to_class(self, cls, name, private_only=False):
super().contribute_to_class(cls, name, private_only=private_only)
self.concrete = False
def clone(self):
"""
This is where the magic happens. Instead of returning a copy of this
field we return a copy of the underlying field. This method is called
when the Django migrations system checks for changes, meaning that this
ensures the deprecation is invisible to the migration system.
"""
return self.original_field.clone()
def get_col(self, alias, output_field=None):
"""
Hook in to detect when the column is used in a query and replace the
column reference with null literal in the query.
Even though the field is marked as concrete=False, Django still allows
it to be referenced in .values() and .values_list() queries. This will
catch these cases and either raise an exception or log an error and
set the selected value to "NULL" in the database query.
"""
log_or_raise(
'Deprecated field "%s" on "%s.%s" referenced in query',
self.name,
self.model.__module__,
self.model.__qualname__,
)
return Null(output_field=output_field or self)
def get_db_prep_save(self, value, connection):
"""
Hook in to detect when the field is used in an update query.
Even though the field is marked as concrete=False, Django still allows
it to be referenced in .update(foo=bar) queries. This will catch these
cases and log or raise an error.
"""
log_or_raise(
'Writing to deprecated field "%s" on "%s.%s"',
self.name,
self.model.__module__,
self.model.__qualname__,
)
return self.get_db_prep_value(value, connection=connection, prepared=False)
def get_default(self):
"""
Hook into the logic Django uses to set a value on a model if one wasn't
        provided in __init__, create() or similar. Returning DEFERRED tells
        Django not to set a value, which is exactly what we want for deprecated fields.
"""
return models.DEFERRED
def deprecated(original_field: models.Field) -> DeprecatedField:
"""
Mark a field as deprecated. This removes the field from queries against the
database, so we can safely remove it from the database after this change
has been rolled out.
"""
# Make sure the original field is nullable
original_field.null = True
return DeprecatedField(original_field=original_field)
__all__ = ["deprecated", "DeprecatedFieldAccessError"]
```
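A hedged usage sketch of `deprecated()`. The `Artist` model here is hypothetical (it borrows the name/max_length that the migration test below checks), and it assumes a configured Django project:

```python
# Hypothetical models.py: mark a column before dropping it from the database.
from django.db import models

from deprecated_field import deprecated

class Artist(models.Model):
    # Django stops referencing this column in queries, so the column can be
    # removed from the database in a later deploy.
    name = deprecated(models.CharField(max_length=256))
```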
#### File: tests/tests/test_migrations.py
```python
from django.apps import apps
from django.core.management import call_command
from django.db.migrations.autodetector import MigrationAutodetector
from django.db.migrations.loader import MigrationLoader
from django.db.migrations.operations import AlterField
from django.db.migrations.questioner import NonInteractiveMigrationQuestioner
from django.db.migrations.state import ProjectState
from django.db.models import CharField
def test_make_migrations_deprecated_field(db, capsys):
call_command("makemigrations", "--dry-run", "--no-input", "tests")
captured = capsys.readouterr()
assert "Alter field name on artist" in captured.out
def test_state_introspection():
loader = MigrationLoader(None, ignore_no_migrations=True)
questioner = NonInteractiveMigrationQuestioner(
specified_apps=["tests"], dry_run=True
)
# Set up autodetector
autodetector = MigrationAutodetector(
loader.project_state(),
ProjectState.from_apps(apps),
questioner,
)
# Detect changes
changes = autodetector.changes(
graph=loader.graph,
trim_to_apps=["tests"],
convert_apps=["tests"],
migration_name=None,
)
assert "tests" in changes
assert len(changes["tests"]) == 1
migration = changes["tests"][0]
operations = migration.operations
for operation in operations:
print(operation)
assert len(operations) == 2
operation = next(
operation
for operation in operations
if isinstance(operation, AlterField)
and operation.model_name == "artist"
and operation.name == "name"
)
assert isinstance(operation.field, CharField)
assert operation.field.null is True
assert operation.field.max_length == 256
```
#### File: tests/tests/test_queries.py
```python
import logging
import pytest
from django.test import override_settings
from deprecated_field import DeprecatedFieldAccessError
from ..models import Genre
def test_create_without_deprecated_field_not_in_db(db, caplog):
"""
    Ensure that we can create objects even if a deprecated field on the model
does not exist in the database.
"""
Genre.objects.create()
assert not caplog.records
def test_create_with_deprecated_field_not_in_db(db, caplog):
"""
    Ensure that we can create objects even if a deprecated field that does
not exist in the database is specified.
"""
caplog.set_level(logging.ERROR)
Genre.objects.create(name="test")
assert (
'Tried to set deprecated field "name" on instance of "tests.models.Genre"'
in caplog.text
)
def test_create_without_deprecated_field_not_in_db_strict(db, caplog):
with override_settings(STRICT_DEPRECATED_FIELD=True):
Genre.objects.create()
def test_create_with_deprecated_field_not_in_db_strict(db, caplog):
with pytest.raises(DeprecatedFieldAccessError):
with override_settings(STRICT_DEPRECATED_FIELD=True):
Genre.objects.create(name="test")
def test_bulk_create_with_deprecated_field_not_in_db(db, caplog):
"""
    Ensure that we can create objects even if a deprecated field that does
not exist in the database is specified.
"""
genre = [Genre(name="rock"), Genre(name="roll")]
caplog.set_level(logging.ERROR)
caplog.clear()
Genre.objects.bulk_create(genre)
assert not caplog.records
``` |
{
"source": "jonknoll/battlesnake-python-jk",
"score": 3
} |
#### File: battlesnake-python-jk/app/strategy.py
```python
import random
from .grid import Grid
from .snakestuff import *
# Values to put in grid, priorities TBD
OPEN_SPACE = '.'
FOOD = 'F'
ME_HEAD = 'H'
ME_BODY = 'B'
ME_TAIL = 'T'
EATABLE_HEAD_ZONE = 'e'
VERY_EATABLE_HEAD = 'E'
OTHER_HEAD = 'h'
OTHER_BODY = 'b'
OTHER_TAIL = 't'
MAYBE_GO = '+'
#EAT_THIS_HEAD = '$'
#ORTHOGONAL_HEAD = '+'
#DIAGONAL_HEAD = '-'
#CLOSE_TO_WALL = '?'
#TAIL = 'T'
#NO_GO = 'X'
#WALL = -1
def executeStrategy(data):
height = data['height']
width = data['width']
ourSnakeCoords = getOurSnakeCoords(data)
ourHead = getHeadCoord(ourSnakeCoords)
mySnakeCoords = getOurSnakeCoords(data)
mySnakeLength = len(mySnakeCoords)
myTrajectory = getTrajectory(mySnakeCoords)
health = getOurSnakeHealth(data)
largerThanUs = snakesLargerThanUs(data)
# Build Grid with symbols for different things
symbolGrid = buildSymbolGrid(data)
symbolGrid.printGrid()
maxSnakeMove = symbolGrid.getPerimeter()
# Build grid that contains the number of moves that it would take to get
# somewhere.
distanceGrid = Grid(width, height, maxSnakeMove)
# Build grid that contains which direction the snake should go to get to
# that coordinate.
moveGrid = Grid(width, height, None)
# Get lists of stuff on the board
noGoCoordsList = symbolGrid.getListOfType([ME_HEAD, ME_BODY, OTHER_HEAD, OTHER_BODY])
# Note: You would expect ME_TAIL to be in the maybeGoCoordsList, but then
# it doesn't get a distance associated with it. However the tail will
# disappear for one move when the snake eats food (2 tail coords so it gets
# covered up by the body) so actually we should be ok.
maybeGoCoordsList = symbolGrid.getListOfType([MAYBE_GO])
# need this for counting open spaces. Snake heads can close off spaces in
# a hurry!
cautionCoordsList = symbolGrid.getListOfType([EATABLE_HEAD_ZONE])
# build grid using only safe moves
barrierCoordsList = noGoCoordsList + maybeGoCoordsList
#print("NO GO COORDS={}".format(noGoCoordsList))
#print("MAYBE GO COORDS={}".format(maybeGoCoordsList))
#print("BARRIER COORDS={}".format(barrierCoordsList))
# build up the list of moves at each location
fillDistanceAndMoveGrids(distanceGrid, moveGrid, ourHead, barrierCoordsList)
# Useful grids for debugging
#distanceGrid.printGrid(2)
#moveGrid.printGrid(4)
# build a dictionary of the number of open spaces available at each move
openSpaceCoordsList = barrierCoordsList + cautionCoordsList
moveDict = {}
moveDict['left'] = countOpenSpaces(data, symbolGrid, (ourHead[0]-1, ourHead[1]), openSpaceCoordsList)
moveDict['up'] = countOpenSpaces(data, symbolGrid, (ourHead[0], ourHead[1]-1), openSpaceCoordsList)
moveDict['right'] = countOpenSpaces(data, symbolGrid, (ourHead[0]+1, ourHead[1]), openSpaceCoordsList)
moveDict['down'] = countOpenSpaces(data, symbolGrid, (ourHead[0], ourHead[1]+1), openSpaceCoordsList)
# Print grids for debugging
#distanceGrid.printGrid(2)
#moveGrid.printGrid()
# Decide on a direction
print("Health={}, Size={}, Trajectory={}, snakes larger than us={}".format(health, mySnakeLength, myTrajectory, largerThanUs))
ourMove, ourTaunt = decisionTree(data, symbolGrid, distanceGrid, moveGrid, moveDict)
if ourMove is None:
print("Reassessing with maybe-go coordinates as OK")
# Better reassess with a smaller set of no-go coordinates
distanceGrid = Grid(width, height, maxSnakeMove)
moveGrid = Grid(width, height, None)
fillDistanceAndMoveGrids(distanceGrid, moveGrid, ourHead, noGoCoordsList)
# build a dictionary of the number of open spaces available at each move
moveDict = {}
moveDict['left'] = countOpenSpaces(data, symbolGrid, (ourHead[0]-1, ourHead[1]), noGoCoordsList)
moveDict['right'] = countOpenSpaces(data, symbolGrid, (ourHead[0]+1, ourHead[1]), noGoCoordsList)
moveDict['up'] = countOpenSpaces(data, symbolGrid, (ourHead[0], ourHead[1]-1), noGoCoordsList)
moveDict['down'] = countOpenSpaces(data, symbolGrid, (ourHead[0], ourHead[1]+1), noGoCoordsList)
#distanceGrid.printGrid(2)
#moveGrid.printGrid(4)
ourMove, ourTaunt = decisionTree(data, symbolGrid, distanceGrid, moveGrid, moveDict)
if ourMove is None:
ourMove, ourTaunt = panicDecisionTree(data, symbolGrid, distanceGrid, moveGrid, moveDict)
print("Our move is {}, our taunt is: {}".format(ourMove, ourTaunt))
return(ourMove, ourTaunt)
def buildSymbolGrid(data):
"""
Build a grid and use symbols to represent all the items in the grid.
Then you can use getListOfType() to get all the coordinates for a list
of symbols. You can also print this grid (super useful for debugging).
The grid should be built up in layers from least dangerous (eg. food) to
most dangerous (eg. snake body). That way, any additional zones put in
place do not clobber no-go coordinates.
"""
height = data['height']
width = data['width']
myId = data['you']
mySnakeObj = getMySnakeObj(data)
grid = Grid(width, height, OPEN_SPACE)
# Food (very safe)
grid.setList(data['food'], FOOD)
# Area around Eatable Snake Heads (safe)
for snake in data['snakes']:
if(snake['id'] != myId):
if(len(snake["body"]) < len(mySnakeObj["body"])):
# snake is shorter than us. Positions around the head are
# the goal, not the head itself, as the snake's head will not
# be there on the next move.
orthList = grid.getOrthogonal(snake["body"][0])
grid.setList(orthList, EATABLE_HEAD_ZONE)
# special case if the eatable head is in range on our next move
myOrthList = grid.getOrthogonal(mySnakeObj["body"][0])
for i in orthList:
if i in myOrthList:
grid.set(i, VERY_EATABLE_HEAD)
# Special case when the tail is hidden by the body (2 tail coordinates)
# immediately after eating food. So in that case, put tail markers around
# where the tail would be (can't mark the tail because it's not going to
    # move for 1 turn).
if len(mySnakeObj["body"]) > 1:
if mySnakeObj["body"][-1] == mySnakeObj["body"][-2]:
orthList = grid.getOrthogonal(mySnakeObj["body"][-1])
grid.setList(orthList, ME_TAIL)
# Snake heads and bodies (deadly: no-go) and tails (risky: maybe-go)
# Must lay down after everything else, so it doesn't get overwritten
for snake in data['snakes']:
if(snake['id'] == myId):
# Keep this order (tail, snake, head). There are 2 tail coordinates
# at the same location when the snake eats food. So the tail is
# temporarily hidden by the body which gets overlayed after the
# tail has been placed. This is actually what we want so we don't
# choose the tail spot after a snake has eaten something.
grid.set(snake["body"][-1], ME_TAIL)
grid.setList(snake["body"][1:-1], ME_BODY)
grid.set(snake["body"][0], ME_HEAD)
#print("me snake coords={}".format(snake["body"]))
else:
grid.set(snake["body"][-1], OTHER_TAIL)
grid.setList(snake["body"][1:-1], OTHER_BODY)
grid.set(snake["body"][0], OTHER_HEAD)
#print("snake coords={}".format(snake["body"]))
# Area around Non-eatable Snake Heads (risky: maybe-go)
dontOverwriteList = [None, ME_BODY, ME_HEAD, OTHER_BODY, OTHER_HEAD]
for snake in data['snakes']:
if snake['id'] != myId:
if len(snake["body"]) >= len(mySnakeObj["body"]):
# Bigger snake
# put a danger zone around the head of larger snake
orthList = grid.getOrthogonal(snake["body"][0])
grid.setList(orthList, MAYBE_GO, dontOverwriteList)
# Strategy from watching snakes engage in risky behaviour:
# Plot areas where a snake head is one space over from a wall or another
# snake. This is a high risk area since the snake is making a tunnel and
# could suddenly turn and close the area off. Better to mark as maybe-go
# to avoid entering a high risk area.
# Me Tail is in the list because the MAYBE_GO can overwrite our tail marker
    # which causes our snake to lose sight of its tail and make bad decisions
# in tight situations.
noGoList = [None, ME_BODY, ME_TAIL, OTHER_HEAD, OTHER_BODY, MAYBE_GO]
for head in grid.getListOfType([OTHER_HEAD]):
# left
if grid.get((head[0]-1, head[1])) not in noGoList and grid.get((head[0]-2, head[1])) in noGoList:
grid.set((head[0]-1, head[1]), MAYBE_GO)
# right
if grid.get((head[0]+1, head[1])) not in noGoList and grid.get((head[0]+2, head[1])) in noGoList:
grid.set((head[0]+1, head[1]), MAYBE_GO)
# up
if grid.get((head[0], head[1]-1)) not in noGoList and grid.get((head[0], head[1]-2)) in noGoList:
grid.set((head[0], head[1]-1), MAYBE_GO)
# down
if grid.get((head[0], head[1]+1)) not in noGoList and grid.get((head[0], head[1]+2)) in noGoList:
grid.set((head[0], head[1]+1), MAYBE_GO)
return(grid)
def fillDistanceAndMoveGrids(distanceGrid, moveGrid, startingCoord, noGoCoords):
maxMoves = distanceGrid.getPerimeter()
# make a list of all the coordinates to try this round
# remove them if they are one of the no go coordinates
stepDict = distanceGrid.getOrthogonalDict(startingCoord, noGoCoords)
#print("stepDict={}".format(stepDict))
for stepNumber in range(1, maxMoves):
coordsForNextStep = {}
#print("step {}: {}".format(stepNumber, stepDict))
for stepCoord, move in stepDict.items():
if distanceGrid.get(stepCoord) == maxMoves:
distanceGrid.set(stepCoord, stepNumber)
# update move grid
moveGrid.set(stepCoord, move)
# keep this coordinate for the next step
coordsForNextStep[stepCoord] = move
if len(coordsForNextStep) == 0:
break
# Get the coordinates for the next step
for coord, move in coordsForNextStep.items():
tempList = distanceGrid.getOrthogonal(coord, noGoCoords)
stepDict.update({c:move for c in tempList})
def countOpenSpaces(data, symbolGrid, startingCoord, noGoCoords):
"""
Count the number of spaces the snake could reach starting from the
starting coordinate. Provide the list of barriers in the noGoCoords.
The algorithm has some added complexity because if our tail is in the open
space we are looking at, then we will always have an escape. That makes
the space basically safe.
"""
if startingCoord in noGoCoords:
return(0)
counterGrid = Grid(data['width'], data['height'], 0)
if counterGrid.get(startingCoord) == None: # outside the grid
return(0)
# Set starting coordinate as valid
counterGrid.set(startingCoord, 1)
stepList = counterGrid.getOrthogonal(startingCoord, noGoCoords)
for _ in range(counterGrid.getPerimeter()):
tempList = []
for stepCoord in stepList:
if stepCoord not in noGoCoords and counterGrid.get(stepCoord) == 0:
counterGrid.set(stepCoord, 1)
tempList.append(stepCoord)
stepList = tempList
# if no coordinates to check out then we're done.
if len(stepList) > 0:
stepList = counterGrid.getOrthogonalFromList(stepList, noGoCoords)
else:
break
numOpenSpaces = counterGrid.count(1)
#counterGrid.printGrid()
# special exception for our tail. If the tail is part of the open spaces
# and the number of open spaces is shorter than our snake then set the
# number of spaces to our snake length.
ourSnakeLength = len(getOurSnakeCoords(data))
if ourSnakeLength > numOpenSpaces:
tailList = symbolGrid.getListOfType([ME_TAIL])
openSpaceList = counterGrid.getListOfType([1])
# is any tail coordinate in the open space coordinates?
if len(set(tailList) & set(openSpaceList)) > 0:
numOpenSpaces = ourSnakeLength
return(numOpenSpaces)
def decisionTree(data, symbolGrid, distanceGrid, moveGrid, moveDict):
"""
Decisions for Snake:
- Goals are: Self Preservation, Food, Heads of smaller snakes, Chase our own tail
- The direction is determined by whatever is the top goal for this round
- The top priority is food if below the health threshold
- The second priority is go for heads of smaller snakes
- If health is good and no smaller snakes then chase tail
- If none of the goals can be reached then see if any of our directions are
in the "MAYBE_GO" list, if so, take it.
- If there is no move available, then make a random choice and die.
Returns the move and the taunt
"""
mySnakeCoords = getOurSnakeCoords(data)
mySnakeLength = len(mySnakeCoords)
ourMove = None
ourTaunt = None
health = getOurSnakeHealth(data)
largerThanUs = snakesLargerThanUs(data)
preferredMoveList = sorted(moveDict, key=moveDict.get, reverse=True)
preferredMoveListRanked = getPreferredMoveListRanked(moveDict)
# Useful runtime stats
print("moveDict={}, rank={}".format(moveDict, preferredMoveListRanked))
# stay healthy, try to be biggest snake, eat smaller snakes
if ourMove == None:
if health < 75:
# must maintain health, only go for food
nearestFoodList = getNearestOfType([FOOD], symbolGrid, distanceGrid)
decisionString = "Eat food"
elif largerThanUs == 0:
# we're big and well fed, now get the snakes!
nearestFoodList = getNearestOfType([EATABLE_HEAD_ZONE, VERY_EATABLE_HEAD], symbolGrid, distanceGrid)
decisionString = "Chase snakes"
else:
            # health is good, snack on snakes while growing
nearestFoodList = getNearestOfType([FOOD, EATABLE_HEAD_ZONE, VERY_EATABLE_HEAD], symbolGrid, distanceGrid)
decisionString = "Eat food and snakes"
numFood = len(nearestFoodList)
#print("decision so far={}, nearestFoodList={}".format(decisionString, nearestFoodList))
if numFood == 0:
#print("No Food! Moving on...")
pass
elif numFood == 1:
ourMove = moveGrid.get(nearestFoodList[0])
ourTaunt = decisionString
print("Decision: {}: {}, distance={}, ourMove={}".format(decisionString, nearestFoodList[0], distanceGrid.get(nearestFoodList[0]), moveGrid.get(nearestFoodList[0])))
else: # special case: more than one food at equal distance!
moveList = [moveGrid.get(coord) for coord in nearestFoodList]
for preferredmove in preferredMoveList:
if preferredmove in moveList:
ourMove = preferredmove
ourTaunt = decisionString
break
print("Decision: {}: {}, distance={}, ourMove={}".format(decisionString, nearestFoodList, distanceGrid.get(nearestFoodList[0]), ourMove))
# Safety check: how many spaces are we moving into.
if ourMove != None:
if moveDict[ourMove] < mySnakeLength:
print("Safety override for move {}! snake length={}, spaces available={}".format(ourMove, mySnakeLength, moveDict[ourMove]))
ourMove = None
# eating didn't work out - chase tail!
if ourMove == None:
myTailList = getNearestOfType([ME_TAIL], symbolGrid, distanceGrid)
if len(myTailList) > 0:
#myTail = random.choice(myTailList)
myTail = myTailList[0]
ourMove = moveGrid.get(myTail)
ourTaunt = "Chase tail"
print("Decision: Chase tail at {}, distance={}, ourMove={}".format(myTail, distanceGrid.get(myTail), ourMove))
# Safety check: how many spaces are we moving into.
# EXPERIMENTAL: If moving toward tail, we should always have a space
# available. But it depends on how close we are to our tail!
if ourMove != None:
if moveDict[ourMove] < mySnakeLength:
print("Safety override for move {}! snake length={}, spaces available={}".format(ourMove, mySnakeLength, moveDict[ourMove]))
ourMove = None
# Can't find tail - move wherever there is space
# First check the moveDict to see if any direction shows available moves.
# Go with whichever direction has the most coordinates marked.
# Prefer current trajectory over random direction (testing this strategy)
if ourMove == None:
# make sure the top rated direction has at least one position to move
# into.
if moveDict[preferredMoveList[0]] >= mySnakeLength:
#ourMove = random.choice(preferredMoveListRanked[0])
ourMove = preferredMoveListRanked[0][0]
ourTaunt = "Wander"
print("Decision: go with majority (L,U,R,D). Spaces={}, move={}".format(moveDict[preferredMoveList[0]], ourMove))
return(ourMove, ourTaunt)
def panicDecisionTree(data, symbolGrid, distanceGrid, moveGrid, moveDict):
# Nothing within reach! Start to panic.
ourMove = None
ourTaunt = None
preferredMoveList = sorted(moveDict, key=moveDict.get, reverse=True)
preferredMoveListRanked = getPreferredMoveListRanked(moveDict)
# Go with whichever direction has the most coordinates marked.
if ourMove == None:
# make sure the top rated direction has at least one position to move
# into.
if moveDict[preferredMoveList[0]] > 0:
#ourMove = random.choice(preferredMoveListRanked[0])
ourMove = preferredMoveListRanked[0][0]
ourTaunt = "Wander with concern"
print("Decision: go with what's left (L,U,R,D). Spaces={}, move={}".format(moveDict[preferredMoveList[0]], ourMove))
    # If we get here, then we should be panicking.
# At this point, go to the list of "maybe go" positions. This is a last
# resort because moving into these positions are high risk (eg. moving
# beside the head of a larger snake). But if no other choice, then a
# maybe-go is better than a random fate.
if ourMove == None:
ourSnakeHead = getHeadCoord(getOurSnakeCoords(data))
orthogonalList = symbolGrid.getOrthogonal(ourSnakeHead)
maybeGoCoordsList = symbolGrid.getListOfType([ME_TAIL, MAYBE_GO])
possibleCoordinates = [a for a in orthogonalList if a in maybeGoCoordsList]
possibleDirections = [getTrajectory([coord, ourSnakeHead]) for coord in possibleCoordinates]
if len(possibleDirections) > 0:
#ourMove = random.choice(possibleDirections)
ourMove = possibleDirections[0]
ourTaunt = "Wander with more concern"
print("Decision: No good options. Resort to the Maybe-go list (L,U,R,D), move={}".format(ourMove))
# Full panic, we're probably going to die. Keep on our trajectory, so we
# at least don't turn in on ourself.
if ourMove == None:
#directions = ['up', 'down', 'left', 'right']
#ourMove = random.choice(directions)
ourMove = getTrajectory(getOurSnakeCoords(data))
ourTaunt = "Uh oh"
print("Descision: PANIC! No moves available! Going straight ahead: {}".format(ourMove))
return(ourMove, ourTaunt)
def getNearestOfType(thingsToFindList, symbolGrid, distanceGrid):
"""
Pass in an array of things to find in the symbol table, and return an
    array of coordinates of the nearest things. Normally there will be an array
with 1 coordinate, but in some cases there will be 2 things of equal
distance away. When that happens, further decisions need to be made as
to which thing to head towards.
"""
maxSnakeMove = symbolGrid.getPerimeter()
foundThingsList = symbolGrid.getListOfType(thingsToFindList)
distanceDict = {} # key=coordinate, value=distance from head
nearestThingsCoordsList = []
for coord in foundThingsList:
coordDistance = distanceGrid.get(coord)
if coordDistance < maxSnakeMove: # check for unobtainium
distanceDict[coord] = coordDistance
#print("distanceDict={}".format(distanceDict))
# klunktastic
sortedCoords = sorted(distanceDict, key=distanceDict.get)
sortedDistances = sorted(distanceDict.values())
#print("sortedCoords={}, sortedDistances={}".format(sortedCoords, sortedDistances))
shortestDistance = None
for i in range(len(sortedCoords)):
if shortestDistance == None or sortedDistances[i] == shortestDistance:
nearestThingsCoordsList.append(sortedCoords[i])
shortestDistance = sortedDistances[i]
return(nearestThingsCoordsList)
def getPreferredMoveListRanked(moveDict):
"""
Returns a list of lists of the moves in order of the number of spaces
available. Eg. {'left': 8, 'right': 15, 'up': 15, 'down': 0}
will return [['right', 'up'], ['left'], ['down']]
This thing inverts the dictionary and the reverse sorts.
"""
temp = {}
for k, v in moveDict.items():
temp[v] = temp.get(v, [])
temp[v].append(k)
preferredList = [temp[k] for k in sorted(temp, reverse=True)]
return(preferredList)
``` |
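A standalone hedged restatement of the ranking trick in getPreferredMoveListRanked, using the docstring's own example values:

```python
# Invert {move: spaces} into {spaces: [moves]}, then sort keys descending.
moveDict = {'left': 8, 'right': 15, 'up': 15, 'down': 0}

temp = {}
for k, v in moveDict.items():
    temp.setdefault(v, []).append(k)
preferredList = [temp[k] for k in sorted(temp, reverse=True)]

print(preferredList)  # [['right', 'up'], ['left'], ['down']]
```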
{
"source": "jonknoll/pyAtemSim",
"score": 3
} |
#### File: jonknoll/pyAtemSim/atem_commands.py
```python
from os import truncate
import struct
import raw_commands
from typing import List
import atem_config
from atem_config import DEVICE_VIDEO_SOURCES
import datetime
import time
class ATEMCommand(object):
def __init__(self, bytes=b''):
self.bytes = bytes
self.length = None
self.code = type(self).__name__[-4:]
self.full = ""
self.time_to_send = 0
def parse_cmd(self):
"""
Parse the command into useful variables
"""
pass
def update_state(self):
"""
Updated the internal state of the switcher
"""
pass
def to_bytes(self):
"""
Build the command into a byte stream
"""
pass
def _build(self, content):
"""
        Boilerplate byte-stream assembly shared by all commands
"""
if self.length != None:
cmd_length = self.length
else:
cmd_length = len(content) + 8
self.length = cmd_length
cmd = struct.pack('!H 2x 4s', cmd_length, self.code.encode())
return cmd + content
class Cmd__ver(ATEMCommand):
def __init__(self, bytes=b''):
super().__init__(bytes=bytes)
self.full = "ProtocolVersion"
self.major = 2
self.minor = 30
def to_bytes(self):
content = struct.pack('!HH', self.major, self.minor)
self.bytes = self._build(content)
class Cmd__pin(ATEMCommand):
def __init__(self, bytes=b''):
super().__init__(bytes=bytes)
self.full = "ProductId"
self.product_name = "ATEM Television Studio HD"
def to_bytes(self):
content = struct.pack('!44s', self.product_name.encode())
self.bytes = self._build(content)
class Cmd_InCm(ATEMCommand):
def __init__(self, bytes=b''):
super().__init__(bytes=bytes)
self.length = 12
self.raw_hex = b'\x01\x00\x00\x00'
def to_bytes(self):
self.bytes = self._build(self.raw_hex)
######################################################
# COMMANDS FROM CLIENT
######################################################
# Auto Transition from client
class Cmd_DAut(ATEMCommand):
def __init__(self, bytes=b''):
super().__init__(bytes=bytes)
self.me = None
self.prog = None
self.prev = None
self.transition_pos = None
self.transition_total_frames = None
def parse_cmd(self):
self.length = len(self.bytes)
self.me = struct.unpack('!B', self.bytes[8:9])
self.me = self.me[0]
def update_state(self):
self.prog = atem_config.conf_db['MixEffectBlocks'][self.me]['Program']['input']
self.prev = atem_config.conf_db['MixEffectBlocks'][self.me]['Preview']['input']
self.transition_pos = int(atem_config.conf_db['MixEffectBlocks'][self.me]['TransitionStyle']['transitionPosition'])
transition_style = atem_config.conf_db['MixEffectBlocks'][self.me]['TransitionStyle']['style']
if transition_style == "Dip":
self.transition_total_frames = int(atem_config.conf_db['MixEffectBlocks'][self.me]['TransitionStyle']['DipParameters']['rate'])
elif transition_style == "Wipe":
self.transition_total_frames = int(atem_config.conf_db['MixEffectBlocks'][self.me]['TransitionStyle']['WipeParameters']['rate'])
else: # default to mix parameters
self.transition_total_frames = int(atem_config.conf_db['MixEffectBlocks'][self.me]['TransitionStyle']['MixParameters']['rate'])
def update_prog_prev(self):
atem_config.conf_db['MixEffectBlocks'][self.me]['Program']['input'] = self.prev
atem_config.conf_db['MixEffectBlocks'][self.me]['Preview']['input'] = self.prog
# Cut from client
class Cmd_DCut(ATEMCommand):
def __init__(self, bytes=b''):
super().__init__(bytes=bytes)
self.me = None
def parse_cmd(self):
self.length = len(self.bytes)
self.me = struct.unpack('!B', self.bytes[8:9])
self.me = self.me[0]
def update_state(self):
prog_source = atem_config.conf_db['MixEffectBlocks'][self.me]['Program']['input']
prev_source = atem_config.conf_db['MixEffectBlocks'][self.me]['Preview']['input']
atem_config.conf_db['MixEffectBlocks'][self.me]['Program']['input'] = prev_source
atem_config.conf_db['MixEffectBlocks'][self.me]['Preview']['input'] = prog_source
#print(f"me{self.me}={atem_config.conf_db['MixEffectBlocks'][self.me]}")
# Program Input from client (See also PrgI)
class Cmd_CPgI(ATEMCommand):
def __init__(self, bytes=b''):
super().__init__(bytes=bytes)
self.length = 12
self.me = None
self.video_source = None
def parse_cmd(self):
self.length = len(self.bytes)
self.me, self.video_source = struct.unpack('!B x H', self.bytes[8:12])
def update_state(self):
atem_config.conf_db['MixEffectBlocks'][self.me]['Program']['input'] = str(self.video_source)
#print(f"me{self.me}={atem_config.conf_db['MixEffectBlocks'][self.me]}")
# Preview Input from client, almost identical to Cmd_CPgI (See also PrvI)
class Cmd_CPvI(ATEMCommand):
def __init__(self, bytes=b''):
super().__init__(bytes=bytes)
self.length = 12
self.me = None
self.video_source = None
def parse_cmd(self):
self.length = len(self.bytes)
self.me, self.video_source = struct.unpack('!B x H', self.bytes[8:12])
def update_state(self):
atem_config.conf_db['MixEffectBlocks'][self.me]['Preview']['input'] = str(self.video_source)
#print(f"me{self.me}={atem_config.conf_db['MixEffectBlocks'][self.me]}")
######################################################
# COMMANDS TO CLIENT
######################################################
# Time sent to client
class Cmd_Time(ATEMCommand):
def __init__(self, offset_sec=0):
super().__init__(b'')
self.length = 16
self.offset_sec = offset_sec
def to_bytes(self):
video_mode = atem_config.conf_db['VideoMode']['videoMode']
if "5994" in video_mode:
frame_rate = 59.94
elif "2997" in video_mode:
frame_rate = 29.97
elif "2398" in video_mode:
frame_rate = 23.98
elif "50" in video_mode:
frame_rate = 50
elif "25" in video_mode:
frame_rate = 25
elif "24" in video_mode:
frame_rate = 24
t = datetime.datetime.now() + datetime.timedelta(seconds=int(self.offset_sec), microseconds=int((self.offset_sec % 1) * 1000000))
frame = int(t.microsecond / 1000000 * frame_rate)
content = struct.pack('!4B 4x', t.hour, t.minute, t.second, frame)
self.bytes = self._build(content)
# Tally By Index sent to client
class Cmd_TlIn(ATEMCommand):
def __init__(self, me=0):
super().__init__(b'')
self.me = me
self.program_source = int(atem_config.conf_db['MixEffectBlocks'][self.me]['Program']['input'])
self.preview_source = int(atem_config.conf_db['MixEffectBlocks'][self.me]['Preview']['input'])
self.transition_pos = int(atem_config.conf_db['MixEffectBlocks'][self.me]['TransitionStyle']['transitionPosition'])
self.num_inputs = len(atem_config.conf_db['Settings']['Inputs'])
def to_bytes(self):
# Build content
content = struct.pack('!H', self.num_inputs)
for i in range(self.num_inputs):
input_byte = 0x00
if self.program_source <= self.num_inputs and self.program_source == i + 1:
input_byte |= 0x01
if self.preview_source <= self.num_inputs and self.preview_source == i + 1:
input_byte |= 0x02
# If in mid transition then the preview source is also the program source.
# Transition range is 0-10000
if self.transition_pos > 0 and self.transition_pos < 10000:
input_byte |= 0x1
content += struct.pack('!B', input_byte)
# add the 2 unknown bytes
content += struct.pack('!2x')
self.bytes = self._build(content)
# Tally By Source sent to client
class Cmd_TlSr(ATEMCommand):
def __init__(self, me=0):
        super().__init__(b'')
self.length = 84
self.me = me
self.program_source = int(atem_config.conf_db['MixEffectBlocks'][self.me]['Program']['input'])
self.preview_source = int(atem_config.conf_db['MixEffectBlocks'][self.me]['Preview']['input'])
self.transition_pos = int(atem_config.conf_db['MixEffectBlocks'][self.me]['TransitionStyle']['transitionPosition'])
# the product determines how many sources there are
product = atem_config.conf_db['product']
self.video_sources = DEVICE_VIDEO_SOURCES[product]
self.num_sources = len(self.video_sources)
def to_bytes(self):
# Build content
content = struct.pack('!H', self.num_sources)
for i in range(self.num_sources):
source_byte = 0x00
if self.program_source == self.video_sources[i]:
source_byte |= 0x01
if self.preview_source == self.video_sources[i]:
source_byte |= 0x02
# If in mid transition then the preview source is also the program source.
# Transition range is 0-10000
if self.transition_pos > 0 and self.transition_pos < 10000:
source_byte |= 0x1
content += struct.pack('!HB', self.video_sources[i], source_byte)
# add the 2 unknown bytes
content += struct.pack('!2x')
self.bytes = self._build(content)
# Program Input to client (see also CPgI)
class Cmd_PrgI(ATEMCommand):
def __init__(self, me=0):
        super().__init__(b'')
self.me = me
self.program_source = int(atem_config.conf_db['MixEffectBlocks'][self.me]['Program']['input'])
def to_bytes(self):
content = struct.pack('!B x H', self.me, self.program_source)
self.bytes = self._build(content)
# Preview Input to client, almost identical to Cmd_PrgI (see also CPvI)
class Cmd_PrvI(ATEMCommand):
def __init__(self, me=0):
        super().__init__(b'')
self.me = me
self.preview_source = int(atem_config.conf_db['MixEffectBlocks'][self.me]['Preview']['input'])
def to_bytes(self):
content = struct.pack('!B x H 4x', self.me, self.preview_source)
self.bytes = self._build(content)
# Transition Position to client
class Cmd_TrPs(ATEMCommand):
def __init__(self, me=0, frames_remaining=None, total_frames=None):
        super().__init__(b'')
self.me = me
self.total_frames = total_frames
self.frames_remaining = frames_remaining
if frames_remaining > 255: # maximum size of the byte that it's going into
self.frames_remaining = 255
self.transition_pos = int((self.frames_remaining/self.total_frames) * 10000)
self.transition_pos = 10000 - self.transition_pos
atem_config.conf_db['MixEffectBlocks'][self.me]['TransitionStyle']['transitionPosition'] = str(self.transition_pos)
if self.frames_remaining == self.total_frames:
self.in_transition = 0
else:
self.in_transition = 1
def to_bytes(self):
content = struct.pack('!BBB x H 2x', self.me, self.in_transition, self.frames_remaining, self.transition_pos)
self.bytes = self._build(content)
class Cmd_Unknown(ATEMCommand):
def __init__(self, bytes, name=""):
super().__init__(bytes=bytes)
if name:
self.code = name
else:
self.code = "UNKN"
def parse_cmd(self):
pass
def to_bytes(self):
self.length = len(self.bytes)
class Cmd_Raw(ATEMCommand):
def __init__(self, bytes):
super().__init__(bytes=bytes)
def to_bytes(self):
self.length = len(self.bytes)
class CommandCarrier(object):
def __init__(self):
# array of commands to be sent in a packet
self.commands = []
# Can set to a future time (relative to monotonic clock)
# if it's a transition command where multiple commands
# have to be sent to update the state of the transition.
# Leave as 0 to be sent right away (default).
self.send_time = 0
# Set if the client object who receives the response,
# needs to send it to the client manager so it can be
# sent out to the other clients as well. Normally this
# is the case. There are just a few commands that are
# for the requesting client only.
self.multicast = True
# Packet id to ack when this response command(s) is sent back.
# This is more for the client to manage in the outbound_packet_list.
self.ack_packet_id = 0
def build_setup_commands_list():
raw_setup_commands = [
raw_commands.commands1,
raw_commands.commands2,
raw_commands.commands3,
raw_commands.commands4,
raw_commands.commands5,
raw_commands.commands6,
#raw_commands.commands7,
#raw_commands.commands8,
]
commands_list = []
for rsc in raw_setup_commands:
cmd_bytes = raw_commands.getByteStream(rsc)
cmd = Cmd_Raw(cmd_bytes)
commands_list.append(cmd)
return commands_list
# get commands list by piecing together the name of the class from the command you want
# and use:
# classname = "Cmd" + command_name.decode()
# if hasattr(atem_commands, classname):
# TheClass = getattr(atem_commands, classname)
#OR instance = getattr(atem_commands, classname)(class_params)
#
# This would be the manual way...
commands_list = {'_ver' : Cmd__ver,
'_pin' : Cmd__pin,
'DAut' : Cmd_DAut,
'DCut' : Cmd_DCut,
'CPgI' : Cmd_CPgI,
'CPvI' : Cmd_CPvI,
'InCm' : Cmd_InCm,
}
def build_current_state_command_list():
return_list = []
cmd = Cmd__ver()
return_list.append(cmd)
cmd = Cmd__pin()
return_list.append(cmd)
#...etc.
return return_list
def build_command_list_from_names(command_names: list):
return_list = []
    # Iterate over the names passed in, not the module-level commands_list.
    for cmd_name in command_names:
        # Pass by keyword: the first positional parameter is the bytes payload.
        new_cmd = get_command_object(cmd_name=cmd_name)
if new_cmd != None:
return_list.append(new_cmd)
return return_list
def get_command_object(bytes=b'', cmd_name=""):
cmd_class = commands_list.get(cmd_name)
if cmd_class is not None:
cmd_obj = cmd_class(bytes)
else:
cmd_obj = Cmd_Unknown(bytes, cmd_name)
return cmd_obj
def get_response(cmd_list:List[ATEMCommand]):
"""
Get the response command(s) for a list of commands
from a client packet. Typically a client sends only one
command at a time.
Returns a list of CommandCarrier objects.
The CommandCarrier object contains one or more response
commands as well as metadata for the command.
Typically there is only one CommandCarrier object returned,
containing one set of response commands to send
to the client(s). However, there may be more than one
CommandCarrier object if the response requires more
than one packet be sent a result of the command
(eg. a transition like fade to black).
If it is an unknown command then it returns an empty list.
"""
response_list = []
for cmd in cmd_list:
if isinstance(cmd, Cmd_DAut):
cmd.update_state()
now = time.monotonic()
time_offset_sec = 0
frames_total = cmd.transition_total_frames
frames_remaining = frames_total - 1
print(f"ME: {cmd.me}, AUTO TRANSITION")
# The transition position command object has to be created first so
# the transition position gets updated in the conf_db. The tally
# commands set two program sources based on whether the transition
# position is > 0.
trPs = Cmd_TrPs(cmd.me, frames_remaining, frames_total)
# create response packet
cc = CommandCarrier()
cc.commands.append(Cmd_Time(time_offset_sec))
cc.commands.append(Cmd_TlIn(cmd.me))
cc.commands.append(Cmd_TlSr(cmd.me))
cc.commands.append(Cmd_PrvI(cmd.me))
cc.commands.append(trPs)
response_list.append(cc)
# create future packets
while frames_remaining > 0:
# Send an update every 200ms which is every 6 "frames".
# The transition framerate seems to remain at 30fps.
                # This is far fewer updates than a real switcher sends, but gets a similar result.
frames_remaining -= 6
if frames_remaining <= 0:
frames_remaining = 0
break
cc = CommandCarrier()
time_offset_sec += 0.200 # create another update packet every 1/5th of a second
cc.send_time = now + time_offset_sec
cc.commands.append(Cmd_Time(time_offset_sec))
cc.commands.append(Cmd_TrPs(cmd.me, frames_remaining, frames_total))
response_list.append(cc)
# create last future packet
transition_pos = int((frames_remaining/frames_total) * 10000)
cmd.update_prog_prev()
cc = CommandCarrier()
cc.send_time = now + (frames_total / 30)
cc.commands.append(Cmd_TrPs(cmd.me, frames_remaining, frames_total))
            # create final trPs so the tallies show correctly based on the transition position
final_trPs = Cmd_TrPs(cmd.me, frames_total, frames_total)
cc.commands.append(Cmd_TlIn(cmd.me)) # Tally by Index
cc.commands.append(Cmd_TlSr(cmd.me)) # Tally by Source
cc.commands.append(Cmd_PrgI(cmd.me)) # Program Input (PrgI)
            cc.commands.append(Cmd_PrvI(cmd.me)) # Preview Input (PrvI)
cc.commands.append(final_trPs)
response_list.append(cc)
elif isinstance(cmd, Cmd_DCut):
cmd.update_state()
print(f"ME: {cmd.me}, CUT")
cc = CommandCarrier()
# Time
cc.commands.append(Cmd_Time()) # Time
cc.commands.append(Cmd_TlIn(cmd.me)) # Tally by Index
cc.commands.append(Cmd_TlSr(cmd.me)) # Tally by Source
cc.commands.append(Cmd_PrgI(cmd.me)) # Program Input (PrgI)
            cc.commands.append(Cmd_PrvI(cmd.me)) # Preview Input (PrvI)
response_list.append(cc)
elif isinstance(cmd, Cmd_CPgI):
cmd.update_state()
print(f"ME: {cmd.me}, Program Source: {cmd.video_source}")
cc = CommandCarrier()
cc.commands.append(Cmd_Time()) # Time
cc.commands.append(Cmd_TlIn(cmd.me)) # Tally by Index
cc.commands.append(Cmd_TlSr(cmd.me)) # Tally by Source
cc.commands.append(Cmd_PrgI(cmd.me)) # Program Input (PrgI)
response_list.append(cc)
elif isinstance(cmd, Cmd_CPvI):
cmd.update_state()
print(f"ME: {cmd.me}, Preview Source: {cmd.video_source}")
cc = CommandCarrier()
cc.commands.append(Cmd_Time()) # Time
cc.commands.append(Cmd_TlIn(cmd.me)) # Tally by Index
cc.commands.append(Cmd_TlSr(cmd.me)) # Tally by Source
cc.commands.append(Cmd_PrvI(cmd.me)) # Preview Input (PrvI)
response_list.append(cc)
else:
pass
return response_list
if __name__ == "__main__":
# Quick test
for cmd_name in commands_list:
cmd_class = commands_list[cmd_name]
        cmd_obj = cmd_class(b'')
``` |
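For reference, a hedged standalone repetition of the `_build` byte layout for the ProtocolVersion command (values taken from Cmd__ver above):

```python
import struct

# Payload: major/minor version as two big-endian uint16s.
content = struct.pack('!HH', 2, 30)
# Header: total length (including the 8 header bytes), 2 pad bytes, 4-byte code.
header = struct.pack('!H 2x 4s', len(content) + 8, b'_ver')
print((header + content).hex())  # 000c00005f7665720002001e
```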
{
"source": "JonKoala/diariobot-scraper",
"score": 3
} |
#### File: JonKoala/diariobot-scraper/ondemand.py
```python
import scraper
from db import Dbinterface
from db.models import Publicacao_Original
import argparse
import calendar
import os
##
# Utils
def get_dates(year, month):
num_days = calendar.monthrange(year, month)[1]
return ['-'.join([str(year), str(month), str(day)]) for day in range(1, num_days+1)]
##
# Command line arguments
parser = argparse.ArgumentParser()
parser.add_argument('year', type=int, help='Year to scrap')
parser.add_argument('month', type=int, help='Month to scrap')
year = parser.parse_args().year
month = parser.parse_args().month
##
# Scrap routine
print('starting scraping routine')
publicacoes = []
dates = get_dates(year, month)
for date in dates:
print('scraping {}'.format(date))
publicacoes += scraper.scrap(date)
##
# Persist results
print('persisting on database')
dbi = Dbinterface(os.environ['DIARIOBOT_DATABASE_CONNECTIONSTRING'])
with dbi.opensession() as session:
for publicacao in publicacoes:
entry = Publicacao_Original(**publicacao)
session.add(entry)
session.commit()
```
#### File: JonKoala/diariobot-scraper/routine.py
```python
import os
from datetime import datetime
import scraper
from db import Dbinterface
from db.models import Publicacao_Original
def routine(*dates):
results = _scrap_dates(dates)
if len(results) == 0:
return
with Dbinterface(os.environ['DIARIOBOT_DATABASE_CONNECTIONSTRING']).opensession() as session:
for publicacao in _get_publicacoes(results):
entry = Publicacao_Original(**publicacao)
session.add(entry)
session.commit()
def _scrap_dates(dates):
return scraper.scrap(dates[0]) + _scrap_dates(dates[1:]) if len(dates) > 0 else []
def _get_publicacoes(results):
for result in results:
edicao = { 'edicao': result['id'], 'numero': result['numero'], 'data': datetime.strptime(result['date'], '%Y-%m-%d').strftime('%d/%m/%Y') }
for item in result['publicacoes']:
summary = _get_summary(item['summary_stack'])
publicacao = { 'materia': item['title'], 'identificador': item['identificador'], 'corpo': item['body'] }
yield { **edicao, **summary, **publicacao, 'fonte': 'ioes' }
def _get_summary(summary_stack):
if len(summary_stack) < 3:
raise Exception('unsupported summary: {}'.format(summary_stack))
categoria, orgao = summary_stack[:2]
tipo, = summary_stack[-1:]
suborgao = summary_stack[2] if len(summary_stack) > 3 else ''
return { 'categoria': categoria, 'orgao': orgao, 'suborgao': suborgao, 'tipo': tipo }
``` |
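A self-contained hedged restatement of the slicing inside `_get_summary` (the strings are invented placeholders, not real diary categories):

```python
# A 4-element stack fills suborgao; a 3-element stack leaves it ''.
stack = ['Categoria X', 'Orgao Y', 'Suborgao Z', 'Tipo W']

categoria, orgao = stack[:2]
tipo, = stack[-1:]
suborgao = stack[2] if len(stack) > 3 else ''
print({'categoria': categoria, 'orgao': orgao, 'suborgao': suborgao, 'tipo': tipo})
# {'categoria': 'Categoria X', 'orgao': 'Orgao Y', 'suborgao': 'Suborgao Z', 'tipo': 'Tipo W'}
```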
{
"source": "JonKoala/DIOES-FeatureExtraction",
"score": 3
} |
#### File: DIOES-FeatureExtraction/classification/classifier.py
```python
import numpy as np
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.linear_model import SGDClassifier
from sklearn.pipeline import Pipeline
class Classifier:
train_steps = [('vectorizer', TfidfVectorizer()), ('classifier', SGDClassifier())]
model_default_params = {'vectorizer__sublinear_tf': True}
@classmethod
def default_pipeline(cls):
pipeline = Pipeline(cls.train_steps)
pipeline.set_params(**cls.model_default_params)
return pipeline
def __init__(self, params={}, stop_words=[]):
self.user_params = params
self.stop_words = stop_words
@property
def params(self):
return {**Classifier.model_default_params, 'vectorizer__stop_words': self.stop_words, **self.user_params}
@property
def pipeline(self):
pipeline = Classifier.default_pipeline()
pipeline.set_params(**self.params)
return pipeline
@property
def classifier(self):
return self.model.named_steps['classifier']
@property
def vectorizer(self):
return self.model.named_steps['vectorizer']
@property
def classes(self):
return list(self.classifier.classes_)
@property
def features_names(self):
return self.vectorizer.get_feature_names()
def train(self, data, target):
self.model = self.pipeline
self.model.fit(data, target)
def predict(self, data):
return self.model.predict(data)
def get_class_features_weights(self, classe):
classe_index = self.classes.index(classe)
return self.classifier.coef_[classe_index]
def get_class_keywords(self, classe, count):
sorted_features_indexes = np.argsort(self.get_class_features_weights(classe))
top_features_indexes = sorted_features_indexes[-1*count:]
keywords = [self.features_names[feature_index] for feature_index in top_features_indexes]
return keywords
```
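A hedged usage sketch of the `Classifier` wrapper on an invented toy corpus (assumes scikit-learn is installed and `from classification import Classifier`):

```python
# Train on a tiny invented corpus, then predict and inspect class keywords.
docs = ['licitacao de obras publicas', 'contrato de obras',
        'nomeacao de servidor', 'exoneracao de servidor publico']
labels = [1, 1, 2, 2]

clf = Classifier(stop_words=['de'])
clf.train(docs, labels)
print(clf.predict(['contrato de obras publicas']))  # likely [1]
print(clf.get_class_keywords(1, 3))                 # top-3 features for class 1
```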
#### File: DIOES-FeatureExtraction/classification/dataset.py
```python
class Dataset(list):
def __init__(self, entries=[]):
list.extend(self, entries)
@property
def data(self):
return [entry.data for entry in self]
@property
def target(self):
return [entry.target for entry in self]
class DatasetEntry:
def __init__(self, id, data, target):
self.id = id
self.data = data
self.target = target
def __repr__(self):
return '<DatasetEntry(id={}, data={}, target={})>'.format(self.id, self.data, self.target)
```
#### File: DIOES-FeatureExtraction/classification/vectorizer.py
```python
from sklearn.feature_extraction.text import TfidfVectorizer
class Vectorizer:
def __init__(self, corpus, stopwords=None):
self.vectorspace = TfidfVectorizer(strip_accents='ascii', stop_words=stopwords)
self.corpus = corpus
def _learn(self, data):
self.vectorspace.fit(data)
self.features = self.vectorspace.get_feature_names()
def transform(self, data):
return self.vectorspace.transform(data)
@property
def corpus(self):
return None
@corpus.setter
def corpus(self, value):
self._learn(value)
```
#### File: db/models/keyword_backlisted.py
```python
from . import Base
from sqlalchemy import Column, Integer, String
class Keyword_Backlisted(Base):
__tablename__ = 'Keyword_Backlisted'
id = Column(Integer, primary_key=True)
palavra = Column(String)
def __repr__(self):
        return '<Keyword_Backlisted(id={}, palavra={})>'.format(self.id, self.palavra)
```
#### File: db/models/keyword.py
```python
from . import Base
from sqlalchemy import Column, Integer, String, Numeric, Date, ForeignKey
from sqlalchemy.orm import relationship, synonym
class Keyword(Base):
__tablename__ = 'Keyword'
id = Column(Integer, primary_key=True)
classe_id = Column(Integer, ForeignKey('Classe.id'))
palavra = Column(String)
def __repr__(self):
return '<Keyword(id={}, classe_id={}, palavra={})>'.format(self.id, self.classe_id, self.palavra)
```
#### File: JonKoala/DIOES-FeatureExtraction/logsetup.py
```python
import logging
import logging.handlers
def init(filepath, logtoconsole=True, filehandler_config={}):
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
# creating file handler
fh = logging.handlers.RotatingFileHandler(filepath, **filehandler_config)
fh.setLevel(logging.DEBUG)
    _skipline(logger, fh) # add a new line separating each execution
fh.setFormatter(logging.Formatter('%(asctime)s %(levelname)-8s [%(filename)s:%(lineno)d] %(message)s'))
# creating console handler
ch = logging.StreamHandler()
ch.setLevel(logging.INFO)
ch.setFormatter(logging.Formatter('%(levelname)s [%(filename)s:%(lineno)d] %(message)s'))
logger.addHandler(fh)
if logtoconsole:
logger.addHandler(ch)
def _skipline(logger, fh):
fh.setFormatter(logging.Formatter(''))
logger.addHandler(fh)
logging.info('\n')
```
#### File: DIOES-FeatureExtraction/nlp/preprocessor.py
```python
import inspect
import re
from nltk.stem.snowball import PortugueseStemmer
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.feature_extraction.text import strip_accents_ascii
class Preprocessor:
def __init__(self):
self.stemmer = PortugueseStemmer()
self.token_pattern = inspect.signature(TfidfVectorizer).parameters['token_pattern'].default
self.regex = re.compile(self.token_pattern)
def stem(self, token):
return self.stemmer.stem(token)
def tokenize(self, document):
return self.regex.findall(document)
def strip_accents(self, entry):
return strip_accents_ascii(entry)
def lowercase(self, entry):
return entry.lower()
def build_tokenizer(self, stem=True, strip_accents=True, lowercase=True):
null_call = lambda x: x
stem_call = self.stem if stem else null_call
strip_accents_call = self.strip_accents if strip_accents else null_call
lowercase_call = self.lowercase if lowercase else null_call
tokenize_call = lambda document: self.tokenize(strip_accents_call(lowercase_call(document)))
return lambda document: [stem_call(token) for token in tokenize_call(document)]
```
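A hedged demo of `build_tokenizer` (assumes nltk's Portuguese Snowball stemmer is available and `from nlp import Preprocessor`; the sentence is illustrative):

```python
prep = Preprocessor()
tokenizer = prep.build_tokenizer()  # stem + strip accents + lowercase
# Prints stemmed, accent-stripped, lowercased tokens (exact stems depend on nltk).
print(tokenizer('Publicação de contratos'))
```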
#### File: JonKoala/DIOES-FeatureExtraction/routine_extract_patterns.py
```python
import inout
from db import Dbinterface
from db.models import Publicacao, Predicao_Classificacao, Predicao_Regex
import argparse
import os
import re
from decimal import Decimal
##
# Command line arguments
parser = argparse.ArgumentParser()
parser.add_argument('--full', help='Extract patterns for the whole database', action='store_true')
reset_base = parser.parse_args().full
##
# Utils
# regex patterns
re_money = re.compile(r'\$:*\s*[1-9][\d\.,]*\d')
re_fractional_part = re.compile(r'[\.,]\d{1,2}$')
def to_decimal(match):
treated = ''.join(match.group(0).split()).replace('$', '').replace(':', '')
integer_match = re.split(re_fractional_part, treated)
    integer_part = re.sub(r'[\.,]', '', integer_match[0])
    fractional_match = re.search(re_fractional_part, treated)
    fractional_part = re.sub(r'[\.,]', '', fractional_match.group(0)) if fractional_match else '00'
return Decimal('{}.{}'.format(integer_part, fractional_part))
##
# Get resources
dbi = Dbinterface(os.environ['DIARIOBOT_DATABASE_CONNECTIONSTRING'])
with dbi.opensession() as session:
to_extract = session.query(Predicao_Classificacao).join(Predicao_Classificacao.publicacao)
if not reset_base:
already_extracted = session.query(Predicao_Regex.publicacao_id)
to_extract = to_extract.filter(Predicao_Classificacao.publicacao_id.notin_(already_extracted))
data = [(predicao.publicacao.id, predicao.publicacao.corpo) for predicao in to_extract]
##
# Find Regex patterns (for now, just biggest value)
results = []
for index, publicacao in enumerate(data):
# get values from publicacao
iter_matches = re.finditer(re_money, publicacao[1])
values = [to_decimal(match) for match in iter_matches]
# get biggest value
values.sort(reverse=True)
biggest = next(iter(values), None)
results += [(publicacao[0], biggest)]
##
# Persist patterns on database
with dbi.opensession() as session:
# clean old entries
if reset_base:
session.query(Predicao_Regex).delete()
session.flush()
# insert new patterns
for result in results:
predicao_regex = Predicao_Regex(publicacao_id=result[0], valor=result[1])
session.add(predicao_regex)
session.commit()
```
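A self-contained hedged walk-through of the money regex and the `to_decimal` conversion above (the sample sentence is invented):

```python
import re
from decimal import Decimal

re_money = re.compile(r'\$:*\s*[1-9][\d\.,]*\d')
re_fractional_part = re.compile(r'[\.,]\d{1,2}$')

text = 'valor global de R$ 1.234,56 e sinal de R$ 200,00'
for m in re_money.finditer(text):
    treated = ''.join(m.group(0).split()).replace('$', '').replace(':', '')
    integer_part = re.sub(r'[\.,]', '', re.split(re_fractional_part, treated)[0])
    frac = re_fractional_part.search(treated)
    fractional_part = re.sub(r'[\.,]', '', frac.group(0)) if frac else '00'
    print(Decimal('{}.{}'.format(integer_part, fractional_part)))
# 1234.56
# 200.00
```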
#### File: JonKoala/DIOES-FeatureExtraction/routine_predict.py
```python
import inout
from classification import Classifier, Dataset, DatasetEntry, evaluation
from db import Dbinterface
from db.models import Classe, Classificacao, Keyword_Backlisted, Keyword, Predicao_Classificacao, Publicacao
from nlp import Preprocessor
from utils import classe_filters, get_tunning_params, publicacao_tipo_filters
import argparse
import numpy as np
import os
import re
##
# Command line arguments
parser = argparse.ArgumentParser()
parser.add_argument('--full', help='Predict classes for the whole database', action='store_true')
reset_base = parser.parse_args().full
##
# Utils
def remove_numbers(text):
return re.sub(r'\S*\d\S*', ' ', text)
##
# Get resources
dbi = Dbinterface(os.environ['DIARIOBOT_DATABASE_CONNECTIONSTRING'])
with dbi.opensession() as session:
# get data to predict
to_predict = session.query(Publicacao).filter(Publicacao.tipo.in_(publicacao_tipo_filters))
if not reset_base:
already_predicted = session.query(Predicao_Classificacao.publicacao_id)
to_predict = to_predict.filter(Publicacao.id.notin_(already_predicted))
to_predict = [(publicacao.id, publicacao.corpo) for publicacao in to_predict]
if len(to_predict) < 1:
print('nothing to predict')
quit()
blacklist = list(session.query(Keyword_Backlisted.palavra))
classes = list(session.query(Classe).filter(Classe.nome.in_(classe_filters)))
# get crowdsourced data
training_dataset = session.query(Publicacao).join(Publicacao.classificacao).filter(Classificacao.classe_id.in_(classe.id for classe in classes))
training_dataset = Dataset([DatasetEntry(publicacao.id, remove_numbers(publicacao.corpo), publicacao.classificacao.classe_id) for publicacao in training_dataset])
stopwords = inout.read_json('./stopwords')
blacklist = stopwords + [entry[0] for entry in blacklist]
##
# preprocess stopwords
# i need to preprocess my stopwords (blacklist). Scikit will remove stopwords AFTER the tokenization process
# source: https://github.com/scikit-learn/scikit-learn/blob/a24c8b46/sklearn/feature_extraction/text.py#L265
prep = Preprocessor()
preprocess = lambda x: prep.strip_accents(prep.lowercase(x))
tokenize = prep.build_tokenizer(strip_accents=False, lowercase=False)
# repeat the stemming process until i have every variation of my stopwords
blacklist = set([preprocess(word) for word in blacklist])
while True:
len_blacklist = len(blacklist)
tokenized_blacklist = tokenize(' '.join(blacklist))
blacklist.update(tokenized_blacklist)
if (len_blacklist == len(blacklist)):
break
blacklist = list(blacklist)
##
# Train the model
hyperparams = {'vectorizer__preprocessor': preprocess, 'vectorizer__tokenizer': tokenize, **get_tunning_params()}
classifier = Classifier(hyperparams, blacklist)
classifier.train(training_dataset.data, training_dataset.target)
##
# Predict classes
ids, corpus = zip(*to_predict)
predictions = classifier.predict(corpus)
results = zip(ids, predictions)
##
# Get Keywords
classes_keywords = [(classe, reversed(classifier.get_class_keywords(classe, 25))) for classe in classifier.classes]
##
# Persist results on database
with dbi.opensession() as session:
# clean old entries
session.query(Keyword).delete()
if reset_base:
session.query(Predicao_Classificacao).delete()
session.flush()
# insert predicoes
for result in results:
predicao = Predicao_Classificacao(publicacao_id=result[0], classe_id=np.asscalar(result[1]))
session.add(predicao)
# insert keywords
for classe_keywords in classes_keywords:
classe = classe_keywords[0]
keywords = classe_keywords[1]
for keyword in keywords:
entry = Keyword(classe_id=np.asscalar(classe), palavra=keyword)
session.add(entry)
session.commit()
```
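The stopword handling above exists because scikit-learn removes stopwords only after tokenization, so the blacklist has to pass through the same normalization as the corpus. A minimal sketch of the same idea with a plain `CountVectorizer` (assumes scikit-learn is installed; the corpus and stopwords are made up):
```python
import unicodedata
from sklearn.feature_extraction.text import CountVectorizer

def preprocess(text):
    # Lowercase and strip accents, mirroring the Preprocessor usage above.
    text = text.lower()
    return ''.join(c for c in unicodedata.normalize('NFKD', text)
                   if not unicodedata.combining(c))

stopwords = ['Licitação', 'Pregão']
normalized = [preprocess(w) for w in stopwords]  # must match post-tokenization form

corpus = ['Aviso de licitação: pregão eletrônico para aquisição de material']
vec = CountVectorizer(preprocessor=preprocess, stop_words=normalized)
print(vec.fit(corpus).get_feature_names_out())
# the stopwords are removed only because they were normalized the same way
```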
#### File: DIOES-FeatureExtraction/utils/tunning_parameters_parser.py
```python
import os
def _is_integer(val):
try:
int(val)
return True
except ValueError:
return False
def _is_float(val):
try:
float(val)
return not _is_integer(val)
except ValueError:
return False
def _is_bool(val):
return str.casefold(val) == 'true' or str.casefold(val) == 'false'
def _is_null(val):
return str.casefold(val) == 'null' or str.casefold(val) == 'none'
def _parse_val(val):
if _is_integer(val):
return int(val)
if _is_float(val):
return float(val)
if _is_bool(val):
return str.casefold(val) == 'true'
if _is_null(val):
return None
return val
def get_tunning_params():
param_prefix = 'DIARIOBOT_DM_PARAM_'
len_prefix = len(param_prefix)
params = {}
gen = (key for key in os.environ.keys() if key.startswith(param_prefix))
for key in gen:
parsed_key = str.casefold(key[len_prefix:])
        params[parsed_key] = _parse_val(os.environ[key])
return params
``` |
{
"source": "JonKoala/sapo-tools",
"score": 3
} |
#### File: sapo-tools/scraper/digester.py
```python
import json
##
#CONSTANTS
CSV_HEADER = ['Município', 'Poder', 'Problema', 'api', 'periodo', 'url', 'teste']
def digest(data):
rows = [CSV_HEADER]
rows += _build_rows(data)
return rows
##
#UTILS
def _build_rows(data):
rows = []
for result in data:
profile = result['profile']
tests = result['test_results']
issues = []
issues += [_build_row(profile, test, 'dado inacessível') for test in tests['inacessible']]
issues += [_build_row(profile, test, 'ausência de dados') for test in tests['lack_of_data']]
issues += [_build_row(profile, test, 'dados falsos') for test in tests['fake_data']]
rows += issues if len(issues) > 0 else [_build_row(profile)]
rows = sorted(rows, key=lambda row: ''.join(row))
return rows
def _build_row(profile, test=None, test_name=None):
row = [profile['municipio'], profile['poder']]
if test:
row += [test_name, test['api'], '{}/{}'.format(test['ano'], test['mes']), test['url'], json.dumps(test)]
else:
row += ['nenhum'] + [''] * 4
return row
```
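For reference, `digest` can be fed a minimal hand-built result; the dict layout below is inferred from `_build_rows`, and every value is hypothetical:
```python
# Assumes digester.py (above) is importable from the working directory.
from digester import digest

sample = [{
    'profile': {'municipio': 'Vitória', 'poder': 'executivo'},
    'test_results': {
        'inacessible': [{'api': 'despesas', 'ano': 2020, 'mes': 1,
                         'url': 'http://example.com/api'}],
        'lack_of_data': [],
        'fake_data': [],
    },
}]
for row in digest(sample):
    print(row)  # header row, then one row per detected issue
```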
#### File: sapo-tools/scraper/routine.py
```python
import re
import inout
import digester
from appconfig import settings
from scraper import factory
from logger import log
##
#CONSTANTS
SOURCE_FILE = 'source.csv'
OUTPUT_FOLDER = 'output'
RAW_RESULT_FILE = OUTPUT_FOLDER + '/results.json'
DIGESTED_RESULT_FILE = OUTPUT_FOLDER + '/results.csv'
def filter_source_data(source, model_regex):
source_data = []
for row in source:
url_executivo = re.search(model_regex, row[2])
if url_executivo:
source_data += [dict(municipio=row[0],poder='executivo',url=url_executivo[0])]
url_legislativo = re.search(model_regex, row[4])
if url_legislativo:
source_data += [dict(municipio=row[0],poder='legislativo',url=url_legislativo[0])]
return source_data
##
#ROUTINE
#prep
test_cases = settings['dates']
EL_Tester = factory.get_tester('el')
log('\n - starting new scraper routine -')
log('extracting data from source (\'{}\')'.format(SOURCE_FILE))
raw_csv_input = inout.read_csv(SOURCE_FILE)
el_csv_input = filter_source_data(raw_csv_input, EL_Tester.MODEL_REGEX)
log('done extracting data ({} test cases)'.format(len(el_csv_input)))
log('executing test routines')
results = []
for index, data in enumerate(el_csv_input):
try:
log('testing {}th case ({} - {})'.format(index+1, data['municipio'], data['poder']))
tester = EL_Tester(data['url'], data['poder'], test_cases) #for now we only have EL as a test case
result = tester.execute_full_routine()
result['profile'] = data
results += [result]
    except Exception:
log('failed testing {}th case ({} - {})'.format(index+1, data['municipio'], data['poder']), logtype='exception')
log('done executing test routines')
log('printing raw results to file (\'{}\')'.format(RAW_RESULT_FILE))
inout.write_json(RAW_RESULT_FILE, results)
log('digesting results')
digested = digester.digest(results)
log('printing digested results to file (\'{}\')'.format(DIGESTED_RESULT_FILE))
inout.write_csv(DIGESTED_RESULT_FILE, digested)
log('\n - ending scraper routine - \n')
```
#### File: scraper/scraper/factory.py
```python
import scraper.el as el
_TESTERS = {
'el': el.tester.Tester
}
def get_tester(model):
model = model.lower()
return _TESTERS[model]
``` |
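The factory is a plain registry keyed by a lowercase model name; unknown models surface as a `KeyError`. A standalone miniature of the pattern (the class and regex are placeholders):
```python
class ElTester:
    MODEL_REGEX = r'transparencia'  # placeholder pattern

_TESTERS = {'el': ElTester}

def get_tester(model):
    # Case-insensitive lookup; raises KeyError for unregistered models.
    return _TESTERS[model.lower()]

assert get_tester('EL') is ElTester
```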
{
"source": "JonKohler/python-jsonschema-objects",
"score": 2
} |
#### File: python-jsonschema-objects/test/test_util_pytest.py
```python
import pytest
from python_jsonschema_objects.validators import ValidationError
from python_jsonschema_objects.wrapper_types import ArrayWrapper
@pytest.mark.parametrize('kwargs', [
{},
{'item_constraint': {'type': 'string'}},
{'item_constraint': [
{'type': 'string'}, {'type': 'string'}]}
])
def test_ArrayValidator_initializer(kwargs):
assert ArrayWrapper.create('hello', **kwargs)
def test_ArrayValidator_throws_error_if_not_classes_or_dicts():
with pytest.raises(TypeError):
ArrayWrapper.create('hello', item_constraint=['winner'])
def test_validate_basic_array_types():
validator = ArrayWrapper.create(
'test',
item_constraint={'type': 'number'}
)
instance = validator([1, 2, 3, 4])
instance.validate()
instance = validator([1, 2, "Hello"])
with pytest.raises(ValidationError):
instance.validate()
def test_validate_basic_tuple__types():
validator = ArrayWrapper.create(
'test',
item_constraint=[{'type': 'number'}, {'type': 'number'}]
)
instance = validator([1, 2, 3, 4])
instance.validate()
instance = validator([1, "Hello"])
with pytest.raises(ValidationError):
instance.validate()
def test_validate_arrays_with_object_types(Person):
validator = ArrayWrapper.create(
'test',
item_constraint=Person
)
instance = validator([{'firstName': 'winner', 'lastName': 'Smith'}])
instance.validate()
instance = validator([{'firstName': 'winner', 'lastName': 'Dinosaur'}, {'firstName': 'BadMan'}])
with pytest.raises(ValidationError):
instance.validate()
def test_validate_arrays_with_mixed_types(Person):
validator = ArrayWrapper.create(
'test',
item_constraint=[Person, {'type': 'number'}]
)
instance = validator([{'firstName': 'winner', 'lastName': 'Dinosaur'}, 'fried'])
with pytest.raises(ValidationError):
instance.validate()
instance = validator([{'firstName': 'winner', 'lastName': 'Dinosaur'}, 12324])
instance.validate()
def test_validate_arrays_nested():
validator = ArrayWrapper.create(
'test',
item_constraint={'type': 'array', 'items': {'type': 'integer'}}
)
instance = validator([[1, 2, 4, 5], [1, 2, 4]])
instance.validate()
instance = validator([[1, 2, 'h', 5], [1, 2, 4]])
with pytest.raises(ValidationError):
instance.validate()
instance = validator([[1, 2, 'h', 5], [1, 2, '4']])
with pytest.raises(ValidationError):
instance.validate()
def test_validate_arrays_length():
validator = ArrayWrapper.create(
'test',
minItems=1,
maxItems=3
)
instance = validator(range(1))
instance.validate()
instance = validator(range(2))
instance.validate()
instance = validator(range(3))
instance.validate()
instance = validator(range(4))
with pytest.raises(ValidationError):
instance.validate()
instance = validator([])
with pytest.raises(ValidationError):
instance.validate()
def test_validate_arrays_uniqueness():
validator = ArrayWrapper.create(
'test',
uniqueItems=True
)
instance = validator([])
instance.validate()
instance = validator([1, 2, 3, 4])
instance.validate()
instance = validator([1, 2, 2, 4])
with pytest.raises(ValidationError):
instance.validate()
``` |
{
"source": "jonkoi/deep-RL-trading",
"score": 3
} |
#### File: deep-RL-trading/src/agents.py
```python
from lib import *
class Agent:
def __init__(self, model,
batch_size=32, discount_factor=0.95):
self.model = model
self.batch_size = batch_size
self.discount_factor = discount_factor
self.memory = []
def remember(self, state, action, reward, next_state, done, next_valid_actions):
self.memory.append((state, action, reward, next_state, done, next_valid_actions))
def replay(self):
batch = random.sample(self.memory, min(len(self.memory), self.batch_size))
for state, action, reward, next_state, done, next_valid_actions in batch:
q = reward
if not done:
q += self.discount_factor * np.nanmax(self.get_q_valid(next_state, next_valid_actions))
self.model.fit(state, action, q)
def get_q_valid(self, state, valid_actions):
q = self.model.predict(state)
q_valid = [np.nan] * len(q)
for action in valid_actions:
q_valid[action] = q[action]
return q_valid
def act(self, state, exploration, valid_actions):
if np.random.random() > exploration:
q_valid = self.get_q_valid(state, valid_actions)
# print("q_valid", q_valid)
if np.nanmin(q_valid) != np.nanmax(q_valid):
return np.nanargmax(q_valid)
return random.sample(valid_actions, 1)[0]
def save(self, fld):
makedirs(fld)
attr = {
'batch_size':self.batch_size,
'discount_factor':self.discount_factor,
#'memory':self.memory
}
pickle.dump(attr, open(os.path.join(fld, 'agent_attr.pickle'),'wb'))
self.model.save(fld)
def load(self, fld):
path = os.path.join(fld, 'agent_attr.pickle')
print(path)
attr = pickle.load(open(path,'rb'))
for k in attr:
setattr(self, k, attr[k])
self.model.load(fld)
def add_dim(x, shape):
return np.reshape(x, (1,) + shape)
class QModelKeras:
# ref: https://keon.io/deep-q-learning/
def init(self):
pass
def build_model(self):
pass
def __init__(self, state_shape, n_action):
self.state_shape = state_shape
self.n_action = n_action
self.attr2save = ['state_shape','n_action','model_name']
self.init()
def save(self, fld):
makedirs(fld)
with open(os.path.join(fld, 'model.json'), 'w') as json_file:
json_file.write(self.model.to_json())
self.model.save_weights(os.path.join(fld, 'weights.hdf5'))
attr = dict()
for a in self.attr2save:
attr[a] = getattr(self, a)
pickle.dump(attr, open(os.path.join(fld, 'Qmodel_attr.pickle'),'wb'))
def load(self, fld, learning_rate):
json_str = open(os.path.join(fld, 'model.json')).read()
self.model = keras.models.model_from_json(json_str)
self.model.load_weights(os.path.join(fld, 'weights.hdf5'))
self.model.compile(loss='mse', optimizer=keras.optimizers.Adam(lr=learning_rate))
attr = pickle.load(open(os.path.join(fld, 'Qmodel_attr.pickle'), 'rb'))
for a in attr:
setattr(self, a, attr[a])
def predict(self, state):
q = self.model.predict(
add_dim(state, self.state_shape)
)[0]
if np.isnan(max(q)):
print('state'+str(state))
print('q'+str(q))
raise ValueError
return q
def fit(self, state, action, q_action):
q = self.predict(state)
q[action] = q_action
self.model.fit(
add_dim(state, self.state_shape),
add_dim(q, (self.n_action,)),
epochs=1, verbose=0)
class QModelMLP(QModelKeras):
# multi-layer perception (MLP), i.e., dense only
def init(self):
self.qmodel = 'MLP'
def build_model(self, n_hidden, learning_rate, activation='relu'):
model = keras.models.Sequential()
model.add(keras.layers.Reshape(
(self.state_shape[0]*self.state_shape[1],),
input_shape=self.state_shape))
for i in range(len(n_hidden)):
model.add(keras.layers.Dense(n_hidden[i], activation=activation))
#model.add(keras.layers.Dropout(drop_rate))
model.add(keras.layers.Dense(self.n_action, activation='linear'))
model.compile(loss='mse', optimizer=keras.optimizers.Adam(lr=learning_rate))
self.model = model
self.model_name = self.qmodel + str(n_hidden)
class QModelRNN(QModelKeras):
"""
https://keras.io/getting-started/sequential-model-guide/#example
note param doesn't grow with len of sequence
"""
def _build_model(self, Layer, n_hidden, dense_units, learning_rate, activation='relu'):
model = keras.models.Sequential()
model.add(keras.layers.Reshape(self.state_shape, input_shape=self.state_shape))
m = len(n_hidden)
for i in range(m):
model.add(Layer(n_hidden[i],
return_sequences=(i<m-1)))
for i in range(len(dense_units)):
model.add(keras.layers.Dense(dense_units[i], activation=activation))
model.add(keras.layers.Dense(self.n_action, activation='linear'))
model.compile(loss='mse', optimizer=keras.optimizers.Adam(lr=learning_rate))
self.model = model
self.model_name = self.qmodel + str(n_hidden) + str(dense_units)
class QModelLSTM(QModelRNN):
def init(self):
self.qmodel = 'LSTM'
def build_model(self, n_hidden, dense_units, learning_rate, activation='relu'):
Layer = keras.layers.LSTM
self._build_model(Layer, n_hidden, dense_units, learning_rate, activation)
class QModelGRU(QModelRNN):
def init(self):
self.qmodel = 'GRU'
def build_model(self, n_hidden, dense_units, learning_rate, activation='relu'):
Layer = keras.layers.GRU
self._build_model(Layer, n_hidden, dense_units, learning_rate, activation)
class QModelConv(QModelKeras):
"""
ref: https://keras.io/layers/convolutional/
"""
def init(self):
self.qmodel = 'Conv'
def build_model(self,
filter_num, filter_size, dense_units,
learning_rate, activation='relu', dilation=None, use_pool=None):
if use_pool is None:
use_pool = [True]*len(filter_num)
if dilation is None:
dilation = [1]*len(filter_num)
model = keras.models.Sequential()
model.add(keras.layers.Reshape(self.state_shape, input_shape=self.state_shape))
for i in range(len(filter_num)):
model.add(keras.layers.Conv1D(filter_num[i], kernel_size=filter_size[i], dilation_rate=dilation[i],
activation=activation, use_bias=True))
if use_pool[i]:
model.add(keras.layers.MaxPooling1D(pool_size=2))
model.add(keras.layers.Flatten())
for i in range(len(dense_units)):
model.add(keras.layers.Dense(dense_units[i], activation=activation))
model.add(keras.layers.Dense(self.n_action, activation='linear'))
model.compile(loss='mse', optimizer=keras.optimizers.Adam(lr=learning_rate))
self.model = model
self.model_name = self.qmodel + str([a for a in
zip(filter_num, filter_size, dilation, use_pool)
])+' + '+str(dense_units)
class QModelConvRNN(QModelKeras):
"""
https://keras.io/getting-started/sequential-model-guide/#example
note param doesn't grow with len of sequence
"""
def _build_model(self, RNNLayer, conv_n_hidden, RNN_n_hidden, dense_units, learning_rate,
conv_kernel_size=3, use_pool=False, activation='relu'):
model = keras.models.Sequential()
model.add(keras.layers.Reshape(self.state_shape, input_shape=self.state_shape))
for i in range(len(conv_n_hidden)):
model.add(keras.layers.Conv1D(conv_n_hidden[i], kernel_size=conv_kernel_size,
activation=activation, use_bias=True))
if use_pool:
model.add(keras.layers.MaxPooling1D(pool_size=2))
m = len(RNN_n_hidden)
for i in range(m):
model.add(RNNLayer(RNN_n_hidden[i],
return_sequences=(i<m-1)))
for i in range(len(dense_units)):
model.add(keras.layers.Dense(dense_units[i], activation=activation))
model.add(keras.layers.Dense(self.n_action, activation='linear'))
model.compile(loss='mse', optimizer=keras.optimizers.Adam(lr=learning_rate))
self.model = model
self.model_name = self.qmodel + str(conv_n_hidden) + str(RNN_n_hidden) + str(dense_units)
class QModelConvLSTM(QModelConvRNN):
def init(self):
self.qmodel = 'ConvLSTM'
def build_model(self, conv_n_hidden, RNN_n_hidden, dense_units, learning_rate,
conv_kernel_size=3, use_pool=False, activation='relu'):
Layer = keras.layers.LSTM
self._build_model(Layer, conv_n_hidden, RNN_n_hidden, dense_units, learning_rate,
conv_kernel_size, use_pool, activation)
class QModelConvGRU(QModelConvRNN):
def init(self):
self.qmodel = 'ConvGRU'
def build_model(self, conv_n_hidden, RNN_n_hidden, dense_units, learning_rate,
conv_kernel_size=3, use_pool=False, activation='relu'):
Layer = keras.layers.GRU
self._build_model(Layer, conv_n_hidden, RNN_n_hidden, dense_units, learning_rate,
conv_kernel_size, use_pool, activation)
def load_model(fld, learning_rate):
s = open(os.path.join(fld,'QModel.txt'),'r').read().strip()
qmodels = {
'Conv':QModelConv,
'DenseOnly':QModelMLP,
'MLP':QModelMLP,
'LSTM':QModelLSTM,
'GRU':QModelGRU,
}
qmodel = qmodels[s](None, None)
qmodel.load(fld, learning_rate)
return qmodel
``` |
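The valid-action masking in `get_q_valid` and `act` is the interesting bit: illegal actions are set to NaN so the nan-aware reductions can only ever pick a legal move. A self-contained sketch with a made-up Q-vector:
```python
import random
import numpy as np

def act(q, valid_actions, exploration=0.1):
    # Invalid actions become NaN so nan-aware reductions ignore them.
    q_valid = np.full(len(q), np.nan)
    q_valid[valid_actions] = np.asarray(q, dtype=float)[valid_actions]
    if random.random() > exploration and np.nanmin(q_valid) != np.nanmax(q_valid):
        return int(np.nanargmax(q_valid))
    return random.choice(valid_actions)

print(act([0.1, 0.9, 0.3], valid_actions=[0, 2], exploration=0.0))  # -> 2
```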
{
"source": "jonkoi/pytorch-trpo-2",
"score": 2
} |
#### File: jonkoi/pytorch-trpo-2/run_trpo.py
```python
import argparse
import subprocess
from itertools import count
import torch
from tensorboard_logger import configure, log_value
from models import DQNRegressor, DQNSoftmax
from trpo_agent import TRPOAgent
from utils.atari_wrapper import make_atari, wrap_deepmind
def main(env_id):
env = wrap_deepmind(make_atari(env_id), scale=True)
policy_model = DQNSoftmax(env.action_space.n)
value_function_model = DQNRegressor()
agent = TRPOAgent(env, policy_model, value_function_model)
subprocess.Popen(["tensorboard", "--logdir", "runs"])
configure("runs/pong-run")
for t in count():
reward = agent.step()
log_value('score', reward, t)
if t % 100 == 0:
torch.save(policy_model.state_dict(), "policy_model.pth")
if __name__ == "__main__":
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--env', help='environment ID',
default='PongNoFrameskip-v4')
args = parser.parse_args()
main(args.env)
``` |
{
"source": "JonkopingUniversityLibrary/almaapi",
"score": 2
} |
#### File: almaapi/alma_analytics_parser/__init__.py
```python
import xmljson
from xml.etree.ElementTree import fromstring
import re
from collections import OrderedDict
class AlmaAnalyticsException(Exception):
"""Custom docstring"""
class AlmaAnalyticsParser:
def __init__(self, i):
def __parse_analytics__(xml_string):
urn_schema = '{urn:schemas-microsoft-com:xml-analysis:rowset}'
w3_schema = '{http://www.w3.org/2001/XMLSchema}'
raw_data = xmljson.badgerfish.data(fromstring(xml_string))
def __get_column_names__():
snake_case_pattern = re.compile(r'(?<!^)(?=[A-Z])')
column_names = \
raw_data['report']['QueryResult']['ResultXml'][urn_schema + 'rowset'][w3_schema + 'schema'][
w3_schema + 'complexType'][w3_schema + 'sequence'][w3_schema + 'element']
temp_column_names = []
                for column in column_names[1:]:  # Remove the first column since it's just the integer
for attribute, attribute_value in column.items():
if attribute == '@{urn:saw-sql}columnHeading':
temp_column_names.append(
snake_case_pattern.sub('_', attribute_value).lower().replace(' ', ''))
return temp_column_names
def __get_rows__():
try:
return raw_data['report']['QueryResult']['ResultXml'][urn_schema + 'rowset'][urn_schema + 'Row']
except KeyError:
return []
column_names = __get_column_names__()
temp_table = []
rows = __get_rows__()
if type(rows) is list: # More than one item in the list
for row in __get_rows__():
row.popitem(last=False) # Remove the integer column
temp_row = OrderedDict()
iter = 0
for column, column_value in row.items():
try:
temp_row[column_names[iter]] = column_value['$']
iter = iter + 1
except ValueError:
raise (AlmaAnalyticsException('Failed to load column number'))
temp_table.append(temp_row)
elif type(rows) is OrderedDict:
rows.popitem(last=False) # Remove the integer column
temp_row = OrderedDict()
iter = 0
for column, column_value in rows.items():
try:
temp_row[column_names[iter]] = column_value['$']
iter = iter + 1
except ValueError:
raise (AlmaAnalyticsException('Failed to load column number'))
temp_table.append(temp_row)
return temp_table
self.list = __parse_analytics__(i)
def get_table(self):
return self.list
def get_column(self, column_name):
temp_list = []
try:
for row in self.list:
temp_list.append(row[column_name])
return temp_list
except IndexError:
return None
``` |
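Two pieces of the parser are easy to verify in isolation: the snake-case regex and the BadgerFish conversion that puts element text under the `'$'` key. A sketch, assuming the `xmljson` package is installed:
```python
import re
import xmljson
from xml.etree.ElementTree import fromstring

snake_case_pattern = re.compile(r'(?<!^)(?=[A-Z])')
print(snake_case_pattern.sub('_', 'ColumnHeading').lower())  # -> column_heading

doc = xmljson.badgerfish.data(fromstring('<Row><Column0>42</Column0></Row>'))
print(doc['Row']['Column0']['$'])  # element text is stored under the '$' key
```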
{
"source": "jonkostyniuk/gtfs-viewer",
"score": 2
} |
#### File: jonkostyniuk/gtfs-viewer/flaskapp.py
```python
import flask as fl
#import os #USED??
#from datetime import datetime #USED??
#from flask import Flask, request, flash, url_for, redirect, render_template, abort, send_from_directory
# CUSTOM MODULES
# --------------
import gtfsviewer as gv
# GLOBAL CONSTANT VARIABLES
# -------------------------
# Initialize
# n/a
# Define
# n/a
# DEFINITIONS
# -----------
# Flask Initialize Variables
app = fl.Flask(__name__) # Define Application Object
app.config.from_pyfile("flaskapp.cfg") # Configure Application Object
# Reference Path Variables
#appPath = '/gtfs-viewer/api/v0.1/' # Define Application Path
# ##########################################################################################################
# DEFINED CLASSES AND FUNCTIONS
# ##########################################################################################################
# API CALL FUNCTIONS
# ------------------
# Function to Get GTFS Exchange Feed
@app.route("/api/agencies", methods=["POST"])
def apiAgencies():
return gv.getAgencies(fl.request.json["uuid"])
#return gv.getAgencies(fl.request.args.get("uuid")) ## WORKS FOR 'GET'
# Function to Get Trip Map
@app.route("/api/createmap", methods=["POST"])
def apiCreateMap():
return gv.createTripMap(fl.request.json["uuid"], fl.request.json["agency_id"], fl.request.json["route_id"], fl.request.json["datetime"])
# Function to Get GTFS Exchange Feed
@app.route("/api/gtfs", methods=["GET"])
def apiGTFS():
return gv.getGTFS()
# Function to Get Agency Route Data
@app.route("/api/routes", methods=["POST"])
def apiRoutes():
return gv.getRoutes(fl.request.json["uuid"], fl.request.json["agency_id"])
#return gv.getAgencies(fl.request.args.get("uuid")) ## WORKS FOR 'GET'
# Function to Get Bus Stop Points
@app.route("/api/stops", methods=["POST"])
def apiGetStopPoints():
return gv.getStopPoints(fl.request.json["uuid"], fl.request.json["agency_id"], fl.request.json["bounds"])
# Function to Create Unique User ID
@app.route("/api/uuid", methods=["GET"])
def apiCreateUUID():
return gv.createUUID()
# URL REFERENCE FUNCTIONS
# -----------------------
# Function to Route Index Page
@app.route("/")
def index():
return fl.render_template("index.html")
# Function to Route Static Resources
@app.route("/<path:resource>")
def serveStaticResource(resource):
return fl.send_from_directory("static/", resource)
# ##########################################################################################################
# MAIN PROGRAM
# ##########################################################################################################
# Run Application
if __name__ == "__main__":
app.run()
# ##########################################################################################################
# END OF SCRIPT
# ##########################################################################################################
``` |
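The routes can be exercised without a browser through Flask's built-in test client. A hedged sketch: it assumes `flaskapp` is importable, that `gtfsviewer`'s backing data is reachable, and the `uuid` value is a placeholder:
```python
from flaskapp import app

with app.test_client() as client:
    uuid_resp = client.get('/api/uuid')
    print(uuid_resp.status_code, uuid_resp.get_data(as_text=True))
    agencies = client.post('/api/agencies', json={'uuid': 'placeholder-uuid'})
    print(agencies.status_code)
```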
{
"source": "jonkracht/fish-bowl-round-counter",
"score": 4
} |
#### File: jonkracht/fish-bowl-round-counter/fish-bowl-rounds-counter-process-data.py
```python
import pandas as pd
def load_data(file_name):
'''Load saved data (in csv form) into a Pandas dataframe.'''
return pd.read_csv(file_name)
def main():
'''Clean up data scraped from PDGA.'''
file_name = 'fish-bowl-rounds-counter-data.csv'
# Load saved data
data = load_data(file_name)
# Make names upper case
data['Name'] = data['Name'].apply(lambda Name: Name.upper())
data = data.sort_values(by='Number')
newData = []
for number in data['Number'].unique():
matches = data[data['Number'] == number]
years = sorted(matches['Year'])
names = list(sorted(matches['Name'].unique()))
counts = len(years)
newData.append([names, number, years, counts])
    summary = pd.DataFrame(newData, columns=['Names', 'Number', 'Years', 'Counts']).sort_values(by='Counts', ascending=False)
    print(data.value_counts().head(50))
    #print(data['Player Name'].value_counts().head(20))
    summary.to_csv('processed-data.csv')
    print('Wrote processed-data.csv')
if __name__ == '__main__':
main()
``` |
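The manual loop over `data['Number'].unique()` can also be written with `groupby` and named aggregation, which performs the same per-number summary in one expression. A sketch assuming a recent pandas and the same column names:
```python
import pandas as pd

def summarize(data: pd.DataFrame) -> pd.DataFrame:
    # One summary row per unique Number, mirroring the loop above.
    return (data.groupby('Number')
                .agg(Names=('Name', lambda s: sorted(s.unique())),
                     Years=('Year', lambda s: sorted(s)),
                     Counts=('Year', 'size'))
                .reset_index()
                .sort_values(by='Counts', ascending=False))
```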
{
"source": "jonkracht/Insight-App",
"score": 4
} |
#### File: jonkracht/Insight-App/app.py
```python
import streamlit as st
import numpy as np
import pandas as pd
import geopy
import matplotlib.pyplot as plt
from similarity import rank_courses, convert_df, convert_prefs
def _max_width_():
"""
Increase Streamlit app width to fullscreen.
Fom https://discuss.streamlit.io/t/custom-render-widths/81/8
"""
max_width_str = f"max-width: 2000px;"
st.markdown(
f"""
<style>
.reportview-container .main .block-container{{
{max_width_str}
}}
</style>
""",
unsafe_allow_html=True,
)
def get_driving_time(place_1, place_2, speed = 40):
"""
Compute travel time between two places specified by their longitude/latitude pairs.
"""
from geopy.distance import geodesic
distance = geodesic(place_1, place_2).miles
time = distance/speed
return round(time, 2)
def get_latlon_from_zip(zip_code):
"""
Determine latitude and longitude for a given zip code.
"""
from geopy.geocoders import Nominatim
geolocator = Nominatim(user_agent="LocalRoute", country_bias = 'US')
result = geolocator.geocode({"postalcode": zip_code})
return (result.latitude, result.longitude)
def find_nearby_courses(df, start_zip, max_drive_time):
"""
Update dataframe of courses to only those within a certain distance.
"""
# Cast latitudes/longitudes as tuples
latlong = list(zip(df['latitude'], df['longitude']))
starting_location = get_latlon_from_zip(start_zip)
# st.write(starting_location)
df['time'] = [get_driving_time(starting_location, r) for r in latlong]
df_close = df[df['time'] <= max_drive_time]
return df_close
def get_user_prefs():
"""Query user for their preferences and return results in a dictionary."""
st.subheader('Enter some parameters of your trip.')
st.write('\n')
prefs = {}
st.subheader('Required information:')
prefs['starting_location'] = st.text_input("ZIP code of starting location:")
prefs['max_travel_hours'] = st.selectbox('Maximum drive time between courses [hours]:', ['' ,0.5, 1.0, 1.5, 2.0, 2.5, 3.0])
prefs['n_destinations'] = st.text_input("Number of courses to be played:")
#st.subheader('Optional information:')
if st.checkbox('Show optional parameters'):
hill_map = {'No preference': 'No preference', 'Mostly Flat': 0, 'Moderately Hilly': 1, 'Very Hilly': 2}
wood_map = {'No preference': 'No preference', 'Lightly Wooded': 0, 'Moderately Wooded': 1, 'Heavily Wooded': 2}
difficulty_map = {'No preference': 'No preference', 'Easy': 0, 'Moderate': 1, 'Difficult': 2}
st.sidebar.markdown('Optional parameters:')
prefs['hills'] = hill_map[st.sidebar.selectbox('Hills:', ['No preference', 'Mostly Flat', 'Moderately Hilly', 'Very Hilly'])]
prefs['woods'] = wood_map[st.sidebar.selectbox('Woods:', ['No preference', 'Lightly Wooded', 'Moderately Wooded', 'Heavily Wooded'])]
#prefs['difficulty'] = difficulty_map[st.sidebar.selectbox('Difficulty:', ['No preference', 'Easy', 'Moderate', 'Difficult'])]
prefs['difficulty'] = 'No preference'
diff = st.sidebar.selectbox('Difficulty:', ['No preference', 'Easy', 'Moderate', 'Difficult'])
#prefs['max_length'] = st.text_input("Maximum length course to play:")
#prefs['max_length'] = st.selectbox("Maximum length course to play:", ['No preference', '3000', '6000', '9000'])
#submit = st.button('Continue')
#if submit:
if is_user_inputs_populated(prefs):
return prefs
else:
st.write('Please input additional information.')
return None
def find_next_course(df, user_prefs, visited_courses, current_location):
"""
Determine next course to visit.
"""
df_nearby = find_nearby_courses(df, current_location, user_prefs['max_travel_hours'])
df_nearby_ranked = rank_courses(df_nearby, user_prefs)
# Check if recommendation is already among those visited
while df_nearby_ranked.iloc[0, :]['dgcr_id'] in visited_courses:
df_nearby_ranked = df_nearby_ranked.iloc[1:, :]
return df_nearby_ranked.iloc[0, :]['dgcr_id']
def is_user_inputs_populated(user_prefs):
"""
Takes a dictionary of user preferences and returns a Boolean whether all inputs are filled.
"""
    # 'No preference' counts as populated; only truly empty fields block submission.
    return all(value != '' for value in user_prefs.values())
###############################################################
def main():
from geopy.geocoders import Nominatim
geolocator = Nominatim(user_agent="LocalRoute")
# Forces app to full screen width, if desired
#_max_width_()
file_name ='all_courses_database_processed.plk'
df = pd.read_pickle(file_name)
df_original = df
st.title('LocalRoute')
st.header('Planning the ideal disc golf road trip')
#st.write('\n')
# Obtain user preferences
user_prefs = get_user_prefs()
visited_courses = []
submit = st.button('Continue')
    if user_prefs is not None and is_user_inputs_populated(user_prefs) and submit:
# Print starting location; encounters with accuracy of databased and temporarily removed
#st.subheader('\n\n\nRouting from ' + geolocator.geocode(user_prefs['starting_location'], country_codes='US').address)
current_location = user_prefs['starting_location']
all_destinations = pd.DataFrame()
with st.spinner('**Computing route**'):
for i in range(int(user_prefs['n_destinations'])):
#visited_courses.append(find_next_course(df, user_prefs, visited_courses, current_location))
try:
visited_courses.append(find_next_course(df, user_prefs, visited_courses, current_location))
except:
st.write('Failed to reach sufficient destinations. Displaying partial route.')
break
destination = df_original[df_original['dgcr_id'] == visited_courses[-1]]
all_destinations = pd.concat([all_destinations, destination], ignore_index = True)
current_location = list(destination['postal_code'])[0]
st.subheader('\nYour LocalRoute:')
plot_df = all_destinations[['latitude', 'longitude']]
st.deck_gl_chart(
viewport = {
'latitude': plot_df['latitude'].mean(),
'longitude': plot_df['longitude'].mean(),
'zoom': 8,
'angle': 0
},
layers = [ # STARTING LOCATION
{'type': 'ScatterplotLayer',
'data': pd.DataFrame(get_latlon_from_zip(user_prefs['starting_location']), index = ['latitude', 'longitude']).T,
'radiusScale': 10,
'radiusMinPixels': 10,
'getFillColor': [238, 0, 0],
'extruded': True,
'pickable': True,
},
{ # VISITED COURSES
'type': 'ScatterplotLayer',
'data': plot_df,
'radiusScale': 20,
'radiusMinPixels': 20,
'getFillColor': [113, 179, 255],
'extruded': True
},
{ # OTHER COURSES
'type': 'ScatterplotLayer',
'data': df,
'radiusScale': 5,
'radiusMinPixels': 5,
'getFillColor': [112, 131, 203],
'opacity': .1,
'extruded': True
}
])
cols_to_display = ['name', 'locality', 'region', 'holes', 'rating', 'woods', 'hills']
st.table(all_destinations[cols_to_display])
return
if __name__ == '__main__':
main()
``` |
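`get_driving_time` is simply geodesic distance divided by an assumed average speed. In isolation (assumes `geopy` is installed; the coordinates are arbitrary examples):
```python
from geopy.distance import geodesic

philadelphia = (39.9526, -75.1652)
pittsburgh = (40.4406, -79.9959)

miles = geodesic(philadelphia, pittsburgh).miles
print(round(miles, 1), 'miles ->', round(miles / 40, 2), 'hours at 40 mph')
```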
{
"source": "jonkracht/jon-insight-project",
"score": 3
} |
#### File: jon_insight_project/features/process_raw_data.py
```python
import pandas as pd
import numpy as np
import csv
# Load json file of scraped data
#file_name, save_name = '/home/jon/PycharmProjects/jon-insight-project/jon_insight_project/data/pa_course_database.json', 'pa_course_database_processed'
file_name , save_name = '/home/jon/PycharmProjects/jon-insight-project/jon_insight_project/data/all_course_database.json', 'all_courses_database_processed'
df = pd.read_json(file_name)
# Reform website 'url' into 'dgcr_id' (for brevity, mainly)
df['dgcr_id'] = df['url'].str.split('id=', expand = True).iloc[:, 1]
#df = df.drop(columns = ['url'])
# Create 'hills' and 'woods' numerical features from 'landscape' categorical feature
landscape_split = df['landscape'].str.split("&", expand = True)
df['hills'], df['woods'] = landscape_split[0], landscape_split[1]
hill_dict = {"Mostly Flat": 0, "Moderately Hilly": 1, "Very Hilly": 2}
wood_dict = {"Lightly Wooded": 0, "Moderately Wooded": 1, "Heavily Wooded": 2}
df['hills'] = df['hills'].str.strip().map(hill_dict)
df['woods'] = df['woods'].str.strip().map(wood_dict)
# Recast 'multiple_tees_pins' into Boolean 'multiple_layouts' feature
df['multiple_layouts'] = df['multiple_tees_pins'].str.contains('Yes', regex = False)
# Address missing values
# Fill missing 'year_established'
# df['year_established'].replace('--', -666, inplace=True)
# df['year_established'] = df['year_established'].astype(float)
# df['year_established'].replace(-666, np.nan, inplace=True)
#df['year_established'].replace('--', '', inplace=True)
df['year_established'].replace('--', np.nan, inplace=True)
# Fill missing 'rating'
# df['rating'].replace('', -666, inplace=True)
# df['rating'] = df['rating'].astype(float)
# df['rating'].replace(-666, np.nan, inplace=True)
df['rating'].replace('', np.nan, inplace = True)
# Fill missing 'rating_count'
# df['rating_count'].replace('', -666, inplace=True)
# df['rating_count'].replace('--', -666, inplace=True)
# df['rating_count'] = df['rating_count'].astype(float)
# df['rating_count'].replace(-666, np.nan, inplace=True)
df['rating_count'].replace('--', np.nan, inplace=True)
# Fill missing 'par'
# df['par'].replace('', -666, inplace=True)
# df['par'].replace('--', -666, inplace=True)
# df['par'] = df['par'].astype(float)
# df['par'].replace(-666, np.nan, inplace=True)
df['par'].replace('--', np.nan, inplace=True)
# Fill missing "sse"
# df['sse'].replace('', -666, inplace=True)
# df['sse'].replace('--', -666, inplace=True)
# df['sse'] = df['sse'].astype(float)
# df['sse'].replace(-666, np.nan, inplace=True)
df['sse'].replace('--', np.nan, inplace=True)
# Compress 'length' data:
def largestNumber(in_str):
    '''Return the largest whitespace-separated integer in a string, or '' if none.'''
    l = [int(x) for x in in_str.split() if x.isdigit()]
    return max(l) if l else ''
new_lengths = []
for entry in df['length']:
new_lengths.append(largestNumber(entry))
df['length'] = new_lengths
# df['length'] = df['length'].astype(float)
# df['length'].replace(-666, np.nan, inplace=True)
df['length'].replace('--',np.nan, inplace = True)
df['length'].replace('n/a', np.nan, inplace = True)
df['par'].replace('--', np.nan, inplace=True)
# Compress holes data:
new_holes = []
for entry in df['holes']:
new_holes.append(largestNumber(entry))
#df['holes'] = new_holes
#df['holes'] = df['holes'].astype(float)
#df['holes'].replace(-666, np.nan, inplace=True)
df['holes'] = new_holes
# Correct (single) incorrect longitude value
df.loc[df['longitude'] > 0, 'longitude'] *= -1
# Save list of lat/long coordinates
x = []
for pos_x, pos_y in zip(df['longitude'].values, df['latitude'].values):
x.append((pos_x, pos_y))
with open("pa_positions.txt", 'w') as f:
for s in x:
f.write(str(s) + '\n')
# Create column estimating the time it takes to play the course
m = 3/(10742 - 2810)
b = 5 - 10742*m
df['length'] = pd.to_numeric(df['length'])
df['hours_to_play'] = round(df['length'].astype(float) * m + b,2)
# Remove missing data
df = df.loc[~df.isnull().any(axis=1), :]
df = df.loc[~(df['length'] == ''), :]
# Cast columns in correct form
df['hills'] = pd.to_numeric(df['hills']).astype(int)
df['woods'] = pd.to_numeric(df['woods']).astype(int)
df['rating_count'] = pd.to_numeric(df['rating_count']).astype(int)
df['length'] = pd.to_numeric(df['length']).astype(int)
df['year_established'] = pd.to_numeric(df['year_established']).astype(int)
# Compute a 'difficulty' metric
df['difficulty'] = (df['sse'] - pd.to_numeric(df['par']).astype(float)) / df['holes']
# Eliminate Alaska, Hawaii, and Saipan for simplicity
df = df[~df['region'].isin(['AK', 'HI', 'MP'])]
col_ordering = ['name', 'locality', 'region', 'postal_code', 'latitude', 'longitude',
'dgcr_id', 'year_established', 'tee_type', 'basket_type',
'holes', 'length', 'hours_to_play', 'multiple_layouts',
'hills', 'woods',
'par', 'sse', 'difficulty',
'rating', 'rating_count']
# Remove courses with 0 for latitude/longitude:
df = df[abs(df['longitude']) > 0.01]
# Remove length outlier (Princeton Country Club, IN)
if file_name == '/home/jon/PycharmProjects/jon-insight-project/jon_insight_project/data/all_course_database.json':
df = df.sort_values('length', ascending=False).iloc[1:]
df2 = df[col_ordering]
# Save processed dataframe as a .plk
df2.to_pickle(save_name + '.plk')
# Save via csv
df2.to_csv('all_courses_database_processed.csv')
print('Finished.')
``` |
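The constants behind `hours_to_play` are a two-point linear fit: a course of length 2810 is assumed to take 2 hours and one of length 10742 to take 5 hours (in whatever unit the scrape stores). The same slope and intercept fall out of `numpy.polyfit`:
```python
import numpy as np

m, b = np.polyfit([2810, 10742], [2, 5], 1)
print(m, b)                    # matches the slope/intercept in the script
print(round(5000 * m + b, 2))  # estimated hours for a length-5000 course
```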
{
"source": "jonkracht/pdga-tournament-analysis",
"score": 3
} |
#### File: jonkracht/pdga-tournament-analysis/pdgaTournamentScraperFunctions.py
```python
import requests
from bs4 import BeautifulSoup
def queryEventId(baseURL):
'''Query user for index of tournament number of interest. Check with number matches tournament name.'''
while True:
eventID = input('Enter index of tournament for which data is to be scraped:\n> ')
url = baseURL + eventID
r = requests.get(url)
soup = BeautifulSoup(r.text, 'html.parser')
tournamentName = soup.find(id='page-title').get_text()
tournamentDate = soup.find(class_='event-info info-list').find(class_='tournament-date').get_text().split()[1]
printBanner(f'Details for event number {eventID}')
print(f'Name: {tournamentName}\nDate: {tournamentDate}')
print('\nIs this the event of interest? (\'y\' for yes)')
response = input('> ')
if response == 'y':
break
return eventID
def printBanner(text):
'''Standardized method to print text to std out.'''
char, width, gap = '*', 80, 3
leftFill = int((width - len(text) - 2 * gap) / 2)
rightFill = width - leftFill - len(text) - 2 * gap
print('\n' + width * char)
print(leftFill * char + gap * ' ' + text + gap * ' ' + rightFill * char)
print(width * char)
return
def scrapePdgaData(url):
'''Function that takes in a PDGA tournament url and outputs two lists: tournament description and propagator information'''
import re
printBanner('Scraping Data')
# Pull html from website
r = requests.get(url)
# Format html
soup = BeautifulSoup(r.text, 'html.parser')
# Details of the event
eventInfo = {}
eventInfo['Name'] = soup.find("meta", property='og:title')['content']
# Locate tag in html which contains tournament eventInfo
infoTag = soup.find(class_='event-info info-list')
eventInfo['Date'] = infoTag.find(class_='tournament-date').text.split(' ', 1)[1]
eventInfo['Location'] = infoTag.find(class_='tournament-location').text.split(' ', 1)[1]
eventInfo['TD'] = infoTag.find(class_='tournament-director').text.split(' ', 2)[2]
eventInfo['nRounds'] = int(list(soup.findAll('th', text=re.compile('Rd'))[-1].text)[-1])
eventData = {}
# Examine all table rows in the document;
for tag in soup.select('tbody > tr'):
# Check whether table row include player info
if tag.find(class_='player')!=None:
name = tag.find(class_='player').text
# Handle missing information (PDGA numbers or ratings)
try:
pdgaNumber = tag.find(class_='pdga-number').text
except:
pdgaNumber = None
try:
playerRating = tag.find(class_='player-rating').text
except:
playerRating = None
if tag.find(class_='propagator') == None:
isPropagator = 0
else:
isPropagator = 1
# Input round scores and ratings
roundScores, roundRatings = [], []
for r in tag.findAll(class_='round'):
roundScores.append(r.text)
roundRatings.append(r.findNext(class_='round-rating').text)
# Store info in a dictionary
playerInfoDict = {}
playerInfoDict['PDGA Number'] = pdgaNumber
playerInfoDict['Player Rating'] = playerRating
playerInfoDict['Propagator'] = isPropagator
playerInfoDict['Round Scores'] = roundScores
playerInfoDict['Round Ratings'] = roundRatings
eventData[name] = playerInfoDict
print('\nCompleted scraping.')
return eventInfo, eventData
def printData(data, info):
'''Prints event info and data to stdout.'''
# Query user if data is to be printed to debug
view = str(input('\nWould you like to view the data? (Type \'y\' for yes.) '))
if view == 'y':
printBanner('Event Details')
for key, val in info.items():
print(f"{key+':':<35} {val}")
printBanner('Event Data')
for key, val in data.items():
print(f"{key+ ':':<25}{list(val.values())}")
return
def saveData(data, info, eventNumber):
'''Save scraped data via json'''
import json
# Create dictionary with two keys: (event) info and data
newDict = {}
newDict['info'] = info
newDict['data'] = data
savename = 'pdgaEvent' + str(eventNumber)
outpath = './tournament-data/' + savename + '.json'
with open(outpath, "w") as f:
f.write(json.dumps(newDict))
print('Data saved to ' + outpath)
return
if __name__ == '__main__':
'''A few function calls to test behavior'''
baseURL = 'https://www.pdga.com/tour/event/'
#ID = queryEventId(baseURL)
scrapePdgaData(baseURL + '40638')
#sampInfo = {'Name': '<NAME>', 'Date': '20-Jul-2019', 'Location': 'Sellersville, Pennsylvania, United States', 'TD': 'Dustin Leatherman'}
#sampData = {'<NAME>': {'PDGA Number': '58320', 'Propagator': True, 'Round Scores': ['51', '54'], 'Round Ratings': ['1058', '1059']}, '<NAME>': {'PDGA Number': '16287', 'Propagator': True, 'Round Scores': ['55', '59'], 'Round Ratings': ['1019', '1018']}}
#printData(sampData, sampInfo)
print('Finished')
``` |
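The row-walking in `scrapePdgaData` reduces to: select the table rows, keep those with a `player` cell, and read sibling cells by CSS class. A self-contained miniature on made-up HTML:
```python
from bs4 import BeautifulSoup

html = '''<table><tbody>
<tr><td class="player">Jane Doe</td><td class="round">54</td>
<td class="round-rating">1012</td></tr>
<tr><td>not a player row</td></tr>
</tbody></table>'''

soup = BeautifulSoup(html, 'html.parser')
for tag in soup.select('tbody > tr'):
    if tag.find(class_='player') is None:
        continue  # skip header/divider rows, as in the scraper above
    name = tag.find(class_='player').text
    rounds = [(r.text, r.find_next(class_='round-rating').text)
              for r in tag.find_all(class_='round')]
    print(name, rounds)  # -> Jane Doe [('54', '1012')]
```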
{
"source": "jonkracht/wordle",
"score": 4
} |
#### File: jonkracht/wordle/wordleFunctions.py
```python
def printBanner(text):
'''Standardized means of printing information to stdout.'''
bannerMarker = '*'
bannerWidth = 80
gap = 3
leftFill = int((bannerWidth - len(text) - 2 * gap)/2)
rightFill = bannerWidth - leftFill - len(text) - 2*gap
print('\n')
print(bannerWidth * bannerMarker)
print(leftFill * bannerMarker + gap * ' ' + text + gap * ' ' + rightFill * bannerMarker)
print(bannerWidth * bannerMarker)
    return
def displayIntroduction():
'''Prints explanation of Wordle and this solver.'''
printBanner('Wordle Solver')
print('\nEncoding scheme:')
print(f"{'Match type':<25} {'Color':<20}{'Encoding'}")
print(f"{'----------':<25} {'-----':<20}{'--------'}")
print(f"{'Exact':<25} {'Green':<20}{'2'}")
print(f"{'Inexact':<25} {'Yellow':<20}{'1'}")
print(f"{'No match':<25} {'Gray':<20}{'0'}")
print('\nExample: \nIf the letters of a guess are colored (yellow, green, gray, gray, yellow),')
print('the score to input into the solver should be \'12001\'.')
return
def loadWordList():
'''Load a list of words over which solving occurs.'''
with open('wordleWordList.txt', 'r') as open_file:
wordList = open_file.read().split()
return wordList
def scoreGuess(guess, answer):
'''Return a Wordle score for an input guessed word and answer.
Encoding: exact match is scored as '2', inexact match is '1', and miss is '0'.'''
guessList, answerList = list(guess), list(answer)
remainingAnswerLetters = list(answer)
score = len(guessList) * [0]
# Find exact matches
for i in range(len(guessList)):
if guess[i] == answer[i]:
score[i] = 2
remainingAnswerLetters.remove(guess[i])
# Find inexact matches
for i in range(len(guessList)):
if score[i] != 2 and guessList[i] in remainingAnswerLetters:
score[i] = 1
remainingAnswerLetters.remove(guessList[i])
return score
def queryUser():
'''Input guessed word and score. Enforce proper input type.'''
# Input word guessed
while True:
guess = str(input('\nWord guessed (or \'q\' to quit):\n>> ')).lower()
if guess == 'q':
printBanner('Exiting Wordle Solver. Byeee.')
exit()
# Verify guess is of the appropriate form (5 letters)
if len(list(guess)) == 5:
break
print('Incorrect number of letters. Try again.')
# Input score: Check input length and either 0, 1, 2
while True:
score = input('Score of the guess:\n>> ')
score = [int(a) for a in list(score)] # convert string to list
if len(score) == 5 and False not in [x in [0, 1, 2] for x in score]:
break
print('\nNot a possible Wordle score. Try again.')
return guess, score
def refineWordList(wordList, guess, score):
    '''Remove entries of a word list which would not produce a desired score for a guessed word.'''
return [w for w in wordList if scoreGuess(guess, w) == score]
def wordsContainLetters(wordList, letters):
'''Returns words in wordList that contain desired letters.'''
newList = []
letters = list(letters)
for word in wordList:
hasAllLetters = True
for l in letters:
if l not in word:
hasAllLetters = False
if hasAllLetters:
newList.append(word)
return newList
def nextGuessHelper():
'''For a set of letters input by user, provide candidate words for next guess.'''
printBanner('Next Guess Helper')
print('\nEnter letters (or \'c\' to continue to next guess):')
while True:
letters = input('> ')
if letters == 'c':
break
else:
newList = wordsContainLetters(loadWordList(), letters)
print(f'\n{len(newList)} possible words from the letters \'{letters}\': \n')
printWordList(newList)
return
def letterCount(wordlist):
'''Given a list of words, compute letter frequency'''
from collections import Counter, OrderedDict
letterCount = Counter(''.join(wordlist))
return OrderedDict(letterCount.most_common())
def updateKnownLetters(guess, score, placedList, unplacedList):
    '''Update the lists of exactly placed (green) and present-but-unplaced (yellow) letters from a guess and its score.'''
guessList, scoreList = list(guess), list(score)
remainingLetters = list(guess)
# Check for exact matches
for i in range(len(guessList)):
if scoreList[i] == 2:
placedList[i] = guessList[i]
remainingLetters.remove(guessList[i])
# Check for inexact matches
for i in range(len(guessList)):
if scoreList[i] == 1 and guessList[i] in remainingLetters:
unplacedList.append(guessList[i])
remainingLetters.remove(guessList[i])
return placedList, unplacedList
def printWordList(wordList):
    '''Print a word list to stdout in rows of a fixed number of words.'''
width, gap = 10, 3
while len(wordList) > 0:
string = ''
if len(wordList) >= width:
temp = wordList[:width]
else:
temp = wordList
for t in temp:
string += t + gap * ' '
print(string)
wordList = wordList[width:]
return
if __name__ == '__main__':
# Some sample function calls to test functionality
#print(scoreGuess('lymph', 'crimp'))
#guess, score = queryUser()
#c = refineWordList(loadWordList(), 'beast', [0,2,2,2,2])
#print(c)
#print(wordsContainLetters(loadWordList(), 'bhm'))
#nextGuessHelper()
#print(letterCount(['cat', 'cot']))
displayIntroduction()
print('Finished')
``` |
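The two-pass structure of `scoreGuess` matters for repeated letters: exact matches are claimed first, and each answer letter can satisfy at most one inexact match. A compact standalone equivalent using `Counter`:
```python
from collections import Counter

def score(guess, answer):
    result = [0] * len(guess)
    # Pool of answer letters not consumed by exact (green) matches.
    pool = Counter(a for g, a in zip(guess, answer) if g != a)
    for i, (g, a) in enumerate(zip(guess, answer)):
        if g == a:
            result[i] = 2
        elif pool[g] > 0:
            result[i] = 1
            pool[g] -= 1
    return result

print(score('geese', 'eagle'))  # -> [1, 1, 0, 0, 2]
```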
{
"source": "JonKramme/Python-script-collection",
"score": 4
} |
#### File: JonKramme/Python-script-collection/binary_tree_basic_traversal_inclass.py
```python
class Node:
def __init__(self,val):
self.data = val
self._left = None
self._right = None
def left(self):
return self._left
def right(self):
return self._right
def insert_left(self, val):
if self._left is None:
self._left = Node(val)
return -1
def insert_right(self, val):
if self._right is None:
self._right = Node(val)
return -1
def preorder_query(self):
print(self.data)
if self._left is not None:
self._left.preorder_query()
if self._right is not None:
self._right.preorder_query()
def inorder_query(self):
if self._left is not None:
self._left.inorder_query()
print(self.data)
if self._right is not None:
self._right.inorder_query()
def postorder_query(self):
if self._left is not None:
self._left.postorder_query()
if self._right is not None:
self._right.postorder_query()
print(self.data)
def breadth_first_query(self):
height = self.height(self)
for i in range(1, height+1):
self.print_curr_level(self,i)
def print_curr_level(self,node,level):
if node is None:
return
if level == 1:
print(node.data)
elif level > 1:
self.print_curr_level(node.left(),level-1)
self.print_curr_level(node.right(),level-1)
def height(self,node):
if node is None:
return 0
else:
lheight = node.height(node._left)
rheight = node.height(node._right)
return lheight+1 if lheight > rheight else rheight+1
#Test Driver-code
N = Node(25)
N.insert_left(15)
N.left().insert_left(10)
N.left().left().insert_left(4)
N.left().left().insert_right(12)
N.left().insert_right(22)
N.left().right().insert_left(18)
N.left().right().insert_right(24)
N.insert_right(50)
N.right().insert_left(35)
N.right().left().insert_left(31)
N.right().left().insert_right(44)
N.right().insert_right(70)
N.right().right().insert_left(66)
N.right().right().insert_right(90)
N.postorder_query()
``` |
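The level-order traversal above calls `height` and re-walks the tree once per level, which is O(n²) on a skewed tree. A queue-based version visits each node exactly once; this sketch assumes the same `Node` interface with `left()`/`right()` accessors:
```python
from collections import deque

def breadth_first(root):
    # Visit each node exactly once, left to right, level by level.
    queue = deque([root])
    while queue:
        node = queue.popleft()
        print(node.data)
        for child in (node.left(), node.right()):
            if child is not None:
                queue.append(child)
```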
{
"source": "jonkrohn/SLM-Lab",
"score": 3
} |
#### File: agent/algorithm/random.py
```python
from slm_lab.agent.algorithm.base import Algorithm
import numpy as np
class Random(Algorithm):
'''
Example Random agent that works in both discrete and continuous envs
'''
def post_body_init(self):
'''Initializes the part of algorithm needing a body to exist first.'''
pass
def body_act_discrete(self, body, state):
'''Random discrete action'''
action = np.random.randint(body.action_dim)
return action
def body_act_continuous(self, body, state):
'''Random continuous action'''
action = np.random.randn(body.action_dim)
return action
def train(self):
loss = np.nan
return loss
def update(self):
explore_var = np.nan
return explore_var
```
#### File: agent/net/net_util.py
```python
from functools import partial
import pydash as _
import torch
import torch.nn as nn
import torch.nn.functional as F
from slm_lab.lib import logger
def get_activation_fn(activation):
'''Helper to generate activation function layers for net'''
layer = None
if activation == 'sigmoid':
layer = nn.Sigmoid()
elif activation == 'lrelu':
layer = nn.LeakyReLU(negative_slope=0.05)
elif activation == 'tanh':
layer = nn.Tanh()
elif activation == 'relu':
layer = nn.ReLU()
else:
logger.debug("No activation fn or unrecognised activation fn")
layer = nn.ReLU()
return layer
def get_loss_fn(cls, loss_param):
'''Helper to parse loss param and construct loss_fn for net'''
loss_param = loss_param or {}
loss_fn = getattr(F, _.get(loss_param, 'name', 'mse_loss'))
loss_param = _.omit(loss_param, 'name')
if not _.is_empty(loss_param):
loss_fn = partial(loss_fn, **loss_param)
return loss_fn
def get_optim(cls, optim_param):
'''Helper to parse optim param and construct optim for net'''
optim_param = optim_param or {}
OptimClass = getattr(torch.optim, _.get(optim_param, 'name', 'Adam'))
optim_param = _.omit(optim_param, 'name')
optim = OptimClass(cls.parameters(), **optim_param)
return optim
def get_optim_multinet(params, optim_param):
'''Helper to parse optim param and construct optim for net'''
optim_param = optim_param or {}
OptimClass = getattr(torch.optim, _.get(optim_param, 'name', 'Adam'))
optim_param.pop('name', None)
optim = OptimClass(params, **optim_param)
return optim
def flatten_params(net):
'''Flattens all of the parameters in a net
Source: https://discuss.pytorch.org/t/running-average-of-parameters/902/2'''
return torch.cat([param.data.view(-1) for param in net.parameters()], 0)
def load_params(net, flattened):
'''Loads flattened parameters into a net
Source: https://discuss.pytorch.org/t/running-average-of-parameters/902/2'''
offset = 0
for param in net.parameters():
param.data.copy_(
flattened[offset:offset + param.nelement()]).view(param.size())
offset += param.nelement()
return net
def init_layers(layers, layer_type):
'''
Initializes all of the layers of type 'Linear' or 'Conv' using xavier uniform initialization for the weights and 0.01 for the biases
Initializes all layers of type 'BatchNorm' using univform initialization for the weights and the same as above for the biases
'''
biasinit = 0.01
for layer in layers:
classname = layer.__class__.__name__
if classname.find(layer_type) != -1:
if layer_type == 'BatchNorm':
torch.nn.init.uniform(layer.weight.data)
else:
torch.nn.init.xavier_uniform(layer.weight.data)
layer.bias.data.fill_(biasinit)
```
#### File: SLM-Lab/test/conftest.py
```python
from slm_lab.agent import AgentSpace
from slm_lab.agent.memory import Replay
from slm_lab.agent.net.convnet import ConvNet
from slm_lab.agent.net.feedforward import MLPNet, MultiMLPNet
from slm_lab.env import EnvSpace
from slm_lab.experiment.control import Trial
from slm_lab.experiment.monitor import AEBSpace
from slm_lab.lib import util
from slm_lab.spec import spec_util
from torch.autograd import Variable
import pandas as pd
import pytest
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
spec = None
aeb_space = None
agent = None
env = None
@pytest.fixture(scope='session')
def test_spec():
global spec
spec = spec_util.get('base.json', 'base_case')
spec['meta']['train_mode'] = True
return spec
# TODO properly use in tests
@pytest.fixture(scope='session')
def test_session(test_spec):
trial = Trial(test_spec)
session = trial.init_session()
yield session
session.close()
@pytest.fixture(scope='session')
def test_aeb_space(test_spec):
global aeb_space
if aeb_space is None:
aeb_space = AEBSpace(test_spec)
env_space = EnvSpace(test_spec, aeb_space)
agent_space = AgentSpace(test_spec, aeb_space)
aeb_space.init_body_space()
return aeb_space
@pytest.fixture(scope='session')
def test_agent(test_aeb_space):
agent = test_aeb_space.agent_space.agents[0]
return agent
@pytest.fixture(scope='session')
def test_env(test_aeb_space):
env = test_aeb_space.env_space.envs[0]
return env
@pytest.fixture
def test_df():
data = pd.DataFrame({
'integer': [1, 2, 3],
'square': [1, 4, 9],
'letter': ['a', 'b', 'c'],
})
assert isinstance(data, pd.DataFrame)
return data
@pytest.fixture
def test_dict():
data = {
'a': 1,
'b': 2,
'c': 3,
}
assert isinstance(data, dict)
return data
@pytest.fixture
def test_list():
data = [1, 2, 3]
assert isinstance(data, list)
return data
@pytest.fixture
def test_obj():
class Foo:
bar = 'bar'
return Foo()
@pytest.fixture
def test_str():
data = 'lorem ipsum dolor'
assert isinstance(data, str)
return data
@pytest.fixture
def test_multiline_str():
data = '''
lorem ipsum dolor
sit amet
consectetur adipiscing elit
'''
assert isinstance(data, str)
return data
@pytest.fixture(scope="class", params=[
(
MLPNet,
{
'in_dim': 10, 'hid_dim': [5, 3],
'out_dim':2,
'hid_layers_activation': 'tanh',
},
None,
2
), (
MLPNet,
{
'in_dim': 20, 'hid_dim': [10, 50, 5],
'out_dim':2, 'hid_layers_activation': 'tanh',
},
None,
2
), (
MLPNet,
{
'in_dim': 10, 'hid_dim': [],
'out_dim':5, 'hid_layers_activation': 'tanh',
},
None,
2
), (
ConvNet,
{
'in_dim': (3, 32, 32),
'hid_layers': ([],
[]),
'out_dim': 10,
'optim_param':{'name': 'Adam'},
'loss_param': {'name': 'mse_loss'},
'clamp_grad': False,
'batch_norm': False,
},
None,
2
), (
ConvNet,
{
'in_dim': (3, 32, 32),
'hid_layers': ([[3, 16, (5, 5), 2, 0, 1],
[16, 32, (5, 5), 2, 0, 1]],
[100]),
'out_dim': 10,
'optim_param':{'name': 'Adam'},
'loss_param': {'name': 'mse_loss'},
'clamp_grad': False,
'batch_norm': False,
},
None,
2
), (
ConvNet,
{
'in_dim': (3, 32, 32),
'hid_layers': ([[3, 16, (5, 5), 2, 0, 1],
[16, 32, (5, 5), 2, 0, 1]],
[100, 50]),
'out_dim': 10,
'optim_param': {'name': 'Adam'},
'loss_param': {'name': 'mse_loss'},
'clamp_grad': False,
'batch_norm': True,
},
None,
2
), (
ConvNet,
{
'in_dim': (3, 32, 32),
'hid_layers': ([[3, 16, (5, 5), 2, 0, 1],
[16, 32, (5, 5), 1, 0, 1],
[32, 64, (5, 5), 1, 0, 2]],
[100]),
'out_dim': 10,
'optim_param':{'name': 'Adam'},
'loss_param': {'name': 'mse_loss'},
'clamp_grad': True,
'batch_norm': False,
},
None,
2
), (
ConvNet,
{
'in_dim': (3, 32, 32),
'hid_layers': ([[3, 16, (5, 5), 2, 0, 1],
[16, 32, (5, 5), 1, 0, 1],
[32, 64, (5, 5), 1, 0, 2]],
[100]),
'out_dim': 10,
'optim_param':{'name': 'Adam'},
'loss_param': {'name': 'mse_loss'},
'clamp_grad': True,
'batch_norm': True,
},
None,
2
), (
ConvNet,
{
'in_dim': (3, 32, 32),
'hid_layers': ([[3, 16, (7, 7), 1, 0, 1],
[16, 32, (5, 5), 1, 0, 1],
[32, 64, (3, 3), 1, 0, 1]],
[100, 50]),
'out_dim': 10,
'optim_param':{'name': 'Adam'},
'loss_param': {'name': 'mse_loss'},
'clamp_grad': False,
'batch_norm': False,
},
None,
2
), (
ConvNet,
{
'in_dim': (3, 32, 32),
'hid_layers': ([[3, 16, (7, 7), 1, 0, 1],
[16, 32, (5, 5), 1, 0, 1],
[32, 64, (3, 3), 1, 0, 1]],
[100, 50]),
'out_dim': 10,
'optim_param':{'name': 'Adam'},
'loss_param': {'name': 'mse_loss'},
'clamp_grad': False,
'batch_norm': True,
},
None,
2
), (
MultiMLPNet,
{
'in_dim': [[5, 10], [8, 16]],
'hid_dim': [64],
'out_dim': [[3], [2]],
'hid_layers_activation': 'tanh',
'optim_param':{'name': 'Adam'},
'loss_param': {'name': 'mse_loss'},
},
None,
2
), (
MultiMLPNet,
{
'in_dim': [[5, 10], [8, 16]],
'hid_dim': [],
'out_dim': [[3], [2]],
'hid_layers_activation': 'tanh',
'optim_param':{'name': 'Adam'},
'loss_param': {'name': 'mse_loss'},
},
None,
2
), (
MultiMLPNet,
{
'in_dim': [[5, 10], [8, 16]],
'hid_dim': [],
'out_dim': [[5, 3], [8, 2]],
'hid_layers_activation': 'tanh',
'optim_param':{'name': 'Adam'},
'loss_param': {'name': 'mse_loss'},
},
None,
2
), (
MultiMLPNet,
{
'in_dim': [[5, 10, 15], [8, 16]],
'hid_dim': [],
'out_dim': [[5, 3], [12, 8, 2]],
'hid_layers_activation': 'tanh',
'optim_param':{'name': 'Adam'},
'loss_param': {'name': 'mse_loss'},
},
None,
2
), (
MultiMLPNet,
{
'in_dim': [[5, 10, 15], [8, 16]],
'hid_dim': [32, 64],
'out_dim': [[5, 3], [12, 8, 2]],
'hid_layers_activation': 'tanh',
'optim_param':{'name': 'Adam'},
'loss_param': {'name': 'mse_loss'},
},
None,
2
), (
MultiMLPNet,
{
'in_dim': [[5, 10], [8, 16, 24]],
'hid_dim': [32, 64],
'out_dim': [[9, 6, 3], [2]],
'hid_layers_activation': 'tanh',
'optim_param':{'name': 'Adam'},
'loss_param': {'name': 'mse_loss'},
},
None,
2
),
])
def test_nets(request):
net = request.param[0](**request.param[1])
res = (net,) + request.param[2:]
return res
@pytest.fixture(scope="class", params=[(None, None)])
def test_data_gen(request):
return request.param
@pytest.fixture(scope="session", params=[
((5, 1, 1),
2,
[[1, 1, 1, 2, 0], [2, 2, 2, 3, 0], [3, 3, 3, 4, 0], [4, 4, 4, 5, 0],
[5, 5, 5, 6, 0], [6, 6, 6, 7, 0], [7, 7, 7, 8, 0], [8, 8, 8, 9, 0],
[9, 9, 9, 10, 0], [10, 10, 10, 11, 0], [11, 11, 11, 0, 1]]),
((8, 3, 2),
3,
[[[1, 1, 1], [1, 1], 1, [2, 2, 2], 0],
[[2, 2, 2], [2, 2], 2, [3, 3, 3], 0],
[[3, 3, 3], [3, 3], 3, [4, 4, 4], 0],
[[4, 4, 4], [4, 4], 4, [5, 5, 5], 0],
[[5, 5, 5], [5, 5], 5, [6, 6, 6], 0],
[[6, 6, 6], [6, 6], 6, [7, 7, 7], 0],
[[7, 7, 7], [7, 7], 7, [8, 8, 8], 0],
[[8, 8, 8], [8, 8], 8, [9, 9, 9], 0],
[[9, 9, 9], [9, 9], 9, [10, 10, 10], 0],
[[10, 10, 10], [10, 10], 10, [11, 11, 11], 0],
[[11, 11, 11], [11, 11], 11, [0, 0, 0], 1]])])
def test_memory(request, test_agent):
max_size, state_dim, action_dim = request.param[0]
batch_size = request.param[1]
experiences = request.param[2]
body = test_agent.bodies[0]
body.max_size = max_size
body.state_dim = state_dim
body.action_dim = action_dim
memory = Replay(test_agent)
memory.post_body_init()
return [memory, batch_size, experiences]
``` |
{
"source": "JonKruger/checkers",
"score": 3
} |
#### File: ai/games/ai_game.py
```python
from ai.players.random_ai_player import RandomAIPlayer
from checkers.game import Game
import random
class AIGame:
def __init__(self, black_player, white_player, verbose=False):
self._black = black_player
self._white = white_player
self._game = Game()
self._verbose = verbose
def play(self):
        while not self._game.is_over():
if self._verbose:
print(f"It's {'Black' if self._game.whose_turn() == 1 else 'White'}'s turn")
moves = self._game.get_possible_moves()
move = self.current_turn_player().select_move(self._game, moves)
if self._verbose:
print(f'Moving {move}...')
self._game = self._game.move(move)
if self._verbose:
print(f'Pieces remaining: {self._game.get_uncaptured_pieces()}')
if self._verbose:
print(f"Winner is {'Black' if self._game.get_winner() == 1 else 'White'}!")
print(self._game.moves)
return self._game
def current_turn_player(self):
return self._black if self._game.whose_turn() == 1 else self._white
def current_state(self):
return self._game
```
#### File: checkers/checkers/game.py
```python
from copy import deepcopy
from .board import Board
class Game:
CONSECUTIVE_NONCAPTURE_MOVE_LIMIT = 40
def __init__(self, width=4, height=8, rows_per_user_with_pieces=3, board=None):
self.board = board or Board(width, height, rows_per_user_with_pieces)
self.moves = []
self.moves_since_last_capture = 0
self.previous_state = None
self._possible_next_states = None
self._winner = None
self._is_draw = False
# Overriding this so that deepcopy doesn't include previous_state when copying
def __deepcopy__(self, memo):
board = deepcopy(self.board, memo)
copy = Game(board=board)
copy.moves = deepcopy(self.moves, memo)
copy.moves_since_last_capture = self.moves_since_last_capture
copy._winner = self._winner
copy._is_draw = self._is_draw
# Don't copy cached values
copy.previous_state = None
copy._possible_next_states = None
return copy
def move(self, move):
assert type(move) == list and len(move) == 2, move
if move not in self.get_possible_moves():
raise ValueError('The provided move is not possible')
copy = deepcopy(self)
copy.previous_state = self
copy.board.move(move)
copy.moves.append(move)
copy.moves_since_last_capture = 0 if copy.board.previous_move_was_capture else copy.moves_since_last_capture + 1
copy._determine_result()
return copy
def resign(self, resigning_player=None):
copy = deepcopy(self)
copy.previous_state = self
if resigning_player is None:
resigning_player = copy.whose_turn()
copy._winner = 1 if resigning_player == 2 else 2
copy._is_draw = False
return copy
def agree_to_draw(self):
copy = deepcopy(self)
copy.previous_state = self
copy._is_draw = True
copy._winner = None
return copy
def _determine_result(self):
if self._move_limit_reached():
self._is_draw = True
elif not self.board.count_movable_player_pieces(1):
self._winner = 2
elif not self.board.count_movable_player_pieces(2):
self._winner = 1
def _move_limit_reached(self):
return self.moves_since_last_capture >= Game.CONSECUTIVE_NONCAPTURE_MOVE_LIMIT
def is_over(self):
return self._is_draw or self._winner is not None
def get_winner(self):
return self._winner
def black_wins(self):
return self._winner == 1
def white_wins(self):
return self._winner == 2
def is_draw(self):
return self._is_draw
def get_uncaptured_pieces(self):
return self.board.get_uncaptured_pieces()
def get_possible_moves(self):
return self.board.get_possible_moves()
def get_possible_capture_moves(self):
return self.board.get_possible_capture_moves()
def get_possible_next_states(self, actual_next_state=None, force_reload=False):
# If we have a cached list of next states and someone passes in an actual_next_state
# that isn't in that list, we have to reload the list.
if self._possible_next_states is not None and actual_next_state is not None and actual_next_state not in self._possible_next_states:
force_reload = True
        if self._possible_next_states is None or force_reload:
self._possible_next_states = []
actual_next_move = None
if actual_next_state is not None:
actual_next_move = actual_next_state.last_move()
for move in self.get_possible_moves():
if move == actual_next_move:
self._possible_next_states.append(actual_next_state)
else:
self._possible_next_states.append(self.move(move))
return self._possible_next_states
def whose_turn(self):
return self.board.player_turn
def get_position_layout_2d(self, player):
return Board.flip_2d(self.board.position_layout_2d) if player == 2 else self.board.position_layout_2d
def get_game_states(self):
return [*(self.previous_state.get_game_states() if self.previous_state is not None else []), self]
def board_height(self):
return self.board.height
def board_width(self):
return self.board.width
def last_move(self):
return self.moves[-1] if (len(self.moves) > 0) else None
```
#### File: checkers/checkers/pdn_parser.py
```python
import numpy as np
import re
import tqdm
from checkers.game import Game
class PDNParser():
def parse(self, text):
text = self._find_move_data(text)
text = re.sub(r'[0-9]+\. ', '', text)
splits = text.split(' ')
moves, result = self._parse_moves(splits)
game = self._replay_game(moves, result)
return game
def _parse_moves(self, splits):
moves = []
game_result = None
for s in splits:
parsed_move, parsed_game_result = self._parse_move(s)
if parsed_game_result is not None:
game_result = parsed_game_result
if parsed_move is None:
continue
elif type(parsed_move[0]) == int:
moves.append(parsed_move)
            elif type(parsed_move[0]) == list:
for inner_move in parsed_move:
moves.append(inner_move)
else:
raise Exception(f'Unable to parse move: {s}')
return moves, game_result
def _parse_move(self, s):
s = s.strip()
if (s == '0-1' or s == '1-0' or s == '1/2-1/2'):
return None, s
elif '-' in s:
return list(map(int, s.split('-'))), None
elif 'x' in s:
jump_splits = list(map(int, s.split('x')))
moves = []
for i in range(0, len(jump_splits) - 1):
moves.append([jump_splits[i], jump_splits[i + 1]])
return moves, None
raise Exception(f'Unable to parse move: {s}')
def _find_move_data(self, text):
data = None
for line in text.split('\n'):
if any([x in line for x in ('[', ']')]):
continue
line = line.strip().lower()
if line.startswith('1.'):
data = ""
elif data is None:
continue
if line:
data += " {}".format(line)
if not line:
break
return re.sub(r" \{[^\}]+\}", "", data.strip().lower())
def _replay_game(self, moves, result):
game = Game()
for move in moves:
game = self._replay_move(game, move)
if result == '0-1' and game.get_winner() != 2:
game = game.resign(resigning_player=1)
elif result == '1-0' and game.get_winner() != 1:
game = game.resign(resigning_player=2)
        elif result == '1/2-1/2' and not game.is_draw():
game = game.agree_to_draw()
return game
def _replay_move(self, game, move):
try:
return game.move(move)
except ValueError:
return self._replay_implicit_capture_moves(game, move, move[1])
def _replay_implicit_capture_moves(self, game, move, final_destination):
        # next() with a default avoids an unhandled StopIteration when no
        # capture move starts from this square.
        capture_move_for_this_piece = next((m for m in game.get_possible_capture_moves() if m[0] == move[0]), None)
        if capture_move_for_this_piece is None:
            raise ValueError('The provided move is not possible')
game = game.move(capture_move_for_this_piece)
if capture_move_for_this_piece[1] == final_destination:
return game
else:
return self._replay_implicit_capture_moves(game, [capture_move_for_this_piece[1], final_destination], final_destination)
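# A minimal, illustrative sketch of the movetext parsing alone (no board
# replay, so the squares need not form a legal game): '-' marks a simple move,
# 'x' chains expand into one [from, to] pair per jump, and '0-1'/'1-0'/
# '1/2-1/2' set the game result.
if __name__ == "__main__":
    parser = PDNParser()
    moves, result = parser._parse_moves("11-15 22x15x8 0-1".split(" "))
    print(moves)   # [[11, 15], [22, 15], [15, 8]]
    print(result)  # 0-1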
```
#### File: test/test_helpers/board_test_helper.py
```python
from checkers.board import Board
from checkers.piece import Piece
class BoardTestHelper(Board):
def __init__(self, width=4, height=8, rows_per_user_with_pieces=3):
Board.__init__(self, width, height, rows_per_user_with_pieces)
def set_pieces(self, pieces, whose_turn):
pieces = [BoardTestHelper.parse_piece_notation(p) for p in pieces]
for piece in pieces:
piece.board = self
self.pieces = pieces
self.player_turn = whose_turn
@classmethod
def parse_piece_notation(cls, notation):
values = notation.split()
piece = Piece()
if values[0] == 'B':
piece.player = 1
elif values[0] == 'W':
piece.player = 2
else:
raise Exception('Invalid player notation, should be "B" or "W"')
piece.king = (values[1] == 'king')
piece.position = int(values[-1])
return piece
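# A short usage sketch (hypothetical piece positions; relies on the default
# 4x8 Board construction and the no-argument Piece constructor used above).
# Each notation string has the form "<B|W> [king] <position>".
if __name__ == "__main__":
    helper = BoardTestHelper()
    helper.set_pieces(["B 12", "W king 21"], whose_turn=1)
    print([(p.player, p.king, p.position) for p in helper.pieces])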
``` |
{
"source": "Jonksar/whitebox",
"score": 3
} |
#### File: whitebox/whitebox/reporting.py
```python
import seaborn as sns
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import sys
from termcolor import cprint
from imageio import imread
import time
from sklearn.metrics import precision_recall_curve, average_precision_score
# sklearn.utils.fixes.signature was removed from scikit-learn; the standard
# library's inspect.signature is a drop-in replacement here.
from inspect import signature
def eprint(*args, **kwargs):
cprint(*args, file=sys.stderr, **kwargs)
def cut_dataset(df, feature_column=None, label_column=None, cutoff=None):
    # Split rows into positive/negative using the boolean label column;
    # feature_column and cutoff are accepted for API symmetry but unused here.
    positive_df = df[df[label_column]]
    negative_df = df[np.logical_not(df[label_column])]
    return positive_df, negative_df
def classifier_full_report( df, feature_column='confidence', label_column='label', cutoff=None, dropna=False, display_lambda=None):
if dropna: df = df.dropna( subset=[ feature_column, label_column] )
positive_df, negative_df = cut_dataset( df, feature_column, label_column )
#
# Aggregate metric plots
#
# Plot 1: Confidence distributions
if df[label_column].dtype == 'bool':
p_label = "positive" + " size: %d" % len(positive_df)
n_label = "negative" + " size: %d" % len(negative_df)
elif df[label_column].dtype == 'float' or df[label_column].dtype == 'int':
        p_label = label_column + " positive" + ("" if cutoff is None else " at cutoff: " + str(cutoff)) + " size: %d" % len(positive_df)
        n_label = label_column + " negative" + ("" if cutoff is None else " at cutoff: " + str(cutoff)) + " size: %d" % len(negative_df)
fig1 = plt.figure( figsize=(16, 8))
plt.subplot(1, 3, 1)
_range = ( float(np.min( df[feature_column])), float(np.max(df[feature_column])) )
try: ax = sns.distplot( positive_df[ feature_column ], label=p_label, hist_kws={"range": _range}, bins=50 )
except Exception as e: eprint( str(e) )
try: ax = sns.distplot( negative_df[ feature_column ], label=n_label, hist_kws={"range": _range}, bins=50 )
except Exception as e: eprint( str(e) )
plt.legend()
plt.title( "Confidence distributions on positive & negative classes (n=%d)" % len(df) )
# Plot 2: Precision recall curve
plt.subplot(1, 3, 2)
precision, recall, _ = precision_recall_curve( df[label_column], df[feature_column])
# In matplotlib < 1.5, plt.fill_between does not have a 'step' argument
step_kwargs = ({'step': 'post'}
if 'step' in signature(plt.fill_between).parameters
else {})
plt.step(recall, precision, color='b', alpha=0.2,
where='post')
plt.fill_between(recall, precision, alpha=0.2, color='b', **step_kwargs)
plt.xlabel('Recall')
plt.ylabel('Precision')
plt.ylim([0.0, 1.05])
plt.xlim([0.0, 1.0])
average_precision = average_precision_score( df[label_column], df[feature_column])
plt.title('Precision-Recall curve: AP={0:0.2f}'.format(
average_precision))
# Plot 3: Confidence & Precision-recall curves
plt.subplot(1, 3, 3)
"""Show precision and recall as a function of different decision thresholds."""
precision, recall, thresholds = precision_recall_curve( df[label_column], df[feature_column], pos_label=1)
plot_p, = plt.plot(thresholds, precision[1:], "--", label="precision")
plot_r, = plt.plot(thresholds, recall[1:], label="recall")
plt.xlabel('Confidence')
plt.ylabel('Metric')
plt.ylim([0.0, 1.05])
plt.xlim([0.0, 1.0])
plt.legend()
plt.show()
# Showing examples of dataset
print( "Now showing random datapoints from dataset: ")
for _, row in df.sample(10).iterrows():
display_lambda(row)
plt.title('confidence: %6.3f, label %d' % (row[feature_column], row[label_column]))
plt.show()
if cutoff is None:
print( 'cutoff point was not given, defaulting to 0.5')
cutoff = 0.5
predictions = df[feature_column] > cutoff
FP_mask = np.logical_and( np.logical_not(df[label_column]), predictions )
TP_mask = np.logical_and( df[label_column] , predictions )
FN_mask = np.logical_and( df[label_column] , np.logical_not(predictions))
TN_mask = np.logical_and( np.logical_not(df[label_column]), np.logical_not(predictions) )
print( "Now showing FP predictions from dataset: ")
for _, row in df[ FP_mask ][:10].iterrows():
display_lambda(row)
plt.title(' FP (confidence: %6.3f, label %d)' % (row[feature_column], row[label_column]))
plt.show()
print( "Now showing FN predictions from dataset: ")
for _, row in df[ FN_mask ][:10].iterrows():
display_lambda(row)
plt.title('FN (confidence: %6.3f, label %d)' % (row[feature_column], row[label_column]))
plt.show()
print( "Now showing TN predictions from dataset: ")
for _, row in df[ TN_mask ][:10].iterrows():
display_lambda(row)
plt.title(' TN (confidence: %6.3f, label %d)' % (row[feature_column], row[label_column]))
plt.show()
print( "Now showing TP predictions from dataset: ")
for _, row in df[ TP_mask ][:10].iterrows():
display_lambda(row)
plt.title(' TP (confidence: %6.3f, label %d)' % (row[feature_column], row[label_column]))
plt.show()
return
if __name__ == "__main__":
num_dp = 100
confidences = np.append(
np.random.normal(0.3, 0.2, num_dp).flatten(),
np.random.normal(0.7, 0.2, num_dp).flatten() , axis=0
)
labels = np.append(
[ 0 for _ in range(num_dp)],
[ 1 for _ in range(num_dp)], axis=0
)
random_value = np.random.randint(0, 10000, num_dp * 2)
labels = labels.astype(bool)
    report_df = pd.DataFrame({'confidence': confidences, 'label': labels, 'dp_data': random_value})
def display_image( row ):
np.random.seed( row['dp_data'] )
img = np.random.rand( 100, 100 )
plt.imshow( img )
classifier_full_report( report_df, display_lambda=display_image )
``` |
{
"source": "jonkuhn/reinteract-jk",
"score": 2
} |
#### File: lib/reinteract/editor.py
```python
import os
import gobject
import gtk
import pango
from application import application
from format_escaped import format_escaped
from notebook import NotebookFile
from shell_buffer import ShellBuffer
from shell_view import ShellView
from save_file import SaveFileBuilder
class Editor(gobject.GObject):
def __init__(self, notebook):
gobject.GObject.__init__(self)
self.notebook = notebook
self._unsaved_index = application.allocate_unsaved_index()
#######################################################
# Utility
#######################################################
def _clear_unsaved(self):
if self._unsaved_index != None:
application.free_unsaved_index(self._unsaved_index)
self._unsaved_index = None
def _update_filename(self, *args):
self.notify('filename')
self.notify('title')
def _update_modified(self, *args):
self.notify('modified')
self.notify('title')
def _update_state(self, *args):
self.notify('state')
def _update_file(self):
self.notify('file')
def __prompt_for_name(self, title, save_button_text, action, check_name=None):
builder = SaveFileBuilder(title, self._get_display_name(), save_button_text, check_name)
builder.dialog.set_transient_for(self.widget.get_toplevel())
if self._get_filename() != None:
builder.name_entry.set_text(os.path.basename(self._get_filename()))
while True:
response = builder.dialog.run()
if response != gtk.RESPONSE_OK:
break
raw_name = builder.name_entry.get_text()
error_message = None
try:
raw_name = application.validate_name(raw_name)
except ValueError, e:
error_message = e.message
if not error_message:
extension = "." + self._get_extension()
if not (raw_name.lower().endswith(extension)):
raw_name += extension
if not error_message:
fullname = os.path.join(self.notebook.folder, raw_name)
if os.path.exists(fullname):
error_message = "'%s' already exists" % raw_name
if error_message:
dialog = gtk.MessageDialog(parent=self.widget.get_toplevel(), buttons=gtk.BUTTONS_OK,
type=gtk.MESSAGE_ERROR)
dialog.set_markup("<big><b>Please choose a different name</b></big>")
dialog.format_secondary_text(error_message)
dialog.run()
dialog.destroy()
continue
action(fullname)
break
builder.dialog.destroy()
#######################################################
# Implemented by subclasses
#######################################################
def _get_display_name(self):
raise NotImplementedError()
def _get_modified(self):
raise NotImplementedError()
def _get_state(self):
return NotebookFile.NONE
    def _get_filename(self):
        raise NotImplementedError()
    def _get_file(self):
        raise NotImplementedError()
    def _get_extension(self):
        raise NotImplementedError()
    def _save(self, filename):
        raise NotImplementedError()
#######################################################
# Public API
#######################################################
def close(self):
if self._unsaved_index != None:
application.free_unsaved_index(self._unsaved_index)
self._unsaved_index = None
self.widget.destroy()
def confirm_discard(self, before_quit=False):
if not self.modified:
return True
if before_quit:
message_format = self.DISCARD_FORMAT_BEFORE_QUIT
continue_button_text = '_Quit without saving'
else:
message_format = self.DISCARD_FORMAT
continue_button_text = '_Discard'
if self._get_filename() == None:
save_button_text = gtk.STOCK_SAVE_AS
else:
save_button_text = gtk.STOCK_SAVE
message = format_escaped("<big><b>" + message_format + "</b></big>", self._get_display_name())
dialog = gtk.MessageDialog(parent=self.widget.get_toplevel(), buttons=gtk.BUTTONS_NONE,
type=gtk.MESSAGE_WARNING)
dialog.set_markup(message)
dialog.add_buttons(continue_button_text, gtk.RESPONSE_OK,
gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL,
save_button_text, 1)
dialog.set_default_response(1)
response = dialog.run()
dialog.destroy()
if response == gtk.RESPONSE_OK:
return True
elif response == 1:
self.save()
if self.modified:
return False
else:
return True
else:
return False
def load(self, filename):
raise NotImplementedError()
def save(self, filename=None):
if filename == None:
filename = self._get_filename()
if filename == None:
def action(fullname):
self._save(fullname)
self._clear_unsaved()
self.notebook.refresh()
self.__prompt_for_name(title="Save As...", save_button_text="_Save", action=action)
else:
self._save(filename)
def rename(self):
if self._get_filename() == None:
self.save()
return
old_name = os.path.basename(self._get_filename())
title = "Rename '%s'" % old_name
def check_name(name):
return name != "" and name != old_name
def action(fullname):
old_filename = self._get_filename()
self._save(fullname)
self._clear_unsaved()
os.remove(old_filename)
self.notebook.refresh()
self.__prompt_for_name(title=title, save_button_text="_Rename", action=action, check_name=check_name)
@property
def needs_calculate(self):
return (self.state != NotebookFile.EXECUTE_SUCCESS and
self.state != NotebookFile.NONE and
self.state != NotebookFile.EXECUTING)
def calculate(self):
pass
def undo(self):
pass
def redo(self):
pass
@gobject.property
def filename(self):
return self._get_filename()
@gobject.property
def file(self):
return self._get_file()
@gobject.property
def modified(self):
return self._get_modified()
@gobject.property
def state(self):
return self._get_state()
@gobject.property
def title(self):
if self.modified:
return "*" + self._get_display_name()
else:
return self._get_display_name()
```
#### File: lib/reinteract/library_editor.py
```python
import os
import gobject
import gtk
import pango
try:
import gtksourceview
use_sourceview = True
except ImportError:
use_sourceview = False
if use_sourceview:
language_manager = gtksourceview.SourceLanguagesManager()
from application import application
from editor import Editor
from shell_view import ShellView
from window_builder import WindowBuilder
class LibraryEditor(Editor):
DISCARD_FORMAT = 'Discard unsaved changes to library "%s"?'
DISCARD_FORMAT_BEFORE_QUIT = 'Save the changes to library "%s" before quitting?'
def __init__(self, notebook):
Editor.__init__(self, notebook)
self.__filename = None
self.__modified = False
self.__file = None
if use_sourceview:
self.buf = gtksourceview.SourceBuffer()
self.buf.set_highlight(True)
language = language_manager.get_language_from_mime_type("text/x-python")
if language != None:
self.buf.set_language(language)
self.view = gtksourceview.SourceView(self.buf)
self.view.set_insert_spaces_instead_of_tabs(True)
self.view.set_tabs_width(4)
else:
self.buf = gtk.TextBuffer()
self.view = gtk.TextView(self.buf)
self.view.modify_font(pango.FontDescription("monospace"))
self.buf.connect_after('insert-text', lambda *args: self.__set_modified(True))
self.buf.connect_after('delete-range', lambda *args: self.__set_modified(True))
self.widget = gtk.ScrolledWindow()
self.widget.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
self.widget.add(self.view)
self.widget.show_all()
#######################################################
# Overrides
#######################################################
def _get_display_name(self):
if self.__filename == None:
return "Unsaved Library %d" % self._unsaved_index
else:
return os.path.basename(self.__filename)
def _get_filename(self):
return self.__filename
def _get_file(self):
return self.__file
def _get_modified(self):
return self.__modified
def _get_extension(self):
return "py"
def _save(self, filename):
start = self.buf.get_start_iter()
end = self.buf.get_end_iter()
contents = self.buf.get_slice(start, end)
f = open(filename, "w")
f.write(contents)
f.close()
self.__set_filename_and_modified(filename, False)
self.notebook.reset_module_by_filename(self.__filename)
#######################################################
# Utility
#######################################################
def __update_file(self):
if self.__filename:
new_file = self.notebook.file_for_absolute_path(self.__filename)
else:
new_file = None
if new_file == self.__file:
return
if self.__file:
self.__file.active = False
self.__file.modified = False
self.__file = new_file
if self.__file:
self.__file.active = True
self.__file.modified = self.__modified
self._update_file()
def __set_filename(self, filename):
if filename == self.__filename:
return
self.__filename = filename
self._update_filename()
self.notebook.refresh()
self.__update_file()
def __set_modified(self, modified):
        if modified == self.__modified:
return
self.__modified = modified
if self.__file:
self.__file.modified = modified
self._update_modified()
def __set_filename_and_modified(self, filename, modified):
self.freeze_notify()
self.__set_modified(modified)
self.__set_filename(filename)
self.thaw_notify()
#######################################################
# Public API
#######################################################
def load(self, filename):
if os.path.exists(filename):
f = open(filename, "r")
contents = f.read()
f.close()
if use_sourceview:
self.buf.begin_not_undoable_action()
pos = self.buf.get_start_iter()
self.buf.insert(pos, contents)
if use_sourceview:
self.buf.end_not_undoable_action()
self.__set_filename_and_modified(filename, False)
def close(self):
Editor.close(self)
if self.__file:
self.__file.active = False
self.__file.modified = False
def undo(self):
if use_sourceview:
self.view.emit('undo')
def redo(self):
if use_sourceview:
self.view.emit('redo')
```
#### File: tools/common/am_parser.py
```python
import logging
import re
_logger = logging.getLogger("AMParser")
# Simple class to suck in a AM file and get variables from it with substitution
class AMParser(object):
# We ignore possibility of \\\n - a literal backslash at the end of a line
VARIABLE_RE = re.compile(
r'^([a-zA-Z_][a-zA-Z0-9_]*)[ \t]*=[ \t]*((?:.*\\\n)*.*)',
re.MULTILINE);
REFERENCE_RE = re.compile(r'\$\(([a-zA-Z_][a-zA-Z0-9_]*)\)')
def __init__(self, filename, overrides={}):
_logger.debug('Scanning %s', filename)
f = open(filename, "r")
contents = f.read()
f.close()
self.d = {}
for m in AMParser.VARIABLE_RE.finditer(contents):
name = m.group(1)
value = m.group(2).replace('\\\n', '')
            # Canonicalize whitespace for clean debug output; this would break
            # quoted strings, but we don't have any
value = re.sub(r'\s+', ' ', value.strip())
self.d[name] = value
# _logger.debug(' %s = %s', name, value)
self.d.update(overrides)
def __getitem__(self, key):
return AMParser.REFERENCE_RE.sub(lambda m: self[m.group(1)], self.d[key])
def __iter__(self):
return self.d.iterkeys()
def __contains__(self, item):
return item in self.d
def iterkeys(self):
return self.d.iterkeys()
def iteritems(self):
return ((x, self[x]) for x in self.d.iterkeys())
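# A minimal usage sketch (hypothetical Makefile.am contents): variables are
# collected with backslash continuations joined, and $(VAR) references are
# expanded recursively on lookup, e.g.:
#
#   parser = AMParser('Makefile.am', overrides={'top_srcdir': '..'})
#   print parser['python_files']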
``` |
{
"source": "jonl1096/seelvizorg",
"score": 3
} |
#### File: graphfiles/scripts/plot3d.py
```python
import sys
import math
points = []
#files = ['Control182localeq','Control189localeq','Control239localeq']
class Plot3d():
    def getNodesAndEdges(self, infile):
        outpath = 'graphml/'
        # Strip a trailing '.csv' so the output files are named after the input file.
        filename = infile.name[:-4] if infile.name.endswith('.csv') else infile.name
        nodename = outpath + filename + '.nodes.csv'
        edgename = outpath + filename + '.edges.csv'
        for line in infile:
            line = line.strip().split(',')
            points.append(str(line[0]) + "," + str(line[1]) + "," + str(line[2]))
        with open(nodename, 'w') as nodefile:
            with open(edgename, 'w') as edgefile:
                # Every point becomes a node; an edge joins each pair of points
                # whose Euclidean distance falls under the radius threshold.
                for ind in range(len(points)):
                    temp = points[ind].strip().split(',')
                    x = temp[0]
                    y = temp[1]
                    z = temp[2]
                    radius = 18
                    nodefile.write("s" + str(ind + 1) + "," + str(x) + "," + str(y) + "," + str(z) + "\n")
                    for index in range(ind + 1, len(points)):
                        tmp = points[index].strip().split(',')
                        distance = math.sqrt(math.pow(int(x) - int(tmp[0]), 2) + math.pow(int(y) - int(tmp[1]), 2) + math.pow(int(z) - int(tmp[2]), 2))
                        if distance < radius:
                            edgefile.write("s" + str(ind + 1) + "," + "s" + str(index + 1) + "\n")
        # Return the output paths; the file handles are closed when the with-blocks exit.
        return [nodename, edgename]
``` |
{
"source": "jonl112/namedportassociationconverter",
"score": 3
} |
#### File: jonl112/namedportassociationconverter/NamedPortAssociationConverter.py
```python
import pyperclip, sys
def convert( str ):
if str =="":
return str
outstr = ""
pre = ""
post = ""
S = str.split("(")
str = S[-1]
if len(S)==2:
pre = S[0] +"("
elif len(S)!=1:
return "wtf"
S = str.split(")")
str = S[0]
if len(S)==2:
post = ")" + S[1]
elif len(S)!=1:
return "wtf"
S = str.split(",")
outstr = ""
for s in S:
if s.strip():
outstr += "." + s.strip() + "(" + s.strip() + "), "
else:
outstr += ","
return pre + outstr.strip(" ").strip(",") + post
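# Example (hypothetical Verilog instantiation): a positional port list such as
#   mymod m0(clk, rst, data);
# is rewritten by convert() to named port associations:
#   mymod m0(.clk(clk), .rst(rst), .data(data));
# convert_tb() below does the same but wires each port to a "sim_"-prefixed signal.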
def convert_tb( str ):
if str =="":
return str
outstr = ""
pre = ""
post = ""
S = str.split("(")
str = S[-1]
if len(S)==2:
pre = S[0] +"("
elif len(S)!=1:
return "wtf"
S = str.split(")")
str = S[0]
if len(S)==2:
post = ")" + S[1]
elif len(S)!=1:
return "wtf"
S = str.split(",")
outstr = ""
for s in S:
if s.strip():
outstr += "." + s.strip() + "(" + "sim_" + s.strip() + "), "
else:
outstr += ","
return pre + outstr.strip(" ").strip(",") + post
if (len(sys.argv)==1):
f = pyperclip.paste().split("\n")
toWrite = ""
for line in f:
if line == "\n":
toWrite += line
else:
S = line.split("//")
if len(S)==2:
toWrite += convert(S[0]) + "//" + S[1] + "\n"
else:
toWrite += convert(line.strip("\n\t ")) + "\n"
pyperclip.copy(toWrite);
elif ("tb" in str(sys.argv[1])):
f = pyperclip.paste().split("\n")
toWrite = ""
for line in f:
if ("module" in line):
short = line[line.find(" ")+1:]
bracket = short.split("(")
toWrite += bracket[0] + " DUT(" + bracket[1] + "\n"
elif line == "\n":
toWrite += line
else:
S = line.split("//")
if len(S)==2:
toWrite += convert_tb(S[0]) + "//" + S[1] + "\n"
else:
toWrite += convert_tb(line.strip("\n\t ")) + "\n"
pyperclip.copy(toWrite);
``` |
{
"source": "jonl112/Software",
"score": 3
} |
#### File: control/python_controller/generate_difference_equation.py
```python
import sys
import control as ct
from control.matlab import *
# {} required to format string
# {{{}}} required to format string, initialize array, and 'escape' brackets
CONTROLLER_DIFFERENCE_EQUATION_COEFFICIENTS = """
#define WHEEL_SPEED_CONTROLLER_TICK_TIME {tick_time};
#define GENERATED_NUM_WHEEL_SPEED_COEFFICIENTS {numerator_length};
#define GENERATED_NUM_PREVIOUS_WHEEL_SPEED_COMMAND_COEFFICIENTS {denominator_length};
float GENERATED_WHEEL_SPEED_COEFFICIENTS[GENERATED_NUM_WHEEL_SPEED_COEFFICIENTS] =
{{{numerator_coefficients}}};
float GENERATED_PREVIOUS_WHEEL_SPEED_COMMAND_COEFFICIENTS[GENERATED_NUM_PREVIOUS_WHEEL_SPEED_COMMAND_COEFFICIENTS] =
{{{denominator_coefficients}}};
"""
def generate(discrete_tf: ct.TransferFunction):
# Copy the numerator and denominator of the input TF
# Convert from list to string to remove list brackets
numerator_coefficients = str(discrete_tf.num[0][0].tolist())[1:-1]
denominator_coefficients = str(discrete_tf.den[0][0].tolist())[1:-1]
numerator_length = len(discrete_tf.num[0][0])
denominator_length = len(discrete_tf.den[0][0])
tick_time = discrete_tf.dt
return CONTROLLER_DIFFERENCE_EQUATION_COEFFICIENTS.format(
numerator_coefficients=numerator_coefficients,
denominator_coefficients=denominator_coefficients,
numerator_length=numerator_length,
denominator_length=denominator_length,
tick_time=tick_time,
)
#######################################################################
# MAIN #
#######################################################################
if __name__ == "__main__":
with open(sys.argv[-1], "w") as controller_coefficient_generator:
# TODO need to pass parameters from controller design file
# https://github.com/UBC-Thunderbots/Software/issues/1520
numerator = [1, 2]
denominator = [3, 4, 5]
interpolation_period = 0.1
transfer_function = tf(numerator, denominator, interpolation_period)
controller_coefficients = generate(transfer_function)
# Generate difference equation coefficients
controller_coefficient_generator.write(controller_coefficients)
print("INFO: Generated Difference Equation Controller Coefficients")
```
#### File: control/python_controller/generate_difference_equation_test.py
```python
import sys
import control as ct
from control.matlab import *
import unittest
import generate_difference_equation as gde
class TestDifferenceEquationCoefficientGeneration(unittest.TestCase):
def test_generate_string(self):
"""
Test that the generate function correctly formats CONTROLLER_DIFFERENCE_EQUATION_COEFFICIENTS
with the provided transfer function
"""
expected = """
#define WHEEL_SPEED_CONTROLLER_TICK_TIME 0.1;
#define GENERATED_NUM_WHEEL_SPEED_COEFFICIENTS 2;
#define GENERATED_NUM_PREVIOUS_WHEEL_SPEED_COMMAND_COEFFICIENTS 3;
float GENERATED_WHEEL_SPEED_COEFFICIENTS[GENERATED_NUM_WHEEL_SPEED_COEFFICIENTS] =
{5, 4};
float GENERATED_PREVIOUS_WHEEL_SPEED_COMMAND_COEFFICIENTS[GENERATED_NUM_PREVIOUS_WHEEL_SPEED_COMMAND_COEFFICIENTS] =
{3, 2, 1};
"""
numerator = [5, 4]
denominator = [3, 2, 1]
interpolation_period = 0.1
transfer_function = tf(numerator, denominator, interpolation_period)
difference_equation_coefficients = gde.generate(transfer_function)
# Remove whitespace to keep comparison consistent
self.assertEqual(difference_equation_coefficients.strip(), expected.strip())
if __name__ == "__main__":
unittest.main()
```
#### File: python_tools/notebooks/plot_ssl_wrapperpacket.py
```python
from proto.messages_robocup_ssl_wrapper_pb2 import SSL_WrapperPacket
from python_tools.proto_log import ProtoLog
import ipywidgets
from IPython.display import display
wrapper_proto_log = ProtoLog(
"test_data/SensorFusion_SSL_WrapperPacket", SSL_WrapperPacket,
)
# +
from bokeh.plotting import figure
from bokeh.io import output_notebook, show, push_notebook
from python_tools.plotting.plot_ssl_wrapper import SSLWrapperPlotter
output_notebook()
fig = figure(plot_width=900, plot_height=900, match_aspect=True)
ssl_wrapper_plotter = SSLWrapperPlotter(fig)
def plot_ssl_wrapper_at_idx(idx):
ssl_wrapper_plotter.plot_ssl_wrapper(wrapper_proto_log[idx])
push_notebook()
show(fig, notebook_handle=True)
slider = ipywidgets.IntSlider(min=0, max=len(wrapper_proto_log) - 1)
ipywidgets.interact(plot_ssl_wrapper_at_idx, idx=slider)
# -
```
#### File: src/python_tools/proto_log.py
```python
from proto.repeated_any_msg_pb2 import RepeatedAnyMsg
from google.protobuf.internal.decoder import _DecodeVarint32
from typing import TypeVar, Generic, Type, Any, Iterator, List, Dict
import os
MsgClass = TypeVar("MsgClass")
class ProtoLog(Generic[MsgClass]):
"""
ProtoLog allows users to work with directories containing serialized, delimited "RepeatedAnyMsg"
chunks, representing a consecutive series of messages encapsulated in "Any" messages.
Usage:
`proto_log = ProtoLog('/tmp/test/PrimitiveSet/', PrimitiveSet)`
loads the directory of chunks that represent a series of PrimitiveSet
`proto_log[1000]` gets the 1000th PrimitiveSet message in the directory
```
for primitive_set_msg in proto_log:
print(primitive_set_msg.time_sent)
```
will iterate over the messages in the directory and print the `time_sent` field.
"""
def __init__(self, directory: str, msg_class: Type[MsgClass]):
"""
Constructs a ProtoLog from the directory of delimited Protobuf 'RepeatedAnyMsg' messages
at the given path.
:param directory: The path of a directory containing delimited RepeatedAnyMsg messages in files
:param msg_class: The type of the message contained in the RepeatedAnyMsg chunks
"""
self.msg_class: Type[MsgClass] = msg_class
self.repeated_any_msgs: List[RepeatedAnyMsg] = []
self.chunk_start_idxs: List[int] = []
self.cached_unpacked_msgs: Dict[int, MsgClass] = dict()
cur_start_idx = 0
for file in os.listdir(directory):
filepath = os.path.join(directory, file)
if file.isnumeric() and os.path.isfile(filepath):
                with open(filepath, "rb") as chunk_file:
                    buf = chunk_file.read()
msg_len, new_pos = _DecodeVarint32(buf, 0)
repeated_any_msg = RepeatedAnyMsg()
repeated_any_msg.ParseFromString(buf[new_pos : new_pos + msg_len])
self.repeated_any_msgs.append(repeated_any_msg)
self.chunk_start_idxs.append(cur_start_idx)
cur_start_idx += len(repeated_any_msg.messages)
def __len__(self) -> int:
"""
Returns the total number of messages in the data directory.
:return: the total number of messages in the data directory.
"""
return self.chunk_start_idxs[-1] + len(self.repeated_any_msgs[-1].messages)
def _get_item_at_idx(self, idx: int) -> MsgClass:
"""
Returns the idx'th message out of all the messages in the data directory.
:param idx: index of the message
:return: the message at the given index
"""
if idx in self.cached_unpacked_msgs:
return self.cached_unpacked_msgs[idx]
if idx >= self.chunk_start_idxs[-1] + len(self.repeated_any_msgs[-1].messages):
raise IndexError(
"Tried to access msg idx {} when we only have {} msgs!".format(
idx,
self.chunk_start_idxs[-1]
+ len(self.repeated_any_msgs[-1].messages),
)
)
item_chunk_idx = len(self.chunk_start_idxs) - 1
for chunk_idx in range(len(self.chunk_start_idxs) - 1):
if self.chunk_start_idxs[chunk_idx + 1] > idx:
item_chunk_idx = chunk_idx
break
msg_idx = idx - self.chunk_start_idxs[item_chunk_idx]
msg = self.msg_class()
self.repeated_any_msgs[item_chunk_idx].messages[msg_idx].Unpack(msg)
self.cached_unpacked_msgs[idx] = msg
return msg
def __getitem__(self, key: Any) -> MsgClass:
"""
Returns the message at the given index if key is an integer, returns every message
of messages from the slice.start and slice.stop at intervals of slice.step, if the key
is a slice, else throws IndexError
:param key: an integer or a slice
:return: a message at the given index or a series of messages as specified by the slice
"""
if isinstance(key, slice):
            start = key.start if key.start is not None else 0
            stop = key.stop if key.stop is not None else len(self)
            step = key.step if key.step is not None else 1
result = []
for idx in range(start, stop, step):
result.append(self._get_item_at_idx(idx))
return result
elif isinstance(key, int):
return self._get_item_at_idx(key)
else:
raise IndexError("Invalid index type!")
def __iter__(self) -> Iterator[MsgClass]:
"""
Returns an iterator over the messages in the data directory.
:return: An iterator
"""
self.iter_idx = 0
return self
def __next__(self) -> MsgClass:
"""
Increments the iterator and returns the message at the current iterator position.
:return: the message at the current iterator position.
"""
try:
result = self[self.iter_idx]
self.iter_idx += 1
return result
except IndexError:
raise StopIteration
def get_chunk_count(self) -> int:
"""
Returns the number of chunk files in the data directory that this object was
constructed with.
:return: the number of chunk files in the data directory.
"""
return len(self.repeated_any_msgs)
```
#### File: parameter/generation_scripts/case_conversion.py
```python
import re
def to_pascal_case(snake_str):
return "".join(x.title() for x in snake_str.split("_"))
def to_camel_case(snake_str):
pascal_case = to_pascal_case(snake_str)
return pascal_case[0].lower() + pascal_case[1:]
def to_snake_case(camel_str):
return re.sub(r"(?<!^)(?=[A-Z])", "_", camel_str).lower()
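# Quick sanity checks of the three conversions (illustrative values):
if __name__ == "__main__":
    assert to_pascal_case("robot_id") == "RobotId"
    assert to_camel_case("robot_id") == "robotId"
    assert to_snake_case("robotId") == "robot_id"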
```
#### File: parameter/generation_scripts/generate_cpp_dynamic_parameters.py
```python
import yaml
import os
from pathlib import Path
import argparse
from config_yaml_loader import (
ConfigYamlLoader,
ConfigYamlException,
ConfigYamlCycleDetected,
ConfigYamlMalformed,
)
from cpp_writer import CppWriter
# Path relative to the bazel WORKSPACE root
# This path is included in the data for the py_binary bazel target
PARAMETER_CONFIG_PATH = Path(os.path.dirname(__file__), "../config_definitions")
def generate_cpp_dynamic_parameters(output_header, output_source, include_headers):
yamls = list(PARAMETER_CONFIG_PATH.glob("*.yaml"))
config_metadata = ConfigYamlLoader.get_config_metadata(yamls)
CppWriter.write_config_metadata_header(
output_header, include_headers, "ThunderbotsConfig", config_metadata
)
CppWriter.write_config_metadata_source(
output_source, output_header, "ThunderbotsConfig", config_metadata
)
def main():
parser = argparse.ArgumentParser(description="Generate DynamicParameters")
parser.add_argument(
"--output_header",
type=str,
required=True,
help="The header file that will be generated",
)
parser.add_argument(
"--output_source",
type=str,
required=True,
help="The source file that will be generated",
)
parser.add_argument(
"--include_headers",
nargs="+",
required=False,
help=(
"Filepaths (relative to the bazel WORKSPACE) for any header "
"files that should be included at the top of the generated code"
),
)
args = parser.parse_args()
generate_cpp_dynamic_parameters(
args.output_header, args.output_source, args.include_headers,
)
#######################################################################
# Main #
#######################################################################
if __name__ == "__main__":
main()
```
#### File: tactic/goalie/goalie_tactic_test.py
```python
import sys
import pytest
import software.python_bindings as tbots
from proto.primitive_pb2 import MaxAllowedSpeedMode
from proto.tactic_pb2 import AssignedTacticPlayControlParams, GoalieTactic, Tactic
from software.simulated_tests.robot_enters_region import *
from software.simulated_tests.ball_enters_region import *
from software.simulated_tests.ball_moves_forward import *
from software.simulated_tests.friendly_has_ball_possession import *
from software.simulated_tests.ball_speed_threshold import *
from software.simulated_tests.robot_speed_threshold import *
from software.simulated_tests.excessive_dribbling import *
from software.simulated_tests.simulated_test_fixture import tactic_runner
@pytest.mark.parametrize(
"ball_initial_position,ball_initial_velocity,robot_initial_position",
[
# test panic ball very fast in straight line
(tbots.Point(0, 0), tbots.Vector(-5, 0), tbots.Point(-4, -1)),
# test panic ball very_fast in diagonal line
(
tbots.Point(0, 0),
tbots.Vector(-5.5, 0.25),
tbots.Field.createSSLDivisionBField().friendlyGoalCenter()
+ tbots.Vector(0, -0.5),
),
# test ball very fast misses net
(tbots.Point(0, 0), tbots.Vector(-5, 1), tbots.Point(-4.5, 0)),
# test slow ball at sharp angle to friendly goal
# ball slow inside friendly defense area
(tbots.Point(-4, 0.8), tbots.Vector(-0.2, 0), tbots.Point(0, 0)),
# ball slow inside friendly defense area
(tbots.Point(-4, 0.8), tbots.Vector(-0.2, 0), tbots.Point(0, 2)),
# ball slow inside friendly defense area
(tbots.Point(-4, 0.8), tbots.Vector(-0.2, 0), tbots.Point(0, 2)),
# ball slow inside friendly defense area
(tbots.Point(-4, 0.8), tbots.Vector(-0.2, 0), tbots.Point(-4, 0),),
# ball stationary inside friendly defense area
(
tbots.Point(-4, 0.0),
tbots.Vector(0.0, 0),
tbots.Field.createSSLDivisionBField().friendlyGoalpostPos(),
),
# ball stationary inside no-chip rectangle
(
tbots.Field.createSSLDivisionBField().friendlyGoalCenter()
+ tbots.Vector(0.1, 0.1),
tbots.Vector(-0.2, 0),
tbots.Point(-4, -1),
),
# ball fast inside no-chip rectangle but no intersection with goal
(
tbots.Field.createSSLDivisionBField().friendlyGoalCenter()
+ tbots.Vector(0.1, 0),
tbots.Vector(0, -0.5),
tbots.Point(-3.5, 1),
),
# ball moving out from inside defense area
(
tbots.Field.createSSLDivisionBField().friendlyGoalCenter()
+ tbots.Vector(0.5, 0),
tbots.Vector(0.5, 0),
tbots.Point(-3.5, 0),
),
# ball slow inside no-chip rectangle
(
tbots.Field.createSSLDivisionBField().friendlyGoalCenter()
+ tbots.Vector(0.1, 0),
tbots.Vector(0.1, -0.1),
tbots.Point(-3.5, 1),
),
# TODO (#2167): This test fails so disabling for Robocup
# ball moving into goal from inside defense area
(
tbots.Field.createSSLDivisionBField().friendlyGoalCenter()
+ tbots.Vector(0.5, 0),
tbots.Vector(-0.5, 0),
tbots.Point(-3.5, 0),
),
# TODO (#2167): This test fails so disabling for Robocup
# ball moving up and out of defense area
(
tbots.Field.createSSLDivisionBField().friendlyGoalCenter()
+ tbots.Vector(0.3, 0),
tbots.Vector(0, 1),
tbots.Point(-3.5, 0),
),
# TODO (#2167): This test fails so disabling for Robocup
# ball moving down and out goal from defense area
(
tbots.Field.createSSLDivisionBField().friendlyGoalCenter()
+ tbots.Vector(0.3, 0),
tbots.Vector(0, -0.7),
tbots.Point(-3.5, 0),
),
],
)
def test_goalie_blocks_shot(
ball_initial_position, ball_initial_velocity, robot_initial_position, tactic_runner
):
# Setup Robot
tactic_runner.simulator.setup_yellow_robots([robot_initial_position])
# Setup Tactic
params = AssignedTacticPlayControlParams()
params.assigned_tactics[0].goalie.CopyFrom(
GoalieTactic(max_allowed_speed_mode=MaxAllowedSpeedMode.PHYSICAL_LIMIT)
)
tactic_runner.yellow_full_system.send_tactic_override(params)
# Setup ball with initial velocity using our software/geom
tactic_runner.simulator.setup_ball(
ball_position=ball_initial_position, ball_velocity=ball_initial_velocity
)
# Always Validation
always_validation_sequence_set = [
[
RobotNeverEntersRegion(
regions=[tbots.Field.createSSLDivisionBField().enemyDefenseArea()]
),
BallNeverEntersRegion(
regions=[tbots.Field.createSSLDivisionBField().friendlyGoal()]
),
NeverExcessivelyDribbles(),
]
]
# Eventually Validation
eventually_validation_sequence_set = [
[
# Goalie should be in the defense area
RobotEventuallyEntersRegion(
regions=[tbots.Field.createSSLDivisionBField().friendlyDefenseArea()]
),
]
]
tactic_runner.run_test(
eventually_validation_sequence_set=eventually_validation_sequence_set,
always_validation_sequence_set=always_validation_sequence_set,
)
if __name__ == "__main__":
sys.exit(pytest.main([__file__, "-svv"]))
```
#### File: jetson_nano/broadcasts/robot_broadcast_receiver.py
```python
import argparse
import socket
from time import time
from typing import List
from proto.announcement_pb2 import Announcement
RECEIVE_TIMEOUT_SECONDS = 0.2
def receive_announcements(port: int, duration: int) -> List[Announcement]:
    """
    Listens for announcements broadcast on the specified port for the given
    duration and returns them with duplicates removed.
    :param duration: how long to listen for announcements, in seconds
    :param port: the port to listen for announcements on
    :return: a list of Announcements, without duplicates
    """
receiver = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
receiver.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
receiver.settimeout(RECEIVE_TIMEOUT_SECONDS)
receiver.bind(("", port))
announcements = []
timeout = time() + duration
while time() < timeout:
try:
data = receiver.recv(1024)
except socket.timeout: # ignore timeout errors
continue
else:
# parse announcement protobuf
announcement = Announcement()
announcement.ParseFromString(data)
# filter out duplicates
if announcement not in announcements:
announcements.append(announcement)
return announcements
def main():
# get command line args
ap = argparse.ArgumentParser()
ap.add_argument("-p", "--port", required=True, type=int, help="port to listen on")
    ap.add_argument(
        "-d", "--duration",
        required=True,
        type=int,
        help="how long to listen for announcements, in seconds. Recommended > 2",
    )
args = vars(ap.parse_args())
port = args["port"]
duration = args["duration"]
announcements = receive_announcements(port, duration)
for announcement in announcements:
print(
f"robot_id: {announcement.robot_id} \nip_addr: {announcement.ip_addr} \nmac_addr: {announcement.mac_addr} \n"
)
if __name__ == "__main__":
main()
```
#### File: display/screens/home_screen.py
```python
from software.display.screens.screen import Screen
import software.jetson_nano.display.constants as constants
ROBOT_ID_INDEX = 0
CHANNEL_ID_INDEX = 1
BATTERY_VOLTAGE_BASE = 60
class HomeScreen(Screen):
"""
This is the dashboard screen which shows basic robot diagnostics
"""
def __init__(self, lcd_display, redis_dict, screen_actions):
"""
@param lcd_display, an instance of the LcdDisplay class
@param redis_dict, a dict of values from redis client to init variables on this screen
@param screen_actions, an instance of ScreenActions class
"""
actions = [
{
"redis key": "robot id",
"value": redis_dict["robot id"],
"type": float,
"delta": 1,
"screen action": screen_actions.EDIT_SCREEN,
},
{
"redis key": "channel id",
"value": redis_dict["channel id"],
"type": float,
"delta": 1,
"screen action": screen_actions.EDIT_SCREEN,
},
{
"redis key": None,
"value": "Menu",
"type": str,
"delta": None,
"screen action": screen_actions.CHANGE_SCREEN,
},
]
self.battery_voltage = redis_dict["battery voltage"]
self.cap_voltage = redis_dict["cap voltage"]
self.packet_loss = redis_dict["packet loss"]
def draw_screen():
""" Home Screen Layout """
def setup_robot_id(x, y):
""" Draw the robot id setting on the screen """
robot_id_str = "Robot ID"
self.lcd_display.draw.text(
(x, y), robot_id_str, font=self.font, fill=constants.WHITE
)
channel_id_x = (
x
+ self.font.getsize(robot_id_str)[0]
+ cursor_size
+ constants.PADDING
)
x = self.font.getsize(robot_id_str)[0] / 2
y += self.font_size
self.lcd_display.draw.text(
(x, y),
str(int(self.actions[ROBOT_ID_INDEX]["value"])),
font=self.big_font,
fill=constants.YELLOW,
)
return channel_id_x
def setup_channel_id(x, y):
""" Draw the channel id setting on the screen """
channel_id_str = "Channel ID"
self.lcd_display.draw.text(
(x, y), channel_id_str, font=self.font, fill=constants.WHITE
)
y += self.font.getsize(channel_id_str)[1]
x += self.font.getsize(channel_id_str)[0] / 2
self.lcd_display.draw.text(
(x, y),
str(int(self.actions[CHANNEL_ID_INDEX]["value"])),
font=self.big_font,
fill=constants.YELLOW,
)
def setup_battery_voltage(x, y):
""" Draw the battery voltage diagnostic on the screen """
battery_str = "Battery Voltage: "
x = cursor_size
y = BATTERY_VOLTAGE_BASE
self.lcd_display.draw.text(
(x, y), battery_str, font=self.font, fill=constants.WHITE
)
x += (self.font.getsize(battery_str))[0]
self.lcd_display.draw.text(
(x, y),
str(round(self.battery_voltage, 1)),
font=self.font,
fill=constants.YELLOW,
)
return y + self.font_size
def setup_capacitor_voltage(x, y):
""" Draw the capacitor voltage diagnostic on the screen """
cap_str = "Capacitor Voltage: "
x = cursor_size
self.lcd_display.draw.text(
(x, y), cap_str, font=self.font, fill=constants.WHITE
)
x += (self.font.getsize(cap_str))[0]
self.lcd_display.draw.text(
(x, y),
str(round(self.cap_voltage, 1)),
font=self.font,
fill=constants.YELLOW,
)
return y + self.font_size
def setup_packet_loss(x, y):
""" Draw the packet loss diagnostic on the screen """
packet_loss_str = "Packet Loss %: "
x = cursor_size
self.lcd_display.draw.text(
(x, y), packet_loss_str, font=self.font, fill=constants.WHITE
)
x += (self.font.getsize(packet_loss_str))[0]
self.lcd_display.draw.text(
(x, y),
str(round(self.packet_loss, 1)),
font=self.font,
fill=constants.YELLOW,
)
self.lcd_display.prepare()
cursor_size = self.font.getsize(constants.CURSOR)[0]
x = cursor_size
y = constants.BASE_Y
x = setup_robot_id(x, y)
setup_channel_id(x, y)
y = setup_battery_voltage(x, y)
y = setup_capacitor_voltage(x, y)
setup_packet_loss(x, y)
# draw the back screen
self.lcd_display.draw.text(
(
cursor_size,
self.lcd_display.height - constants.PADDING - self.font_size,
),
"Go to Menu screen",
font=self.font,
fill=constants.WHITE,
)
# draw the cursor
x = constants.BASE_X
y = constants.BASE_Y
if self.curr_action == 1:
x = self.font.getsize("Robot ID")[0] + cursor_size + constants.PADDING
elif self.curr_action == 2:
y = self.lcd_display.height - constants.PADDING - self.font_size
self.lcd_display.draw.text(
(x, y), constants.CURSOR, font=self.font, fill=constants.WHITE
)
# Pass Home Screen parameters to super class
super().__init__(lcd_display, screen_actions, actions, draw_screen)
def update_values(self, redis_dict):
""" Sync values with those from redis """
if not self.edit_mode:
for i in range(self.len):
if self.actions[i]["redis key"] == None:
continue
self.actions[i]["value"] = redis_dict[self.actions[i]["redis key"]]
self.battery_voltage = redis_dict["battery voltage"]
self.cap_voltage = redis_dict["cap voltage"]
self.packet_loss = redis_dict["packet loss"]
# For testing
if __name__ == "__main__":
import sys
sys.path.append("../")
from lcd_user_interface import LcdDisplay
    # NOTE: HomeScreen also expects redis_dict and screen_actions arguments;
    # supply suitable stubs when exercising this file directly.
    home_screen = HomeScreen(LcdDisplay())
home_screen.on_clockwise_rotate()
```
#### File: display/screens/screen.py
```python
from PIL import ImageFont
import software.jetson_nano.display.constants as constants
from software.jetson_nano.display.utils import get_ip_address, get_signal_strength
class Screen:
"""
All new screens will inherit from this class. This class will handles editing variables and maintaining
the current action the cursor is hovering. It also accepts the list of actions so that it can display
them on the screen.
"""
def __init__(self, lcd_display, screen_actions, actions, draw_screen=None):
"""
@param lcd_display, an instance of the LcdDisplay class
@param screen_actions, an instance of ScreenActions class
@param draw_screen, a callback function to re-render screen on lcd display
@param actions, a list of dictionaries for the interactive elements on the screen
"""
self.actions = actions
self.edit_mode = False
self.action = None
self.screen_actions = screen_actions
self.draw_screen = draw_screen
# Maintain current action
self.len = len(self.actions)
self.curr_action = 0
self.lcd_display = lcd_display
self.font_size = 12
self.font = ImageFont.truetype("DejaVuSans.ttf", self.font_size)
self.big_font_size = 22
self.big_font = ImageFont.truetype("DejaVuSans.ttf", self.big_font_size)
def draw_header(self):
""" Draw the display header """
self.lcd_display.draw.rectangle(
(0, 0, self.lcd_display.width, self.font_size + 6),
outline=constants.BLACK,
fill=constants.WHITE,
)
# Get IP address
IP = " IP: " + get_ip_address()
self.lcd_display.draw.text((0, 2), IP, font=self.font, fill=constants.BLACK)
# Get signal strength
signal_strength = " " + get_signal_strength()
self.lcd_display.draw.text(
(self.lcd_display.width - self.font.getsize(signal_strength)[0], 2),
signal_strength,
font=self.font,
fill=constants.BLACK,
)
self.lcd_display.show()
def update_screen(self):
""" Draw the updated screen """
if self.draw_screen != None:
self.draw_screen()
else:
self.draw_actions()
self.lcd_display.show()
self.draw_header()
def on_click(self):
""" Execute the current action """
action = self.actions[self.curr_action]
# For editing settings
if self.screen_actions.EDIT_SCREEN == action["screen action"]:
self.action = action
if not self.edit_mode:
action = {"screen action": self.screen_actions.NONE}
else:
action = {
"redis key": self.action["redis key"],
"value": self.action["value"],
"screen action": self.screen_actions.UPDATE_REDIS,
}
self.edit_mode = not self.edit_mode
elif action["type"] == bool:
action["value"] = 0 if action["value"] else 1
self.update_screen()
elif self.screen_actions.CHANGE_SCREEN == action["screen action"]:
self.curr_action = 0
return action
def on_clockwise_rotate(self):
""" Update current action and update screen """
if not self.edit_mode:
self.curr_action = (self.curr_action + 1) % self.len
else:
self.inc_val()
self.update_screen()
def on_counterclockwise_rotate(self):
""" Update current action and update screen """
if not self.edit_mode:
self.curr_action = (self.curr_action - 1) % self.len
else:
self.dec_val()
self.update_screen()
def inc_val(self):
""" Increment self.param by self.delta"""
self.action["value"] += self.action["delta"]
def dec_val(self):
""" Decrement self.param by self.delta """
self.action["value"] -= self.action["delta"]
def draw_actions(self):
""" Draws our list of actions """
self.lcd_display.prepare()
cursor = constants.CURSOR
cursor_size = self.font.getsize(cursor)[0]
cursor_pos_x = constants.BASE_X
if self.curr_action != len(self.actions) - 1:
cursor_pos_y = constants.BASE_Y + self.font_size * self.curr_action
else:
cursor_pos_y = self.lcd_display.height - self.font_size - constants.PADDING
self.lcd_display.draw.text(
(cursor_pos_x, cursor_pos_y), cursor, font=self.font, fill=constants.WHITE
)
x = cursor_size
y = constants.BASE_Y
for action in self.actions:
if (
action == self.actions[-1]
): # last action should take you to previous screen
y = self.lcd_display.height - self.font_size - constants.PADDING
self.lcd_display.draw.text(
(x, y), action["display string"], font=self.font, fill=constants.WHITE
)
# Display the value for an action; no value when changing screens
if action["screen action"] != self.screen_actions.CHANGE_SCREEN:
x += self.font.getsize(action["display string"])[0]
if action["type"] == bool:
en = True if action["value"] else False
self.lcd_display.draw.text(
(x, y),
"{}".format(en),
font=self.font,
fill=constants.GREEN if en else constants.RED,
)
else:
self.lcd_display.draw.text(
(x, y),
str(round(action["value"], 1)),
font=self.font,
fill=constants.YELLOW,
)
x = cursor_size
y += self.font_size
def update_values(self, redis_dict):
""" Sync values with those from redis """
if not self.edit_mode:
for i in range(self.len):
                if self.actions[i]["redis key"] is None:
continue
if self.actions[i]["type"] == bool:
self.actions[i]["value"] = (
1 if redis_dict[self.actions[i]["redis key"]] else 0
)
else:
self.actions[i]["value"] = redis_dict[self.actions[i]["redis key"]]
```
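For context, a minimal sketch of how an encoder driver might call into the screen class above. The two handler functions and the idea of a caller that swaps screens are assumptions for illustration; only the `on_*` methods and `screen_actions` come from the class itself.
```python
# Hypothetical glue code for the screen class above; `screen` is any
# instance of it. The surrounding encoder callback API is assumed.
def handle_rotation(screen, clockwise):
    if clockwise:
        screen.on_clockwise_rotate()
    else:
        screen.on_counterclockwise_rotate()

def handle_press(screen):
    action = screen.on_click()
    if action["screen action"] == screen.screen_actions.CHANGE_SCREEN:
        pass  # the caller is expected to swap in the requested screen here
```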
#### File: software/simulated_tests/excessive_dribbling.py
```python
import software.python_bindings as tbots
from proto.import_all_protos import *
from software.simulated_tests.validation import (
Validation,
create_validation_geometry,
create_validation_types,
)
class ExcessivelyDribbling(Validation):
"""Checks if any friendly robot is excessively dribbling the ball, i.e. for over 1m."""
def __init__(self):
self.continous_dribbling_start_point = None
def get_validation_status(self, world) -> ValidationStatus:
"""Checks if any friendly robot is excessively dribbling the ball, i.e. for over 1m.
:param world: The world msg to validate
:returns: FAILING when the robot is excessively dribbling
PASSING when the robot is not excessively dribbling
"""
ball_position = tbots.createPoint(world.ball.current_state.global_position)
for robot in world.friendly_team.team_robots:
if not tbots.Robot(robot).isNearDribbler(ball_position, 0.01):
# if ball is not near dribbler then de-activate this validation
self.continous_dribbling_start_point = None
elif (
ball_position - (self.continous_dribbling_start_point or ball_position)
).length() > 1.0:
return ValidationStatus.FAILING
elif self.continous_dribbling_start_point is None:
# ball is in dribbler, but previously wasn't in dribbler, so set continuous dribbling start point
self.continous_dribbling_start_point = ball_position
return ValidationStatus.PASSING
def get_validation_geometry(self, world) -> ValidationGeometry:
"""
(override) Shows the max allowed dribbling circle
"""
return create_validation_geometry(
[tbots.Circle(self.continous_dribbling_start_point, 1.0)]
if self.continous_dribbling_start_point is not None
else []
)
def __repr__(self):
return "Check that the dribbling robot has not dribbled for more than 1m"
(
EventuallyStopExcessivelyDribbling,
EventuallyStartsExcessivelyDribbling,
NeverExcessivelyDribbles,
AlwaysExcessivelyDribbles,
) = create_validation_types(ExcessivelyDribbling)
```
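A hedged sketch of how one of the generated validation types might be wired into a simulated test. The `simulated_test_runner` fixture and its `run` keyword arguments are assumptions; only `NeverExcessivelyDribbles` comes from the module above.
```python
# Hypothetical test; the runner fixture and its signature are assumed.
def test_dribbling_stays_legal(simulated_test_runner):
    simulated_test_runner.run(
        always_validation_sequence_set=[[NeverExcessivelyDribbles()]],
        test_timeout_s=10,
    )
```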
#### File: software/simulated_tests/full_system.py
```python
from subprocess import Popen
from proto.import_all_protos import *
from software.networking.threaded_unix_listener import ThreadedUnixListener
from software.networking.threaded_unix_sender import ThreadedUnixSender
from software.py_constants import *
class FullSystem(object):
def __init__(self, runtime_dir="/tmp/tbots"):
"""Runs our standalone er-force simulator binary and sets up the unix
sockets to communicate with it
:param runtime_dir: The runtime directory
"""
# inputs to full_system
self.robot_status_sender = ThreadedUnixSender(runtime_dir + ROBOT_STATUS_PATH)
self.ssl_wrapper_sender = ThreadedUnixSender(runtime_dir + SSL_WRAPPER_PATH)
self.ssl_referee_sender = ThreadedUnixSender(runtime_dir + SSL_REFEREE_PATH)
self.sensor_proto_sender = ThreadedUnixSender(runtime_dir + SENSOR_PROTO_PATH)
# outputs from full_system
self.world_listener = ThreadedUnixListener(runtime_dir + WORLD_PATH, World)
self.primitive_listener = ThreadedUnixListener(
runtime_dir + PRIMITIVE_PATH, PrimitiveSet
)
# override the tactic
self.tactic_override = ThreadedUnixSender(runtime_dir + TACTIC_OVERRIDE_PATH)
# TODO (#2510) rename to full_system
self.full_system_process = Popen(["software/unix_full_system"])
def send_robot_status(self, robot_status):
"""Send the robot_status to full_system
:param robot_status: The RobotStatus to send
"""
self.robot_status_sender.send(robot_status)
def send_ssl_wrapper(self, ssl_wrapper_packet):
"""Send the ssl_wrapper_packet to full_system
:param ssl_wrapper_packet: The packet to send
"""
self.ssl_wrapper_sender.send(ssl_wrapper_packet)
def send_ssl_referee(self, ssl_referee_packet):
"""Send the ssl_referee_packet to full_system
:param ssl_referee_packet: The packet to send
"""
self.ssl_referee_sender.send(ssl_referee_packet)
def send_sensor_proto(self, sensor_proto):
"""Send a sensor msg to full system.
:param sensor_proto: The sensor msg to send
"""
self.sensor_proto_sender.send(sensor_proto)
def send_tactic_override(self, assigned_tactic_play_control_params):
"""Send the control params for the assigned tactic play to
run specific tactics on assigned robots.
:param assigned_tactic_play_control_params:
The control params of the AssignedTacticPlay
"""
self.tactic_override.send(assigned_tactic_play_control_params)
def get_world(self, block=False):
"""Grabs the world msg from the buffer if it exists, returns None
if buffer is empty.
:param block: Whether or not we should block until we receive a packet
:return: World or None
"""
return self.world_listener.get_most_recent_message(block)
def get_primitive_set(self):
"""Grabs the primitive msg set from the buffer if it exists, returns
None if buffer is empty.
:return: PrimitiveSet or None
"""
return self.primitive_listener.get_most_recent_message()
    def stop(self):
        """Stop all listeners and senders."""
for unix_socket in [
self.robot_status_sender,
self.ssl_wrapper_sender,
self.ssl_referee_sender,
self.tactic_override,
self.sensor_proto_sender,
self.world_listener,
]:
unix_socket.force_stop()
self.primitive_listener.force_stop()
```
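For orientation, a rough sketch of a driver loop against `FullSystem`. The `SensorProto` message name and leaving it empty are assumptions; a real caller would populate it from the simulator.
```python
# Illustrative loop only; message construction is schematic.
full_system = FullSystem("/tmp/tbots")
for _ in range(100):
    full_system.send_sensor_proto(SensorProto())   # simulated sensor data
    world = full_system.get_world(block=True)      # wait for the next World
    primitive_set = full_system.get_primitive_set()
    if primitive_set is not None:
        pass  # feed the primitives back into the simulator
full_system.stop()
```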
#### File: thunderscope/field/obstacle_layer.py
```python
import queue
import pyqtgraph as pg
from proto.geometry_pb2 import Circle, Polygon
from proto.visualization_pb2 import Obstacles
from pyqtgraph.Qt import QtCore, QtGui
from software.thunderscope.colors import Colors
import software.thunderscope.constants as constants
from software.networking.threaded_unix_listener import ThreadedUnixListener
from software.thunderscope.field.field_layer import FieldLayer
class ObstacleLayer(FieldLayer):
def __init__(self, buffer_size=10):
FieldLayer.__init__(self)
self.cached_obstacles = Obstacles()
self.obstacle_buffer = queue.Queue(buffer_size)
def paint(self, painter, option, widget):
"""Paint this layer
:param painter: The painter object to draw with
:param option: Style information (unused)
:param widget: The widget that we are painting on
"""
try:
obstacles = self.obstacle_buffer.get_nowait()
        except queue.Empty:
obstacles = self.cached_obstacles
self.cached_obstacles = obstacles
painter.setPen(pg.mkPen(Colors.NAVIGATOR_OBSTACLE_COLOR, width=2))
for polyobstacle in obstacles.polygon:
polygon_points = [
QtCore.QPoint(
constants.MM_PER_M * point.x_meters,
constants.MM_PER_M * point.y_meters,
)
for point in polyobstacle.points
]
poly = QtGui.QPolygon(polygon_points)
painter.drawPolygon(poly)
for circleobstacle in obstacles.circle:
painter.drawEllipse(
self.createCircle(
constants.MM_PER_M * circleobstacle.origin.x_meters,
constants.MM_PER_M * circleobstacle.origin.y_meters,
constants.MM_PER_M * circleobstacle.radius,
)
)
```
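A small sketch of feeding the layer, mirroring the drop-on-full behaviour that `paint` relies on; the message would normally arrive over a unix listener rather than be constructed inline.
```python
# Sketch: if the queue is full the frame is simply dropped and paint()
# keeps drawing the cached obstacles.
layer = ObstacleLayer(buffer_size=10)
try:
    layer.obstacle_buffer.put_nowait(Obstacles())
except queue.Full:
    pass
```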
#### File: thunderscope/log/g3log_checkboxes.py
```python
from pyqtgraph.Qt.QtWidgets import QVBoxLayout, QWidget, QGridLayout, QCheckBox
class g3logCheckboxes(QWidget):
def __init__(self):
QWidget.__init__(self)
layout = QGridLayout()
self.setLayout(layout)
# Creates 4 checkboxes based on the 4 log types
self.debug_checkbox = QCheckBox("DEBUG")
self.debug_checkbox.setChecked(True)
layout.addWidget(self.debug_checkbox, 0, 0)
self.info_checkbox = QCheckBox("INFO")
self.info_checkbox.setChecked(True)
layout.addWidget(self.info_checkbox, 0, 1)
self.warning_checkbox = QCheckBox("WARNING")
self.warning_checkbox.setChecked(True)
layout.addWidget(self.warning_checkbox, 0, 2)
self.fatal_checkbox = QCheckBox("FATAL")
self.fatal_checkbox.setChecked(True)
layout.addWidget(self.fatal_checkbox, 0, 3)
self.setStyleSheet(
"color : white;"
"background-color: black;"
"selection-background-color: #606060;"
"selection-color: #ffffff;"
)
```
#### File: software/thunderscope/proto_receiver.py
```python
import queue
from threading import Thread
import software.thunderscope.constants as constants
from software.networking.threaded_unix_listener import ThreadedUnixListener
class ProtoReceiver:
def __init__(self):
self.proto_map = dict()
self.proto_receiver = ThreadedUnixListener(
constants.UNIX_SOCKET_BASE_PATH + "protobuf", max_buffer_size=3,
)
self.thread = Thread(target=self.start, daemon=True)
self.thread.start()
def start(self):
"""
Distributes protobuf from the proto_receiver to all widgets that consume that specific protobuf
"""
while True:
proto = self.proto_receiver.buffer.get()
if proto.DESCRIPTOR.full_name in self.proto_map:
for buffer in self.proto_map[proto.DESCRIPTOR.full_name]:
try:
buffer.put_nowait(proto)
except queue.Full:
pass
def register_observer(self, proto_type, buffer):
"""Register a widget to consume from a given protobuf class
param: proto_type: Class of protobuf to consume
param: buffer: buffer from the widget to register
"""
if proto_type in self.proto_map:
self.proto_map[proto_type.DESCRIPTOR.full_name].append(buffer)
else:
self.proto_map[proto_type.DESCRIPTOR.full_name] = [buffer]
``` |
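A sketch of the consumer side: a widget registers a bounded queue for one proto type and the receiver thread fans messages out to it. `World` is assumed to be importable from `proto.import_all_protos` as in the files above.
```python
# Sketch: registering an observer queue for World protos.
proto_receiver = ProtoReceiver()
world_buffer = queue.Queue(maxsize=3)
proto_receiver.register_observer(World, world_buffer)
world = world_buffer.get()  # blocks until the receiver delivers one
```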
{
"source": "Jon-LaFlamme/cribbage",
"score": 3
} |
#### File: Jon-LaFlamme/cribbage/games.py
```python
import hand
import players
import board
import deck
import verbose
class Cribbage():
def __init__(self, player_one, player_two):
self.player_one = player_one #players.Player() object
self.player_two = player_two
self.board = board.Classic(self.player_one, self.player_two)
self.deck = deck.Deck()
self.game_not_over = True
self.peg_count = 0
self.crib = []
self.turncard = None
def update_board(self):
if self.player_one.score >= 121:
self.player_one.score = 121
self.board.update_pegs()
self.end_sequence()
elif self.player_two.score >= 121:
self.player_two.score = 121
self.board.update_pegs()
self.end_sequence()
else:
self.board.update_pegs()
def determine_dealer_sequence(self):
verbose.start_game(self.player_one, self.player_two)
verbose.cut_for_deal()
#lower cut wins the deal
undetermined = True
while undetermined:
card_1 = self.player_one.cut_deck(self.deck, for_first_deal=True)
verbose.cuts_card(self.player_one, card_1)
card_2 = self.player_two.cut_deck(self.deck, for_first_deal=True)
verbose.cuts_card(self.player_two, card_2)
if card_1.rank < card_2.rank:
self.player_one.is_dealer = True
verbose.win_first_deal(self.player_one)
undetermined = False
elif card_2.rank < card_1.rank:
self.player_two.is_dealer = True
verbose.win_first_deal(self.player_two)
undetermined = False
else:
print('Tie! Return cards to deck and cut again for first deal.')
self.deck.deck.append(card_1)
self.deck.deck.append(card_2)
def deal_sequence(self):
verbose.continue_prompt()
if self.player_one.is_dealer:
verbose.dealing(self.player_one)
for i in range(6):
self.player_one.cards.append(self.deck.deal_one())
self.player_two.cards.append(self.deck.deal_one())
else:
verbose.dealing(self.player_two)
for i in range(6):
self.player_two.cards.append(self.deck.deal_one())
self.player_one.cards.append(self.deck.deal_one())
def discard_sequence(self):
p1_discards = self.player_one.discard()
verbose.discard(self.player_one)
p2_discards = self.player_two.discard()
verbose.discard(self.player_two)
p1_discards.extend(p2_discards)
self.crib = p1_discards
def turncard_sequence(self):
if self.player_one.is_dealer:
self.player_two.cut_deck(self.deck)
self.turncard = self.deck.deal_one()
verbose.turncard(self.player_two, self.player_one, self.turncard)
if self.turncard.rankname == 'jack':
self.player_one.score += 2
verbose.post_score(self.player_one, self.player_two)
verbose.heels(self.player_one, self.turncard)
self.update_board()
else:
self.player_one.cut_deck(self.deck)
self.turncard = self.deck.deal_one()
verbose.turncard(self.player_one, self.player_two, self.turncard)
if self.turncard.rankname == 'jack':
self.player_two.score += 2
verbose.post_score(self.player_one, self.player_two)
verbose.heels(self.player_two, self.turncard)
self.update_board()
def peg_sequence(self):
verbose.pegging()
verbose.continue_prompt()
#temporary copy to restore player.cards to original state after peg_sequence() is complete
hand1 = self.player_one.cards.copy()
hand2 = self.player_two.cards.copy()
        #The non-dealer leads the pegging phase
        if self.player_one.is_dealer:
            is_p1_turn = False
        else:
            is_p1_turn = True
        p1_played_last = False
        while self.game_not_over and (self.player_one.cards or self.player_two.cards):
            self.peg_count = 0
            stack = hand.Hand([])
while (self.game_not_over and self.peg_count < 31) and (self.player_one.can_peg(self.peg_count) or self.player_two.can_peg(self.peg_count)):
if is_p1_turn and self.player_one.can_peg(self.peg_count):
selected = self.player_one.peg_one(stack.hand, self.peg_count, self.turncard)
self.player_one.cards.remove(selected)
stack.hand.append(selected)
self.peg_count += selected.value
verbose.peg_one(self.player_one, selected, self.peg_count)
peg_points = stack.determine_peg_points(self.peg_count)
if peg_points > 0:
self.player_one.score += peg_points
verbose.peg_points(self.player_one, peg_points)
if self.player_one.score <= 121:
verbose.post_score(self.player_one, self.player_two)
self.update_board()
is_p1_turn = False
p1_played_last = True
if not self.game_not_over:
break
if self.player_two.can_peg(self.peg_count):
selected = self.player_two.peg_one(stack.hand, self.peg_count, self.turncard)
self.player_two.cards.remove(selected)
stack.hand.append(selected)
self.peg_count += selected.value
verbose.peg_one(self.player_two, selected, self.peg_count)
peg_points = stack.determine_peg_points(self.peg_count)
if peg_points > 0:
self.player_two.score += peg_points
verbose.peg_points(self.player_two, peg_points)
if self.player_two.score <= 121:
verbose.post_score(self.player_one, self.player_two)
self.update_board()
if not self.game_not_over:
break
p1_played_last = False
is_p1_turn = True
elif self.player_one.can_peg(self.peg_count):
is_p1_turn = True
if self.peg_count < 31:
if p1_played_last:
verbose.peg_go(self.player_one)
self.player_one.score += 1
verbose.post_score(self.player_one, self.player_two)
self.update_board()
else:
verbose.peg_go(self.player_two)
self.player_two.score += 1
verbose.post_score(self.player_one, self.player_two)
self.update_board()
if not self.game_not_over:
break
self.player_one.cards = hand1
self.player_two.cards = hand2
self.board.display_board()
def show_sequence(self):
verbose.counting()
h1 = hand.Hand(self.player_one.cards, turncard=self.turncard)
h2 = hand.Hand(self.player_two.cards, turncard=self.turncard)
cr = hand.Hand(self.crib, turncard=self.turncard, is_crib=True)
h1_pts = h1.compute_score()
h2_pts = h2.compute_score()
cr_pts = cr.compute_score()
verbose.continue_prompt()
if self.player_one.is_dealer:
self.player_two.score += h2_pts
verbose.show_hand(self.player_two, self.turncard, h2_pts)
verbose.post_score(self.player_one, self.player_two)
self.update_board()
self.board.display_board()
if self.game_not_over:
verbose.continue_prompt()
self.player_one.score += h1_pts
verbose.show_hand(self.player_one, self.turncard, h1_pts)
verbose.post_score(self.player_one, self.player_two)
self.update_board()
self.board.display_board()
if self.game_not_over:
verbose.continue_prompt()
self.player_one.score += cr_pts
verbose.show_hand(self.player_one, self.turncard, cr_pts, hand=self.crib)
verbose.post_score(self.player_one, self.player_two)
self.update_board()
self.board.display_board()
else:
self.player_one.score += h1_pts
verbose.show_hand(self.player_one, self.turncard, h1_pts)
verbose.post_score(self.player_one, self.player_two)
self.update_board()
self.board.display_board()
if self.game_not_over:
verbose.continue_prompt()
self.player_two.score += h2_pts
verbose.show_hand(self.player_two, self.turncard, h2_pts)
verbose.post_score(self.player_one, self.player_two)
self.update_board()
self.board.display_board()
if self.game_not_over:
verbose.continue_prompt()
self.player_two.score += cr_pts
verbose.show_hand(self.player_two, self.turncard, cr_pts, hand=self.crib)
verbose.post_score(self.player_one, self.player_two)
self.update_board()
self.board.display_board()
def cleanup(self):
verbose.new_round()
self.deck.deck.append(self.turncard)
self.deck.deck.extend(self.player_one.cards)
self.deck.deck.extend(self.player_two.cards)
self.deck.deck.extend(self.crib)
self.player_one.cards = []
self.player_two.cards = []
self.crib = []
if len(self.deck.deck) != 52:
self.deck = deck.Deck()
self.deck.shuffle()
if self.player_one.is_dealer:
self.player_one.is_dealer = False
self.player_two.is_dealer = True
else:
self.player_one.is_dealer = True
self.player_two.is_dealer = False
def end_sequence(self):
# MATCH_TEMPLATE = {'win': 0, 'was_skunked': 0, 'was_dbl_skunked': 0, 'skunked_opponent': 0, 'dbl_skunked_oppenent': 0}
if self.player_one.score >= 121:
print(f'{self.player_one.name} wins!')
if isinstance(self.player_one, players.Human):
self.player_one.user.match_stats['win'] += 1
if self.player_two.score < 61:
self.player_one.user.match_stats['dbl_skunked_opponent'] += 1
self.player_two.user.match_stats['was_dbl_skunked'] += 1
elif self.player_two.score < 91:
self.player_one.user.match_stats['skunked_opponent'] += 1
                    self.player_two.user.match_stats['was_skunked'] += 1
else:
print(f'{self.player_two.name} wins!')
if isinstance(self.player_two, players.Human):
self.player_two.user.match_stats['win'] += 1
if self.player_one.score < 61:
self.player_two.user.match_stats['dbl_skunked_opponent'] += 1
self.player_one.user.match_stats['was_dbl_skunked'] += 1
elif self.player_one.score < 91:
self.player_two.user.match_stats['skunked_opponent'] += 1
self.player_one.user.match_stats['was_skunked'] += 1
#update user stats
        if isinstance(self.player_one, players.Human):
            self.player_one.user.update_profile()
            self.player_one.user.save_updated_profile()
        if isinstance(self.player_two, players.Human):
            self.player_two.user.update_profile()
            self.player_two.user.save_updated_profile()
#end the game
self.game_not_over = False
def game_driver(self):
#TODO(Jon) Put everything together in a while loop
self.deck.shuffle()
self.determine_dealer_sequence()
while self.game_not_over:
self.deal_sequence()
self.discard_sequence()
self.turncard_sequence()
if self.game_not_over:
self.peg_sequence()
if self.game_not_over:
self.show_sequence()
if self.game_not_over:
self.cleanup()
class Ultimate(Cribbage):
    def __init__(self, player_one, player_two):
        #TODO: swap in an Ultimate-specific board once one exists
        super().__init__(player_one, player_two)
class Classic(Cribbage):
    def __init__(self, player_one, player_two):
        super().__init__(player_one, player_two)
```
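A minimal sketch of starting a game, reusing the constructors exercised in `test_board.py` below; the user and player arguments are placeholders in the same style.
```python
# Sketch: wire up two players and run the game loop to 121 points.
import games, players, users

u1 = users.User(username='demo_user', email='<EMAIL>')
p1 = players.Human(name='Jon', user=u1, lane=1)
p2 = players.Computer(difficulty='hard', lane=2)
games.Cribbage(p1, p2).game_driver()
```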
#### File: Jon-LaFlamme/cribbage/test_board.py
```python
import board, players, users
def test_Classic_constructor():
print('\n--------- Test Board constructor -------------\n')
u1 = users.User(username='test_user1', email='<EMAIL>')
u2 = users.User(username='test_user2', email='<EMAIL>')
p1 = players.Human(name='Jon', user=u1, lane=1)
#p2 = players.Human(name='Rick', user=u2, lane=2)
p2 = players.Computer(difficulty='hard', lane=2)
b = board.Classic(player_one=p1, player_two=p2)
print(f'- Player 1 name: {b.player_one.name}')
print(f'- Player 2 name: {b.player_two.name}\n')
b.display_board()
def test_update_pegs():
print('\n--------- Test Update Pegs -------------\n')
u1 = users.User(username='test_user1', email='<EMAIL>')
u2 = users.User(username='test_user2', email='<EMAIL>')
p1 = players.Human(name='Jon', user=u1, lane=1)
p2 = players.Human(name='Rick', user=u2, lane=2)
b = board.Classic(player_one=p1, player_two=p2)
p1.score = 15
b.update_pegs()
p2.score = 25
b.update_pegs()
b.display_board()
p1.score = 35
b.update_pegs()
p2.score = 40
b.update_pegs()
b.display_board()
print(f'Player one: {b.player_one.score}')
print(f'Player two: {b.player_two.score}')
print(f'lane1 lead: {b.lane1_lead_peg}')
print(f'lane1 hind: {b.lane1_hind_peg}')
print(f'lane2 lead: {b.lane2_lead_peg}')
print(f'lane2 hind: {b.lane2_hind_peg}')
def test_display_board():
print('\n--------- Test Update Pegs -------------\n')
u1 = users.User(username='test_user1', email='<EMAIL>')
u2 = users.User(username='test_user2', email='<EMAIL>')
p1 = players.Human(name='Jon', user=u1, lane=1)
p2 = players.Human(name='Rick', user=u2, lane=2)
b = board.Classic(player_one=p1, player_two=p2)
b.display_board()
if __name__ == "__main__":
#test_Classic_constructor() #Test1: Human vs Human, Test2: Human vs Computer, PASSED 12/22/20
#test_update_pegs() #Test1: update scores and pegs and check results on board for lead peg and hind peg, PASSED 12/22/20
    #test_display_board() #PASSED 12/22/20
    pass #all test invocations above are commented out; pass keeps the block valid
```
#### File: Jon-LaFlamme/cribbage/users.py
```python
import os
import json
MATCH_TEMPLATE = {'win': 0, 'was_skunked': 0, 'was_dbl_skunked': 0, 'skunked_opponent': 0, 'dbl_skunked_opponent': 0}
DIFFICULTY_MAP = {'beginner': 1, 'intermediate': 2, 'expert': 3}
GAME_MODES = {'vs_humans','computer_easy','computer_med','computer_hard'}
BADGES = {'win_streak_3','hand_of_eight','hand_of_twelve','hand_of_sixteen','hand_of_twenty',
'hand_of_twenty-four','hand_of_twenty-eight','hand_of_twenty-nine','peg_five',
'peg_eight','peg_twelve','three_skunks','three_dbl_skunks','rank_status'}
PROFILE_TEMPLATE = {'email': 'none',
'rank': 0,
'credits': 0,
'badges': {'win_streak_3': 0, #0: not achieved, #1 achieved on easy, #2 achieved on medium, #3 achieved on hard
'hand_of_eight': 0,
'hand_of_twelve': 0,
'hand_of_sixteen': 0,
'hand_of_twenty': 0,
'hand_of_twenty-four': 0,
'hand_of_twenty-eight': 0,
'hand_of_twenty-nine': 0,
'peg_five': 0,
'peg_eight': 0,
'peg_twelve': 0,
'three_skunks': 0,
'three_dbl_skunks': 0,
'rank_status': 0,}, #0: beginner, #1: intermediate, #2: advanced, #3: elite
'unlocked_boards': {'classic_1': 0,'ultimate_1': 0}, #0: not won, #1 won on easy, #2 won on medium, #3 won on hard
'vs_humans': {'skunks':0,'skunked':0,'dbl_skunks':0,'dbl_skunked':0,'wins':0,'losses':0},
'computer_beginner': {'skunks':0,'skunked':0,'dbl_skunks':0,'dbl_skunked':0,'wins':0,'losses':0},
'computer_intermediate': {'skunks':0,'skunked':0,'dbl_skunks':0,'dbl_skunked':0,'wins':0,'losses':0},
'computer_expert': {'skunks':0,'skunked':0,'dbl_skunks':0,'dbl_skunked':0,'wins':0,'losses':0}
}
#returns the new or existing user after successful sign-in or new profile creation
def sign_in():
invalid = True
while invalid:
print('\n ======== User Sign-In ======== \n')
print('1) Sign-in to an existing account.')
print('2) Create a new account.')
selection = int(input('Make a selection: '))
if selection == 1:
while invalid:
username = input('\nEnter your username: ').lower()
email = input('Enter your email: ').lower()
feedback = lookup_user(username=username, email=email)
if feedback == 'fna':
print('email does not match username.')
option = input('Enter 0 to return to menu. Any other key to try again:')
                    if option == '0':
break
if feedback == 'fa':
u = User(username=username, email=email)
print('Loading profile.')
return u
elif selection == 2:
while invalid:
username = input('\nCreate a username: ').lower()
email = input('Enter your email: ').lower()
feedback = lookup_user(username=username, email=email)
if feedback == 'nf':
add_user(username=username, email=email)
u = User(username=username, email=email)
print('User profile created.')
return u
else:
print(f'username: {username} is already claimed. Please try again.')
else:
print('Invalid selection. Please try again.')
#Found but not authenticated: 'fna', Found and authenticated: 'fa', Not found: 'nf'
def lookup_user(username=None, email=None):
with open('user_directory.json','r') as f:
user_directory = json.load(f)
if username in user_directory:
if user_directory[username]['email'] == email:
return 'fa'
else:
return 'fna'
else:
return 'nf'
def add_user(username=None, email=None):
with open('user_directory.json','r') as f:
user_directory = json.load(f)
user_directory[username] = {'email': email, 'rank': 0}
with open('user_directory.json', 'w') as f:
json.dump(user_directory, f)
class User():
def __init__(self, username=None, email=None):
self.name = username
        self.match_stats = dict(MATCH_TEMPLATE) #copy so instances do not share the module-level template
if os.path.exists(f'{self.name}.json'):
with open(f'{self.name}.json','r') as f:
self.profile = json.load(f)
else:
self.profile = {username: PROFILE_TEMPLATE}
self.profile[username]['email'] = email
with open(f'{self.name}.json', 'w') as f:
json.dump(self.profile, f)
def add_badge(self, badge=None, difficulty=None):
if badge in BADGES and difficulty in DIFFICULTY_MAP:
self.profile[self.name]['badges'][badge] = DIFFICULTY_MAP[difficulty]
def new_credits_calculator(self):
#TODO(Jon) Create function that calculates the new credits awarded a user after achieving various tasks. Done once per game at end.
#Requires a credits dictionary mapping credit value for various achievements
#In-app purchases can also purchase credits
pass
def add_credits(self, credits=None):
self.profile[self.name]['credits'] += credits
def update_unlocked_boards(self, board=None, difficulty=None):
if difficulty in DIFFICULTY_MAP:
value = DIFFICULTY_MAP[difficulty]
if board in self.profile[self.name]['unlocked_boards']:
#Only overwrite old scores if achieved at a greater difficulty level
if value > self.profile[self.name]['unlocked_boards'][board]:
self.profile[self.name]['unlocked_boards'][board] = value
else: self.profile[self.name]['unlocked_boards'][board] = value
def compute_new_rank(self):
rank = self.profile[self.name]['rank']
outcome = 0
penalty = 0
bonus = 0
if rank < 1000:
weighted_gain = 100
weighted_loss = 50
elif rank < 2000:
weighted_gain = 75
weighted_loss = 50
elif rank < 3000:
weighted_gain = 50
weighted_loss = 50
else:
weighted_gain = 25
weighted_loss = 50
if self.match_stats['win'] == 1:
outcome += weighted_gain
else:
outcome -= weighted_loss
if self.match_stats['was_skunked'] == 1:
penalty = 50
elif self.match_stats['was_dbl_skunked'] == 1:
penalty = 100
elif self.match_stats['skunked_opponent'] == 1:
bonus = 50
elif self.match_stats['dbl_skunked_opponent'] == 1:
bonus = 100
return rank + outcome + bonus - penalty
def update_profile(self, game_mode=None):
if game_mode in GAME_MODES:
#stats to update: {'skunks':0,'skunked':0,'dbl_skunks':0,'dbl_skunked':0,'wins':0,'losses':0}
self.profile[self.name][game_mode]['skunks'] += self.match_stats['skunked_opponent']
self.profile[self.name][game_mode]['skunked'] += self.match_stats['was_skunked']
self.profile[self.name][game_mode]['dbl_skunks'] += self.match_stats['dbl_skunked_opponent']
self.profile[self.name][game_mode]['dbl_skunked'] += self.match_stats['was_dbl_skunked']
if self.match_stats['win'] == 1:
self.profile[self.name][game_mode]['wins'] += 1
else:
self.profile[self.name][game_mode]['losses'] += 1
self.profile[self.name]['rank'] = self.compute_new_rank()
def save_updated_profile(self):
with open(f'{self.name}.json', 'w') as f:
json.dump(self.profile, f)
def display_stats(self):
rank = self.profile[self.name]['rank']
credits = self.profile[self.name]['credits']
badges = self.profile[self.name]['badges']
boards = self.profile[self.name]['unlocked_boards']
wins = self.profile[self.name]['vs_humans']['wins']
        losses = self.profile[self.name]['vs_humans']['losses']
skunks = self.profile[self.name]['vs_humans']['skunks']
skunked = self.profile[self.name]['vs_humans']['skunked']
dbl_skunks = self.profile[self.name]['vs_humans']['dbl_skunks']
dbl_skunked = self.profile[self.name]['vs_humans']['dbl_skunked']
easy = self.profile[self.name]['computer_easy']
medium = self.profile[self.name]['computer_med']
hard = self.profile[self.name]['computer_hard']
print(f'======== Player stats for {self.name} ========\n')
print(f'Rank: {rank}')
print(f'Credits: {credits}')
print(f'Badges: {badges}')
print(f'Boards unlocked: {boards}')
print('============================================== \n')
print(' Versus Humans \n')
print(f'WINS: {wins}')
print(f'LOSSES: {losses}')
print(f'SKUNKS: {skunks}')
print(f'SKUNKED: {skunked}')
print(f'DOUBLE SKUNKS: {dbl_skunks}')
print(f'DOUBLE SKUNKED: {dbl_skunked}')
print('============================================== \n')
print(' Versus Computer \n')
print(f'BEGINNER: {easy}')
print(f'INTERMEDIATE: {medium}')
print(f'EXPERT: {hard}')
``` |
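To make the rank formula concrete: below rank 1000 a win gains 100 and a loss costs 50, and on top of that a skunk adds or removes 50 and a double skunk 100. A worked example under those brackets:
```python
# Worked example of compute_new_rank; constructing a User writes
# demo_user.json as a side effect, as the constructor above does.
u = User(username='demo_user', email='<EMAIL>')
u.profile[u.name]['rank'] = 900        # first bracket: +100 win / -50 loss
u.match_stats['win'] = 1
u.match_stats['skunked_opponent'] = 1
print(u.compute_new_rank())            # 900 + 100 + 50 = 1050
```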
{
"source": "jonlatorre/MatriculaEIDE",
"score": 2
} |
#### File: MatriculaEIDE/cambridge/forms.py
```python
from django import forms
from django.forms import ModelForm, DateField
from models import *
from django.forms.models import inlineformset_factory
#~ from django.forms.extras.widgets import SelectDateWidget
from bootstrap3_datetime.widgets import DateTimePicker
from localflavor.es.forms import *
from django.contrib.admin import widgets
from django.utils.translation import gettext_lazy as _
from datetime import date
class ExamForm(ModelForm):
class Meta:
model = Exam
fields = '__all__'
def __init__(self, *args, **kwargs):
super(ModelForm, self).__init__(*args, **kwargs)
#self.fields['exam_date'].widget = DateTimePicker(options={"format": "DD-MM-YYYY", "pickTime": False})
#self.fields['registration_end_date'].wifget = DateTimePicker(options={"format": "DD-MM-YYYY", "pickTime": False})
#self.fields['exam_date'].input_formats = ['%Y-%m-%d']
#self.fields['registration_end_date'].input_formats = ['%Y-%m-%d']
self.fields['exam_date'].widget = forms.widgets.DateInput(format='%Y-%m-%d')
self.fields['registration_end_date'].widget = forms.widgets.DateInput(format='%Y-%m-%d')
class SchoolExamForm(ModelForm):
class Meta:
model = SchoolExam
fields = '__all__'
def __init__(self, school_name, *args, **kwargs):
super(ModelForm, self).__init__(*args, **kwargs)
self.fields['exam_date'].widget.format = '%d-%m-%Y'
self.fields['registration_end_date'].widget.format = '%d-%m-%Y'
self.fields['exam_date'].input_formats = ['%d-%m-%Y']
self.fields['registration_end_date'].input_formats = ['%d-%m-%Y']
self.school_name = school_name
        #Limit the exams to those of this school
school = School.objects.get(name=school_name)
self.fields['level'].queryset = SchoolLevel.objects.filter(school=school)
class RegistrationForm(ModelForm):
telephone = ESPhoneNumberField(label=_("Teléfono"))
#dni = ESIdentityCardNumberField()
postal_code = ESPostalCodeField(label=_("Código Postal"))
birth_date = DateField(label="Fecha Nac. (DD-MM-AAAA)", input_formats=['%d-%m-%Y'])
class Meta:
model = Registration
#~ exclude = ('paid')
fields = ['exam','minor','tutor_name','tutor_surname','name','surname','address','location','postal_code','sex','birth_date','telephone','email','eide_alumn','centre_name']
widgets = {
'birth_date' : DateTimePicker(options={"format": "DD-MM-YYYY", "pickTime": False}),
}
def __init__(self, *args, **kwargs):
        exam_id = kwargs.pop('exam_id', None)
super(ModelForm, self).__init__(*args, **kwargs)
        if exam_id is None:
            print "No exam has been set"
self.fields['exam'].queryset = Exam.objects.filter(registration_end_date__gte=date.today(),schoolexam__isnull=True,venueexam__isnull=True).exclude(exam_type=5)
else:
self.fields['exam'].queryset = Exam.objects.filter(id=exam_id)
class LinguaskillRegistrationForm(ModelForm):
telephone = ESPhoneNumberField(label=_("Teléfono"))
#dni = ESIdentityCardNumberField()
birth_date = DateField(label="Fecha Nac. (DD-MM-AAAA)", input_formats=['%d-%m-%Y'])
proposed_date = DateField(label="Fecha Examen (DD-MM-AAAA)", input_formats=['%d-%m-%Y'])
class Meta:
model = LinguaskillRegistration
#~ exclude = ('paid')
fields = ['name','surname','birth_date','address','location','telephone','email','proposed_date']
widgets = {
'birth_date' : DateTimePicker(options={"format": "DD-MM-YYYY", "pickTime": False}),
'proposed_date' : DateTimePicker(options={"format": "DD-MM-YYYY", "pickTime": False}),
}
def __init__(self, *args, **kwargs):
super(ModelForm, self).__init__(*args, **kwargs)
self.fields['exam'].queryset = Exam.objects.filter(exam_type=5)
class SchoolRegistrationForm(ModelForm):
telephone = ESPhoneNumberField(label=_("Teléfono"))
#dni = ESIdentityCardNumberField()
postal_code = ESPostalCodeField(label=_("Código Postal"))
birth_date = DateField(label="Fecha Nac. (DD-MM-AAAA)", input_formats=['%d-%m-%Y'])
class Meta:
model = Registration
#~ exclude = 'paid','minor','eide_alumn','centre_name')
fields = ['exam','tutor_name','tutor_surname','name','surname','address','location','postal_code','sex','birth_date','telephone','email']
widgets = {
'birth_date' : DateTimePicker(options={"format": "DD-MM-YYYY", "pickTime": False}),
}
def __init__(self, school_name, *args, **kwargs):
super(SchoolRegistrationForm, self).__init__(*args, **kwargs)
self.school_name = school_name
        #Limit the exams to those of this school
school = School.objects.get(name=school_name)
#~ print school
self.fields['exam'].queryset = SchoolExam.objects.filter(school=school,registration_end_date__gte=datetime.date.today())
#~ self.fields['minor'].initial = True
#~ self.fields['eide_alumn'].initial = False
#~ self.fields['birth_date'].widget.format = '%d-%m-%Y'
# at the same time, set the input format on the date field like you want it:
#~ self.fields['birth_date'].input_formats = ['%d-%m-%Y']
class RegistrationEditForm(ModelForm):
telephone = ESPhoneNumberField(label=_("Teléfono"))
#dni = ESIdentityCardNumberField()
postal_code = ESPostalCodeField(label=_("Código Postal"))
class Meta:
model = Registration
fields = '__all__'
def __init__(self, *args, **kwargs):
super(ModelForm, self).__init__(*args, **kwargs)
self.fields['birth_date'].widget.format = '%d-%m-%Y'
# at the same time, set the input format on the date field like you want it:
self.fields['birth_date'].input_formats = ['%d-%m-%Y']
class VenueExamForm(ModelForm):
class Meta:
model = VenueExam
fields = '__all__'
widgets = {
'birth_date' : DateTimePicker(options={"format": "DD-MM-YYYY", "pickTime": False}),
}
def __init__(self, *args, **kwargs):
super(ModelForm, self).__init__(*args, **kwargs)
self.fields['exam_date'].widget.format = '%d-%m-%Y'
self.fields['registration_end_date'].widget.format = '%d-%m-%Y'
self.fields['exam_date'].input_formats = ['%d-%m-%Y']
self.fields['registration_end_date'].input_formats = ['%d-%m-%Y']
self.fields['level'].queryset = Level.objects.filter(schoollevel__isnull=True)
class VenueRegistrationForm(ModelForm):
telephone = ESPhoneNumberField(label=_("Teléfono"))
#dni = ESIdentityCardNumberField()
postal_code = ESPostalCodeField(label=_("Código Postal"))
class Meta:
model = Registration
#~ exclude = ('paid','minor','eide_alumn','centre_name','tutor_name','tutor_surname')
fields = ['exam','tutor_name','tutor_surname','name','surname','address','location','postal_code','sex','birth_date','telephone','email']
def __init__(self, venue_name, *args, **kwargs):
super(ModelForm, self).__init__(*args, **kwargs)
self.venue_name = venue_name
        #Limit the exams to those of this venue
venue = Venue.objects.get(name=venue_name)
self.fields['exam'].queryset = VenueExam.objects.filter(venue=venue)
self.fields['birth_date'].widget.format = '%d-%m-%Y'
self.fields['birth_date'].input_formats = ['%d-%m-%Y']
```
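A short sketch of the `exam_id` hook on `RegistrationForm` above; the id value is hypothetical.
```python
# Sketch: pin the form to one exam, or leave every open exam selectable.
pinned_form = RegistrationForm(exam_id=42)   # hypothetical exam id
open_form = RegistrationForm()
```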
#### File: MatriculaEIDE/hobetuz/forms.py
```python
from django import forms
from django.forms import ModelForm
from models import *
from django.forms.models import inlineformset_factory
from django.forms.extras.widgets import SelectDateWidget
from localflavor.es.forms import *
from django.contrib.admin import widgets
from django.utils.translation import gettext_lazy as _
class CursoForm(ModelForm):
class Meta:
fields = '__all__'
model = Curso
class RegistrationForm(ModelForm):
class Meta:
model = Registration
fields = '__all__'
telephone = ESPhoneNumberField(label=_("Teléfono Fijo (*)"))
telephone2 = ESPhoneNumberField(label=_("Teléfono Móvil (*)"))
#dni = ESIdentityCardNumberField()
#~ postal_code = ESPostalCodeField(label=_("Código Postal"))
#~ class Meta:
#~ model = Registration
#~ exclude = ('paid')
#~
#~ def __init__(self, *args, **kwargs):
#~ super(ModelForm, self).__init__(*args, **kwargs)
#~ self.fields['birth_date'].widget.format = '%d-%m-%Y'
#~
#~ # at the same time, set the input format on the date field like you want it:
#~ self.fields['birth_date'].input_formats = ['%d-%m-%Y']
class Registration2019Form(ModelForm):
class Meta:
model = Registration2019
fields = '__all__'
telephone = ESPhoneNumberField(label=_("Teléfono (*)"))
#curso = forms.MultipleChoiceField(choices=CURSOS_2019, widget=forms.CheckboxSelectMultiple())
#~ class RegistrationEditForm(ModelForm):
#~ telephone = ESPhoneNumberField(label=_("Teléfono"))
#~ #dni = ESIdentityCardNumberField()
#~ postal_code = ESPostalCodeField(label=_("Código Postal"))
#~ class Meta:
#~ model = Registration
#~ def __init__(self, *args, **kwargs):
#~ super(ModelForm, self).__init__(*args, **kwargs)
#~ self.fields['birth_date'].widget.format = '%d-%m-%Y'
#~
#~ # at the same time, set the input format on the date field like you want it:
#~ self.fields['birth_date'].input_formats = ['%d-%m-%Y']
```
#### File: MatriculaEIDE/inscripciones/models.py
```python
from django.db import models
from django.utils.translation import gettext_lazy as _
import datetime
from django.conf import settings
#For auto-generating passwords
from random import choice
from string import letters
#For sending the confirmation email
from django.core.mail import EmailMultiAlternatives
from django.template.loader import render_to_string
# favour django-mailer but fall back to django.core.mail
if "mailer" in settings.INSTALLED_APPS:
from mailer import send_mail, mail_admins
else:
from django.core.mail import send_mail, mail_admins
SEXO = (
(1, _('Male')),
(2, _('Female')),
)
CURSO = (
(1, _('Inglés nivel A2')),
(2, _('Inglés nivel B2')),
(3, _('Inglés nivel C1')),
)
ENGLISH_LEVEL = (
(1, _('A1')),
(2, _('A2')),
(3, _('B1')),
(4, _('B2.1')),
(5, _('B2.2')),
(6, _('C1.1')),
(7, _('C1.2')),
(8, _('C2'))
)
class Registration(models.Model):
course = models.DecimalField('¿En que curso estás interesado?',max_digits=1, decimal_places=0,choices=CURSO)
password = models.CharField(_('Password'),max_length=6,blank=True,editable=False)
name = models.CharField(_('Nombre (*)'),max_length=50)
surname = models.CharField(_('Apellido(s) (*)'),max_length=100)
#~ address = models.CharField(_('Address'),max_length=100)
#~ location = models.CharField(_('Location'),max_length=100)
birth_date = models.DateField(_('Birth Date'),help_text=_('Formato: AAAA-MM-DD(año-mes-día)'))
telephone = models.CharField('Tel. Fijo (*)',max_length=12)
email = models.EmailField('Email (*)')
    registration_date = models.DateField(auto_now_add=True) #auto_now_add and default are mutually exclusive
accept_conditions = models.BooleanField(_('Accept the conditions'), help_text=_('You must accept the conditions to register'),default=True,blank=True)
paid = models.BooleanField(_('Paid'),default=False)
estudios_superiores = models.BooleanField('¿Tienes titulación de ESO o Superior?')
english_level = models.DecimalField('¿Cuál es tu nivel de Inglés?',max_digits=1, decimal_places=0,choices=ENGLISH_LEVEL)
english_qualification = models.CharField('¿Tienes titulación oficial de Inglés?¿Cual?',max_length=25)
def get_absolute_url(self):
return '/inscripciones/edit/%d/'%self.id
def __unicode__(self):
return u"%s-%s"%(self.id,self.email)
def registration_name(self):
return self.__unicode__()
def send_confirmation_email(self):
        ##For the student
subject = "Has solicitado un curso en EIDE"
html_content = u"""
<html>
<head>
<link rel="stylesheet" href="https://matricula-eide.es/site_media/static/css/bootstrap.min.css">
<link rel="stylesheet" href="https://matricula-eide.es/site_media/static/css/extra.css">
</head>
<body>
<div class="well">
<p>Buenas</p>
Acaba de realizar una solicitud de curso para: <br />
%s <br>
<p>Pronto nos pondremos en contacto desde EIDE para formalizar la inscripción.</p>
<p>Gracias.</p>
</div>
<div class="well">
<p></p>
</div>
</body>
</html>
"""%(self.get_course_display())
message_body = html_content
##send_mail(subject, message_body, settings.DEFAULT_FROM_EMAIL, [self.email])
msg = EmailMultiAlternatives(subject, message_body, settings.DEFAULT_FROM_EMAIL, [self.email])
msg.attach_alternative(html_content, "text/html")
##msg.content_subtype = "html"
msg.send()
        ##For the secretary
subject = "[EIDE] Matricula curso"
payload = {'registration': self}
html_content = render_to_string('inscripciones/registration_detail.html', payload)
message_body = html_content
##send_mail(subject, message_body, settings.DEFAULT_FROM_EMAIL, [self.email])
msg = EmailMultiAlternatives(subject, message_body, settings.DEFAULT_FROM_EMAIL, ["<EMAIL>","<EMAIL>"])
msg.attach_alternative(html_content, "text/html")
##msg.content_subtype = "html"
msg.send()
#~ def Curso(models.model):
#~ name = models.CharField(_('Nombre (*)'),max_length=50)
#~ description = models.CharField(_('Descripción (*)'),max_length=500)
#~ price = models.DecimalField(max_digits=5, decimal_places=2, default=0)
#~ def __unicode__:
#~ return self.name
```
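A sketch of creating an inscription and firing its confirmation mail. All field values are placeholders, and actually sending requires a configured mail backend.
```python
# Placeholder data throughout; course=2 maps to 'Inglés nivel B2' in CURSO.
r = Registration.objects.create(
    course=2,
    name='Jon', surname='Lee',
    birth_date=datetime.date(1990, 1, 1),
    telephone='944000000', email='<EMAIL>',
    estudios_superiores=True,
    english_level=4,                     # B2.1 in ENGLISH_LEVEL
    english_qualification='PET',
)
r.send_confirmation_email()
```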
#### File: MatriculaEIDE/leveltests/models.py
```python
from __future__ import unicode_literals
from django.db import models
from django.utils.translation import gettext_lazy as _
import datetime
from django.conf import settings
from random import choice
from string import letters
#For sending the confirmation email
from django.core.mail import EmailMultiAlternatives
from django.template.loader import render_to_string
# favour django-mailer but fall back to django.core.mail
if "mailer" in settings.INSTALLED_APPS:
from mailer import send_mail, mail_admins
else:
from django.core.mail import send_mail, mail_admins
SEXO = (
(1, _('Male')),
(2, _('Female')),
)
ENGLISH_LEVEL = (
(1, _('A1')),
(2, _('A2')),
(3, _('B1.1')),
(4, _('B1.2 (PET)')),
(5, _('B2.1')),
(6, _('B2.2 (FIRST)')),
(7, _('C1.1')),
(8, _('C1.2')),
(9, _('C2 (ADVANCED)'))
)
VENUES = (
(1, "<NAME>"),
(2, "Kabiezes")
)
HOURS = (
(1,"10-11"),
(2,"11-12"),
(3,"16-17"),
(4,"17-18"),
(5,"18-19"),
(5,"19-20"),
)
WEEKDAYS = (
(1,_("Monday")),
(2,_("Tuesday")),
(3,_("Wednesday")),
(4,_("Thursday")),
(5,_("Friday")),
)
AVAILABILITY = (
(1, (
(1,True),
(2,True),
(3,False),
(4,True),
(5,True),
)
),
(2, (
(1,False),
(2,False),
(3,False),
(4,True),
(5,False),
)
),
)
class Availability(models.Model):
venue = models.DecimalField(_('Venue'), max_digits=1, decimal_places=0, choices=VENUES)
hour = models.DecimalField(_('Hour'), max_digits=1, decimal_places=0, choices=HOURS)
weekday = models.DecimalField(_('Week Day'), max_digits=1, decimal_places=0, choices=WEEKDAYS)
class Reservation(models.Model):
create_date = models.DateField(auto_now_add=True)
password = models.CharField(_('Password'), max_length=6, blank=True, editable=False)
venue = models.DecimalField(_('Venue'), max_digits=1, decimal_places=0, choices=VENUES)
name = models.CharField(_('Nombre (*)'), max_length=50)
surname = models.CharField(_('Apellido(s) (*)'), max_length=100)
#~ address = models.CharField(_('Address'),max_length=100)
#~ location = models.CharField(_('Location'),max_length=100)
birth_date = models.DateField(_('Birth Date'), help_text=_('Formato: DD/MM/AAAA'))
telephone = models.CharField('Tel. Fijo (*)', max_length=12)
email = models.EmailField('Email (*)')
registration_date = models.DateField(default=datetime.date.today)
hour = models.DecimalField(_('Hour'), max_digits=1, decimal_places=0, choices=HOURS)
english_level = models.DecimalField(_('English Level'), max_digits=1, decimal_places=0, choices=ENGLISH_LEVEL,default=1)
last_english_certificate = models.BooleanField(u'Tienes titulación oficial de Cambridge English o EOI.',default=False)
last_english_certificate_description = models.CharField(u'Cual es la más alta obtenida', max_length=100,blank=True)
last_english_course = models.CharField(u'Último curso de inglés más avanzado realizado.', max_length=100,blank=True)
centre = models.CharField(_('Academia/colegio/instituto/universidad'), max_length=100,blank=True)
course = models.CharField(_('Nivel o curso escolar realizado'), max_length=100,blank=True)
accept_conditions = models.BooleanField(_('Accept the conditions'), help_text=_('You must accept the conditions to register'),default=True,blank=True)
def send_confirmation_email(self):
        ##For the student
subject = "Has realizado una reserva de prueba de nivel en EIDE"
html_content = u"""
<div class="well">
Acaba de realizar una reserva de nivel en el centro %s de EIDE el día %s<br />
</div>
""" % (self.get_venue_display(), self.registration_date)
message_body = html_content
##send_mail(subject, message_body, settings.DEFAULT_FROM_EMAIL, [self.email])
msg = EmailMultiAlternatives(subject, message_body, settings.DEFAULT_FROM_EMAIL, [self.email])
msg.attach_alternative(html_content, "text/html")
        msg.send()
        #send_mail(subject, message_body, settings.DEFAULT_FROM_EMAIL, [self.email], html_message=message_body)
        ### For the admins
subject = "Hay una nueva reserva de prueba de nivel para cambridge %s"%self.get_venue_display()
message_body = u"""Se ha dado de alta una reserva para el centro %s el día %s.
Los datos son del alumno son:
Nombre: %s
Apellidos: %s
Telefono: %s
e-mail: %s """%(self.get_venue_display(),self.registration_date,self.name,self.surname,self.telephone,self.email)
#mail_admins(subject, message_body)
```
#### File: MatriculaEIDE/pasarela/models.py
```python
from django.db import models
from django.conf import settings
from django.utils.translation import gettext_lazy as _
import hashlib
import datetime
#from django.utils.text import slugify
from django.template.defaultfilters import slugify
from cambridge.models import Registration
from django.core.mail import send_mail, mail_admins
import logging
log = logging.getLogger("MatriculaEIDE")
class Pago(models.Model):
importe = models.DecimalField(max_digits=6, decimal_places=2)
descripcion = models.CharField(_('Concepto'),max_length=250,blank=True)
fecha_creacion = models.DateField(auto_now_add=True)
fecha_pago = models.DateField(null=True,blank=True)
def get_absolute_url(self):
return "/pagos/pago/%i/" % self.id
def set_as_paid(self):
log.debug("Vamos a marcar como pagado el pago: %s con la descripcion %s"%(self.id,self.descripcion))
self.fecha_pago = datetime.date.today()
log.debug("Mandamos un mail de confirmacion")
self.send_paiment_confirmation_email()
log.debug("Guardamos...")
self.save()
return True
def send_paiment_confirmation_email(self):
subject = "[PagosOnline] Se ha confirmado un pago manual online"
message_body = u"""Se acaba de confirmar un pago online creado manualmente. Los datos son: \n
\tid: %s. \n
\tfecha creacion: %s. \n
\tdescripcion: %s. \n
\timporte: %s. \n
"""%(self.id,self.fecha_creacion,self.descripcion,self.importe)
mail_admins(subject, message_body)
```
#### File: MatriculaEIDE/pasarela/views.py
```python
from django.views.decorators.csrf import csrf_exempt
from django.views.generic import DetailView, ListView, CreateView, UpdateView, DeleteView, View
from django.template.response import TemplateResponse
from django.http import HttpResponse
from django.shortcuts import render_to_response
from django.core.urlresolvers import reverse
from django.conf import settings
from django.dispatch import receiver
from sermepa.forms import SermepaPaymentForm
from sermepa.signals import payment_was_successful, payment_was_error, signature_error
from sermepa.models import SermepaIdTPV
import datetime
import sys
from pasarela.models import Pago
from forms import *
from cambridge.models import Registration
import logging
log = logging.getLogger("MatriculaEIDE")
class pagos_lista(ListView):
model = Pago
template_name="pago_list.html"
class crear_pago_manual(CreateView):
#model = Pago
form_class = PagoForm
template_name="pago_manual_crear.html"
class editar_pago_manual(UpdateView):
model = Pago
#form_class = PagoForm
template_name="pago_manual_editar.html"
class borrar_pago_manual(DeleteView):
model = Pago
success_url ="/pagos/lista"
#form_class = PagoForm
template_name="pago_manual_borrar.html"
def pagar_manual(request,pago_id):
    reference = "manual"
    return TemplateResponse(request, "pago_manual_pagar.html", {"payament_info": payament_info(reference, pago_id)})
def make_payment(request, reference, order_id):
""" Recibimos un texto de referencia, el ID de la orden y una cantidad en euros (sin decimales)"""
#~ return direct_to_template(request,
#~ template= "pago.html",
#~ extra_context={"payament_info": payament_info(reference, order_id)})
@csrf_exempt
def confirm_payment(request):
## FIXME habría que poner algun filtro a la confirmación del pago.
log.debug("Recibimos una confirmación de pago")
log.debug(request.POST)
print request
try:
        #Read the operation number, which carries the registration reference
log.debug("Vamos a leer el Num_operacion para ver que vamos a confirmar")
reference = request.POST["Num_operacion"]
log.debug("tenemos la referencia: %s"%reference)
registration_type = reference.split('-')[0]
registration_id = reference.split('-')[1]
log.debug( "tenemos una matricula de %s con el id %s"%(registration_type, registration_id))
r = None
        #Look up the registration
if registration_type=="cambridge":
log.debug("Es cambridge la buscamos en BBDD")
r = Registration.objects.get(id=registration_id)
elif registration_type=="manual":
log.debug("Vamos a confirmar un pago manual. Lo buscamos en BBDD...")
r = Pago.objects.get(id=registration_id)
log.debug("Hemos encontrado el pago manual %s"%r.id)
else:
log.debug( "No sabemos que tipo de matricula es!" )
        #Check whether we found a registration
if r:
log.debug( "Tenemos la matricula/pago, vamos a marcalo como pagado")
r.set_as_paid()
log.debug( "Mostramos al TPV la pagina de pago OK")
return TemplateResponse(request,"pago_confirmar.html")
else:
return TemplateResponse(request,"pago_noconfirmar.html")
except:
exc_type, exc_value, exc_traceback = sys.exc_info()
log.debug("No hemos sido capaces de validar el pago de la matricula ha fallado el try con la excepcion: %s %s %s"%(exc_type,exc_value,exc_traceback))
log.debug(exc_type)
log.debug(exc_value)
log.debug(exc_traceback)
return TemplateResponse(request,"pago_noconfirmar.html")
@receiver(payment_was_successful)
def payment_ok(sender, **kwargs):
log.debug("Somos el evento payment_was_successful gestionado por payment_ok")
reference = sender.Ds_MerchantData
log.debug("tenemos la referencia: %s"%reference)
registration_type = reference.split('-')[0]
registration_id = reference.split('-')[1]
log.debug( "tenemos una matricula de %s con el id %s"%(registration_type, registration_id))
r = None
    #Look up the registration
if registration_type=="cam":
log.debug("Es cambridge la buscamos en BBDD")
r = Registration.objects.get(id=registration_id)
log.debug("Hemos encontrado el pago manual %s"%r.id)
log.debug( "Tenemos la matricula/pago, vamos a marcalo como pagado")
r.set_as_paid()
elif registration_type=="man":
log.debug("Vamos a confirmar un pago manual. Lo buscamos en BBDD...")
print Pago.objects.all()
r = Pago.objects.filter(id=registration_id)
if len(r)>0:
log.debug("Hemos encontrado el pago manual %s"%r[0].id)
log.debug( "Tenemos la matricula/pago, vamos a marcalo como pagado")
            r[0].set_as_paid()
else:
log.debug("Problemas encontrando el pago manual con ID: %s"%registration_id)
else:
log.debug( "No sabemos que tipo de matricula es!" )
@receiver(payment_was_error)
def payment_ko(sender, **kwargs):
pass
def sermepa_ipn_error(sender, **kwargs):
pass
```
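Both confirmation paths above key off a reference string of the form `<type>-<id>` (`cambridge`/`manual` in the TPV callback, `cam`/`man` in the signal handler). A minimal sketch of the convention, with a hypothetical id:
```python
# Hypothetical reference round-trip matching the parsing above.
reference = "man-17"
registration_type = reference.split('-')[0]   # "man" -> manual Pago
registration_id = reference.split('-')[1]     # "17"  -> primary key
```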
#### File: MatriculaEIDE/sermepa/utils.py
```python
import hashlib, json, base64, hmac
from Crypto.Cipher import DES3
from django.conf import settings
"""
Method to generate Ds_MerchantParameters & Ds_Signature needed by Redsys
@var merchant_parameters: Dict with all merchant parameters
@return Ds_MerchantParameters: Redsys ready encoded parameters
@return Ds_Signature: Redsys 256 valid signature
"""
def redsys_generate_request(merchant_parameters):
Ds_MerchantParameters = encode_parameters(merchant_parameters)
order_encrypted = encrypt_order_with_3DES(merchant_parameters['Ds_Merchant_Order'])
Ds_Signature = sign_hmac256(order_encrypted, Ds_MerchantParameters)
return Ds_MerchantParameters, Ds_Signature
"""
Method to check received Ds_Signature with the one we extract from Ds_MerchantParameters data.
We remove non alphanumeric characters before doing the comparison
@return Ds_Signature: Received signature
@return Ds_MerchantParameters: Received parameters
@return: True if signature is confirmed, False if not
"""
def redsys_check_response(Ds_Signature, Ds_MerchantParameters):
import re
merchant_parameters = decode_parameters(Ds_MerchantParameters)
order = merchant_parameters['Ds_Order']
order_encrypted = encrypt_order_with_3DES(order)
Ds_Signature_calculated = sign_hmac256(order_encrypted, Ds_MerchantParameters)
alphanumeric_characters = re.compile('[^a-zA-Z0-9]')
Ds_Signature_safe = re.sub(alphanumeric_characters, '', Ds_Signature)
Ds_Signature_calculated_safe = re.sub(alphanumeric_characters, '', Ds_Signature_calculated)
if Ds_Signature_safe == Ds_Signature_calculated_safe:
return True
else:
return False
"""
Given a dict; create a json object, codify it in base64 and delete their carrier returns
@var merchant_parameters: Dict with all merchant parameters
@return Ds_MerchantParameters: Encoded json structure with all parameters
"""
def encode_parameters(merchant_parameters):
parameters = (json.dumps(merchant_parameters)).encode()
return ''.join(unicode(base64.encodestring(parameters), 'utf-8').splitlines())
"""
Given the Ds_MerchantParameters from Redsys, decode it and eval the json file
@var Ds_MerchantParameters: Encoded json structure returned from Redsys
@return merchant_parameters: Json structure with all parameters
"""
def decode_parameters(Ds_MerchantParameters):
import ast
Ds_MerchantParameters_decoded = base64.standard_b64decode(Ds_MerchantParameters)
return ast.literal_eval(Ds_MerchantParameters_decoded)
"""
This method creates a unique key for every request,
based on the Ds_Merchant_Order and in the shared secret (SERMEPA_SECRET_KEY).
This unique key is Triple DES ciphered.
@var merchant_parameters: Dict with all merchant parameters
@return order_encrypted: The encrypted order
"""
def encrypt_order_with_3DES(Ds_Merchant_Order):
pycrypto = DES3.new(base64.standard_b64decode(settings.SERMEPA_SECRET_KEY), DES3.MODE_CBC, IV=b'\0\0\0\0\0\0\0\0')
order_padded = Ds_Merchant_Order.ljust(16, b'\0')
return pycrypto.encrypt(order_padded)
"""
Use the order_encrypted we have to sign the merchant data using a HMAC SHA256 algorithm
and encode the result using Base64
@var order_encrypted: Encrypted Ds_Merchant_Order
@var Ds_MerchantParameters: Redsys aleready encoded parameters
@return Ds_Signature: Generated signature encoded in base64
"""
def sign_hmac256(order_encrypted, Ds_MerchantParameters):
hmac_value = hmac.new(order_encrypted, Ds_MerchantParameters, hashlib.sha256).digest()
return base64.b64encode(hmac_value)
``` |
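A sketch of producing the two POST fields Redsys expects from the helpers above. The parameter values are placeholders (999008881 is the usual Redsys test merchant code), and `settings.SERMEPA_SECRET_KEY` must hold the base64 merchant key for the 3DES step.
```python
# Sketch: build Ds_MerchantParameters and Ds_Signature for a request.
merchant_parameters = {
    'Ds_Merchant_Amount': '1000',             # cents
    'Ds_Merchant_Order': '000000000001',
    'Ds_Merchant_MerchantCode': '999008881',  # test merchant code
    'Ds_Merchant_Currency': '978',            # EUR
    'Ds_Merchant_TransactionType': '0',
}
Ds_MerchantParameters, Ds_Signature = redsys_generate_request(merchant_parameters)
```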
{
"source": "jonlee234/twitter-clone-pyrhon",
"score": 2
} |
#### File: twitter-clone-pyrhon/tweet/views.py
```python
from django.shortcuts import render, redirect
from tweet.forms import TweetForm
from tweet.models import Tweet
from django.contrib.auth import get_user_model
MyUser = get_user_model()
# Create your views here.
def post_tweet(request):
context = {}
if request.method == "POST":
form = TweetForm(request.POST)
if form.is_valid():
data = form.cleaned_data
newitem = Tweet.objects.create(
tweet=data['tweet'],
author=MyUser.objects.filter(id=request.user.id).first(),
)
return redirect(reverse("home"))
    form = TweetForm()
    context.update({'form': form})
    return render(request, "generic_form.html", context)
``` |
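A hypothetical URL wiring for the view above; the route path and name are assumptions, not taken from the project.
```python
# Hypothetical urls.py entry for post_tweet.
from django.urls import path
from tweet.views import post_tweet

urlpatterns = [
    path('tweet/', post_tweet, name='posttweet'),
]
```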
{
"source": "jonlee48/gwu_nn",
"score": 4
} |
#### File: gwu_nn/gwu_nn/activation_functions.py
```python
import numpy as np
from abc import ABC, abstractmethod
# Todo: Change activations to remove the need for this decorator
def vectorize_activation(activation):
"""Decorator that ensures that activations are vectorized when used"""
def wrapper(*args):
vec_activation = np.vectorize(activation)
input = args[1]
return vec_activation(args[0], input)
return wrapper
class ActivationFunction(ABC):
"""Abstract class that defines base functionality for activation functions"""
def __init__(self):
super().__init__()
@abstractmethod
def activation(cls, x):
pass
@abstractmethod
def activation_partial_derivative(cls, x):
pass
class SigmoidActivation(ActivationFunction):
"""Implements the sigmoid activation function typically used for logistic regression"""
@classmethod
@vectorize_activation
def activation(cls, x):
"""Scales inputs to (0,1)
Args:
x (np.array): input into the layer/activation function
Returns:
np.array(floats): sigmoid(x)
"""
out = 1 / (1 + np.exp(-x))
return out
@classmethod
@vectorize_activation
def activation_partial_derivative(cls, x):
"""Applies the partial derivative of the sigmoid function
Args:
x (np.array): partial derivative up to this layer/activation function
Returns:
np.array(floats): derivative of network up to this activation/layer
"""
return np.exp(-x) / (1 + np.exp(-x))**2
class RELUActivation(ActivationFunction):
@classmethod
@vectorize_activation
def activation(cls, x):
"""Zeroes out negative values
Args:
x (np.array): input into the layer/activation function
Returns:
np.array(floats): ReLU(x)
"""
if x > 0:
return x
else:
return 0
@classmethod
@vectorize_activation
def activation_partial_derivative(cls, x):
"""Applies the partial derivative of the ReLU function to the input
Args:
x (np.array): partial derivative up to this layer/activation function
Returns:
np.array(floats): derivative of network up to this activation/layer
"""
if x > 0:
return 1
else:
return 0
class SoftmaxActivation(ActivationFunction):
@classmethod
def activation(cls, x):
"""Applies the softmax function to the input array
Args:
x (np.array): input into the layer/activation function
Returns:
np.array(floats): Softmax(x)
"""
exps = np.exp(x - np.max(x))
return exps / np.sum(exps)
# TODO: Fix partial div implementation of softmax
@classmethod
def activation_partial_derivative(cls, x):
"""Applies the partial derivative of the sigmoid function
Args:
x (np.array): partial derivative up to this layer/activation function
Returns:
np.array(floats): derivative of network up to this activation/layer
"""
s = x.reshape(-1, 1)
return np.diagflat(s) - np.dot(s, s.T)
```
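As a quick usage sketch (assuming numpy is available), the decorator above lets the class-level activations accept arrays directly, and the sigmoid derivative matches the closed form s(1-s):
```python
import numpy as np

# elementwise application thanks to the vectorize_activation decorator
x = np.array([-2.0, 0.0, 2.0])
s = SigmoidActivation.activation(x)
ds = SigmoidActivation.activation_partial_derivative(x)
np.testing.assert_allclose(ds, s * (1 - s), rtol=1e-6)  # sigmoid' = s * (1 - s)
```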
#### File: gwu_nn/gwu_nn/layers.py
```python
import numpy as np
from abc import ABC, abstractmethod
from gwu_nn.activation_layers import Sigmoid, RELU, Softmax
activation_functions = {'relu': RELU, 'sigmoid': Sigmoid, 'softmax': Softmax}
def apply_activation_forward(forward_pass):
"""Decorator that ensures that a layer's activation function is applied after the layer during forward
propagation.
"""
def wrapper(*args):
output = forward_pass(args[0], args[1])
if args[0].activation:
return args[0].activation.forward_propagation(output)
else:
return output
return wrapper
def apply_activation_backward(backward_pass):
"""Decorator that ensures that a layer's activation function's derivative is applied before the layer during
backwards propagation.
"""
def wrapper(*args):
output_error = args[1]
learning_rate = args[2]
if args[0].activation:
output_error = args[0].activation.backward_propagation(output_error, learning_rate)
return backward_pass(args[0], output_error, learning_rate)
return wrapper
class Layer():
def __init__(self, activation=None):
self.type = "Layer"
if activation:
self.activation = activation_functions[activation]()
else:
self.activation = None
@apply_activation_forward
def forward_propagation(cls, input):
pass
@apply_activation_backward
    def backward_propagation(cls, output_error, learning_rate):
pass
class Dense(Layer):
def __init__(self, output_size, add_bias=False, activation=None, input_size=None):
super().__init__(activation)
self.type = None
self.name = "Dense"
self.input_size = input_size
self.output_size = output_size
self.add_bias = add_bias
def init_weights(self, input_size):
"""Initialize the weights for the layer based on input and output size
Args:
input_size (np.array): dimensions for the input array
"""
# if type(input_size) is tuple:
# input_size = input_size[1]
if self.input_size is None:
self.input_size = input_size
self.weights = np.random.randn(input_size, self.output_size) / np.sqrt(input_size + self.output_size)
if self.add_bias:
self.bias = np.random.randn(1, self.output_size) / np.sqrt(input_size + self.output_size)
@apply_activation_forward
def forward_propagation(self, input):
"""Applies the forward propagation for a densely connected layer. This will compute the dot product between the
input value (calculated during forward propagation) and the layer's weight tensor.
Args:
input (np.array): Input tensor calculated during forward propagation up to this layer.
Returns:
np.array(float): The dot product of the input and the layer's weight tensor."""
self.input = input
output = np.dot(input, self.weights)
if self.add_bias:
return output + self.bias
else:
return output
@apply_activation_backward
def backward_propagation(self, output_error, learning_rate):
"""Applies the backward propagation for a densely connected layer. This will calculate the output error
(dot product of the output_error and the layer's weights) and will calculate the update gradient for the
weights (dot product of the layer's input values and the output_error).
Args:
output_error (np.array): The gradient of the error up to this point in the network.
Returns:
np.array(float): The gradient of the error up to and including this layer."""
input_error = np.dot(output_error, self.weights.T)
weights_error = np.dot(self.input.T, output_error)
self.weights -= learning_rate * weights_error
if self.add_bias:
self.bias -= learning_rate * output_error
return input_error
class Convolutional(Layer):
def __init__(self, input_size=28, input_channels=1, kernel_size=3, num_kernels=1, activation=None):
super().__init__(activation)
self.type = None
self.name = "Convolutional"
self.input = None # (input_size, input_size, input_channels)
self.kernels = np.random.randn(kernel_size, kernel_size, num_kernels)
self.input_size = input_size
self.input_channels = input_channels
self.kernel_size = kernel_size # should be odd number
self.num_kernels = num_kernels # also corresponds to number of feature maps
self.output_size = input_size
def init_weights(self, input_size):
"""made arguments in __init__ mandatory"""
pass
@apply_activation_forward
def forward_propagation(self, input):
        """Applies the forward propagation for a convolutional layer. This will convolve the
        input value (calculated during forward propagation) with the layer's kernels.
        Args:
            input (np.array): Input tensor calculated during forward propagation up to this layer.
        Returns:
            np.array(float): An output tensor with shape (img_width, img_height, self.num_kernels)"""
        assert input.shape[0] == self.input_size
        assert input.shape[1] == self.input_size
output = np.zeros(shape=(self.input_size, self.input_size))
self.input = input
input_pad = self.apply_2d_padding(input, self.kernel_size)
for i_w in range(input.shape[0]): # input width
for i_h in range(input.shape[1]): # input height
for k_w in range(self.kernel_size): # kernel width
for k_h in range(self.kernel_size): #kernel height
output[i_w][i_h] += self.kernels[k_w][k_h] * input_pad[i_w+k_w][i_h+k_h]
return output
@apply_activation_backward
def backward_propagation(self, output_error, learning_rate):
        """Applies the backward propagation for a convolutional layer. This will calculate the output error
        and will calculate the update gradient for the kernel weights.
        Args:
            output_error (np.array): The gradient of the error up to this point in the network.
        Returns:
            np.array(float): The gradient of the error up to and including this layer."""
        # input size is equal to output size
        assert output_error.shape[0] == self.output_size
# calculate kernel gradient (need padded input)
kernels_grad = np.zeros_like(self.kernels)
input_pad = self.apply_2d_padding(self.input, self.kernel_size)
# calculate input error (need padded output)
input_error = np.zeros_like(self.input)
output_error_pad = self.apply_2d_padding(output_error, self.kernel_size)
for i_w in range(self.input.shape[0]): # img width
for i_h in range(self.input.shape[1]): # img height
for k_w in range(self.kernel_size):
for k_h in range(self.kernel_size):
# calc kernel gradient and input_grad for i_w, i_h, k_w, k_h, i, k
                        kernels_grad[k_w][k_h] += input_pad[i_w+k_w][i_h+k_h] * output_error[i_w][i_h]  # accumulate input * upstream error at each image position
input_error[i_w][i_h] += output_error_pad[i_w+self.kernel_size-k_w-1][i_h+self.kernel_size-k_h-1] * self.kernels[k_w][k_h]
# update kernel
self.kernels -= learning_rate * kernels_grad
return input_error
def apply_1d_padding(self, row, kernel_size):
""" Helper function to pad 1d array with kernel_size//2 zeros on either side """
padding = kernel_size//2
return np.concatenate([np.zeros(padding), row, np.zeros(shape=(padding))])
def apply_2d_padding(self, input, kernel_size):
""" Helper function to apply 2d padding to a 2d array,
pads with kernel_size//2 zeros on all sides """
width = input.shape[0]
padding = kernel_size//2
pad_sides = np.stack([self.apply_1d_padding(row,kernel_size) for row in input])
zeros = np.zeros(shape=(padding,width+2*padding))
pad_full = np.vstack([zeros, pad_sides, zeros])
return pad_full
class Flatten(Layer):
def __init__(self, input_size, input_channels=1): # int input_size
super().__init__(None)
self.type = None
self.name = "Flatten"
self.input = None # (input_size, input_size, input_channels)
self.input_size = input_size
self.output_size = input_channels * input_size**2
def init_weights(self, input_size):
"""made arguments in __init__ mandatory"""
pass
@apply_activation_forward
def forward_propagation(self, input):
"""Applies the forward propagation for a flat layer. This will just reshape the input
Args:
input (np.array): Input tensor calculated during forward propagation up to this layer.
Returns:
np.array(float): An output tensor with shape (1, img_size**2 * input_channels)"""
self.input = input
return input.reshape(1,-1)
@apply_activation_backward
def backward_propagation(self, output_error, learning_rate):
"""Applies the backward propagation for a flat layer. This will just reshape the output error (undo the flatten operation)
Args:
output_error (np.array): The gradient of the error up to this point in the network.
Returns:
np.array(float): The gradient of the error up to and including this layer."""
return output_error.reshape(self.input.shape)
class MaxPool(Layer):
def __init__(self, input_size, pool_size, input_channels=1): # int input_size
super().__init__(None)
self.type = None
self.name = "MaxPool"
self.input = None # (input_size, input_size, input_channels)
self.pool_size = pool_size
self.input_size = input_size
self.output_size = input_size//pool_size
if (input_size % pool_size != 0):
print("input_size not evenly divisible by pool_size")
def init_weights(self, input_size):
"""made arguments in __init__ mandatory"""
pass
@apply_activation_forward
def forward_propagation(self, input):
"""Applies the forward propagation for a max pooling layer. This will just return the input in the pool with the max value
Args:
input (np.array): Input tensor calculated during forward propagation up to this layer.
Returns:
np.array(float): An output tensor with shape (input_size//pool_size, input_size//pool_size)"""
self.input = input # need this for back prop
size = self.input_size//self.pool_size
output = np.zeros(shape=(size,size))
# img width
for i_w in range(0, self.input.shape[0], self.pool_size):
# img height
for i_h in range(0, self.input.shape[1], self.pool_size):
mymax = float('-inf')
for p_w in range(self.pool_size): # pool width
for p_h in range(self.pool_size): #pool height
mymax = max(input[i_w+p_w][i_h+p_h], mymax)
# set output to max
output[i_w//self.pool_size][i_h//self.pool_size] = mymax
return output
@apply_activation_backward
def backward_propagation(self, output_error, learning_rate):
"""Applies the backward propagation for a max pooling layer. This will return the gradient error for only the max value, (otherwise zero)
Args:
output_error (np.array): The gradient of the error up to this point in the network.
Returns:
np.array(float): The gradient of the error up to and including this layer."""
input_error = np.zeros_like(self.input)
for i_w in range(0, self.input.shape[0], self.pool_size): # input width
for i_h in range(0, self.input.shape[1], self.pool_size): # input height
mymax = float('-inf')
max_w = 0
max_h = 0
for p_w in range(self.pool_size):
for p_h in range(self.pool_size):
if (self.input[i_w+p_w][i_h+p_h] > mymax):
mymax = self.input[i_w+p_w][i_h+p_h]
max_w = p_w
max_h = p_h
input_error[i_w+max_w][i_h+max_h] = output_error[i_w//self.pool_size][i_h//self.pool_size]
return input_error
``` |
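A minimal forward/backward pass through a single `Dense` layer (a sketch assuming the package's `Sigmoid` activation layer applies the function elementwise, and using an arbitrary gradient in place of a real loss):
```python
import numpy as np

np.random.seed(0)
layer = Dense(output_size=2, activation='sigmoid', input_size=3)
layer.init_weights(3)

x = np.random.randn(1, 3)
y = layer.forward_propagation(x)                  # shape (1, 2), sigmoid applied
grad = layer.backward_propagation(y - 1.0, 0.01)  # arbitrary upstream gradient
print(y.shape, grad.shape)                        # (1, 2) (1, 3)
```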
{
"source": "Jon-Lein/ROP_Chain_Final_Prototypes",
"score": 2
} |
#### File: ROP_Chain_Final_Prototypes/imports/Chain_Generator.py
```python
from imports.Processes import *
from imports.Rop_File_Intake import *
from imports.Graphing_functions import *
from imports.ROP_Counting_Functions import *
from imports.Count_Side_Effects import *
import ropgadget
import os.path
from os import system, getcwd, chdir
import sys
import re
from textwrap import wrap
# f = "C:\\Users\\User\\Desktop\\ROP_Project\\Gadget_Sets\\EasyChat.exe_ROP\\EasyChat.exe_ROP_Gadgets.txt"
# f = "C:\\Users\\User\\Desktop\\ROP_Project\\Gadget_Sets\\PEview.exe_ROP\\PEview.exe_ROP_Gadgets.txt"
# f = "C:\\Users\\User\\Desktop\\ROP_Project\\Gadget_Sets\\one_note_ROP\\one_note_ROP_Gadgets.txt"
# f = "C:\\Users\\User\\Desktop\\ROP_Project\\Gadget_Sets\\frhed.exe_ROP\\frhed.exe_ROP_Gadgets.txt"
# f = "C:\\Users\\User\\Desktop\\ROP_Project\\Gadget_Sets\\one_note_ROP\\one_note_ROP_Gadgets.txt"
# gadgets = process_rop_file(f)
def is_sf_recoverable(instruction):
recoverable = ['inc', 'dec', 'mov', 'xor', 'pop']
no_action = ['test', 'cld', 'nop']
if instruction.find('ptr [') >= 0: # for right now, dismiss any gadget with derefrenced registers
return 0 # the reg would need to be a correct address
for r in no_action:
        if instruction.find(r) >= 0:
return 2
for r in recoverable:
if instruction.find(r) >= 0:
return 1
return 0
def recover_side_effect(rop_list, instruction, reg1, reg2=None):
no_action = ['test', 'cld', 'nop']
for i in no_action:
if i.find(instruction) >= 0:
return 1
    # determine the opposite instruction
new_instruction = {}
new_instruction['mov'] = "mov"
new_instruction['inc'] = "dec"
new_instruction['dec'] = "inc"
new_instruction['xor'] = 'xor'
# determine how the registers need to be for the new instruction
new_regs = {}
new_regs['mov'] = [reg2, reg1]
new_regs['inc'] = [reg1]
new_regs['dec'] = [reg1]
new_regs['xor'] = [reg1, reg2]
# adjust function call for 1 or 2 registers.
try:
if len(new_regs[instruction]) == 2:
a = get_best_gadget_binary(rop_list, new_instruction[instruction], new_regs[instruction][0], new_regs[instruction][1])
elif len(new_regs[instruction]) == 1:
a = get_best_gadget_binary(rop_list, new_instruction[instruction], new_regs[instruction][0], None)
except Exception as e:
print("The Instruction isnt working :/")
return 0
if a['SE_len'] == 0:
return a
    elif a['SE_len'] == 1:
# fix side effect in fix for side effect
# set warning that the side effect will be manually reversed
return a
else:
print("Too many side effects to be efficent")
return None
# if zero side effects return it
    # if one additional side effect, try to resolve it
    # if two or more side effects, throw it out as it's probably not worth using
def xor_swap(rop_list, reg1, reg2):
chain = []
address_chain = []
if reg1 == reg2:
return 0
# make a list for each configuration of xor
test1 = best_gadget_list(rop_list, "xor", reg1, reg2)
test2 = best_gadget_list(rop_list, "xor", reg2, reg1)
# the largest return value accepted
acceptable_ret = 8
# if one xor gadget isn't found, the algorithm won't work
if test1 == 0 or test2 == 0:
print("Not Enough Gadgets")
return 0
else:
# variables for each part making up the gadget chain
first_gadget = ""
first_fixes = []
first_addresses = []
second_gadget = ""
second_fixes = []
second_addresses = []
address_chain = []
# find first xor gadget
SE = 0
for i in range(0, len(test1)): # each xor instruction for first configuration
SE = len(test1[i]["Gadget"])-2 # number of side effects minus xor and return
for s in range(1, len(test1[i]["Gadget"])-1): # each side effect
if is_sf_recoverable(test1[i]["Gadget"][s]) >= 1 :
SE -= 1
if SE == 0 and test1[i]['Ret_len'] <= acceptable_ret:
first_gadget = (test1[i]["Gadget"])
first_addresses.append(test1[i]["Address"])
for se in range(1, len(test1[i]["Gadget"])-1): # each side effect
if test1[i]["Gadget"][se].find(',') >= 0:
pass # two registers
else:
p = test1[i]["Gadget"][se].split(' ')
fix = recover_side_effect(rop_list,p[1],p[2])
if fix == 1:
pass
elif fix is not None:
first_addresses.append(fix['Address'])
first_fixes.append(fix['Gadget'])
# print("fix")
# print(fix)
# fix = recover_side_effect()
break # if all side effects good, use this gadget
if first_gadget == "":
print("The Algorithm didn't work")
return 0
second_gadget = ""
SE = 0
for i in range(0,len(test2)): # each xor instruction
SE = len(test2[i]["Gadget"])-2
for s in range(1, len(test2[i]["Gadget"])-1): # each side effect
if is_sf_recoverable(test2[i]["Gadget"][s]) == 1:
SE = SE - 1
if SE == 0 and test2[i]['Ret_len'] <= acceptable_ret: # all side effects are alright as well at ret length
second_gadget = test2[i]["Gadget"]
second_addresses.append(test2[i]["Address"])
for se in range(1, len(test2[i]["Gadget"])-1): # each side effect
if test2[i]["Gadget"][se].find(',') >= 0:
pass # two registers
else:
p = test2[i]["Gadget"][se].split(' ')
fix = recover_side_effect(rop_list,p[1],p[2])
second_addresses.append(fix['Address'])
second_fixes.append(fix['Gadget'])
break
if second_gadget == "":
print("Algorithm didnt work")
return 0
print('\n')
print('---Instructions---')
print(first_gadget)
for i in first_fixes:
print(i)
print(second_gadget)
for i in second_fixes:
print(i)
print(first_gadget)
for i in first_fixes:
print(i)
for i in first_addresses:
address_chain.append(i)
for i in second_addresses:
address_chain.append(i)
for i in first_addresses:
address_chain.append(i)
print('---Addresses---')
for i in address_chain:
print("ROP_chain += (" + i + ")")
# xor_swap(gadgets, "eax", "edi")
# xor_swap(gadgets, "eax", "edi")
# recover_side_effect(gadgets, "dec","ecx",)
def hex_constant(rop_list, reg, constant):
#avoid reg xor reg instructions as they will always equal zero
remove = 9
all_regs = ['eax', 'ebx', 'ecx', 'edx', 'esi', 'edi']
for i in range(0, len(all_regs)):
if all_regs[i] == reg:
remove = i
all_regs.pop(remove)
#find the two values to load into register
xor_constant = 0x11111111
null_free = 0
# make sure no null bytes exist in values to add to stack
while null_free == 0:
second_value = xor_constant ^ constant
second_value = ("{0:#0{1}x}".format(second_value, 10))
all_bytes = wrap(second_value, 2)
if '00' not in all_bytes:
null_free = 1
else:
null_free = 0
xor_constant += 0x11111111 # increase the value to try to remove nulls again
xor_constant = ("{0:#0{1}x}".format(xor_constant, 10))
best_sf = 999
best_gadget = ""
second_reg = ""
for r in all_regs:
# print(get_best_gadget_binary(rop_list,'xor',reg,r))
a = get_best_gadget_binary(rop_list,'xor',r,reg)
if a is not None:
if best_sf > a['SE_len']:
best_sf = a['SE_len']
best_gadget = a['Gadget']
second_reg = r
if second_reg == "":
print("None avaliable")
return 0
xor_gadget_list = best_gadget_list(rop_list, 'xor', reg, second_reg)
pop_target = best_gadget_list(rop_list, 'pop', reg)
pop_second = best_gadget_list(rop_list, 'pop', second_reg)
gadget_sets = [pop_target, pop_second, xor_gadget_list]
saved_gadgets = []
failed = 0
acceptable_ret = 16
for gadget_list in gadget_sets:
SE = 0
for i in range(0, len(gadget_list)):
SE = len(gadget_list[i]['Gadget']) - 2
for s in range(1, len(gadget_list[i]['Gadget'])-1):
if is_sf_recoverable(gadget_list[i]["Gadget"][s]) == 1:
SE -= 1
if SE == 0 and gadget_list[i]['Ret_len'] <= acceptable_ret:
saved_gadgets.append(gadget_list[i])
for se in range(1, len(gadget_list[i]["Gadget"])-1): # each side effect
if gadget_list[i]["Gadget"][se].find(',') >= 0:
pass # two registers
else:
p = gadget_list[i]["Gadget"][se].split(' ')
fix = recover_side_effect(rop_list,p[1],p[2])
if fix is not None:
saved_gadgets.append(fix)
break
        else:
            # no acceptable gadget was found in this list
            failed = 1
# first_addresses.append(test1[i]["Addy"])
# print(test1[i]["Module"])
# print(saved_gadgets)
# for i in saved_gadgets:
# print(i['Gadget'])
if failed == 1:
print("Couldn't do it :/")
return 0
addresses = []
total_chain = []
for i in saved_gadgets:
total_chain.append(i['Gadget'])
addresses.append(i['Address'])
for g in i['Gadget']:
if g == " pop " + reg + ' ':
total_chain.append(xor_constant)
elif g == ' pop ' + second_reg + ' ':
total_chain.append(second_value)
print('\n')
print("---Instructions---")
for i in range(len(total_chain)):
print(((total_chain[i])))
print('\n')
print("---Address chain---")
for i in addresses:
print("ROP_chain += (" + str(i) + ")")
print('\n')
# hex_constant(gadgets,"eax", 0x123123)
``` |
{
"source": "JonLevin25/LearningPlayground",
"score": 3
} |
#### File: LearningPlayground/base64/base64.py
```python
import helper
b64_to_int = helper.dict_builder(('A', 'Z'), ('a', 'z'), ('0', '9'), '+', '/')  # '+' and '/' are single chars, not a range
int_to_b64 = {v: k for k, v in b64_to_int.items()}
def serialize_bin_str(ints, group_len: int = 8):
return "".join((helper.byte_str(i, group_len) for i in ints))
def encode(input_str: str, debug: bool = False) -> str:
# get raw binary string (without '0b' prefix) of entire input string
utf_codes = (ord(c) for c in input_str)
if debug:
# if using iterator for debug, should still be usable afterwards
# otherwise, it will be a one time use (lazy) generator, which should be more memory efficient
utf_codes = list(utf_codes)
print(F"utf codes: {', '.join(map(str, zip(input_str, utf_codes)))}")
binary = serialize_bin_str(utf_codes)
if debug:
print(f"binary: {binary} ({list(helper.group_str(8, binary, filler='0'))})")
# split binary to groups of 6 bits (each of which corresponds to 1 base64 digit)
grouped_binary = list(helper.group_str(6, binary, filler='0')) # TODO: convert to generator and get 'remaining' by final str len?
symbols = len(grouped_binary)
if debug:
grouped_binary = list(grouped_binary)
print(f"grouped: {grouped_binary}")
char_codes = map(lambda i: int(i, 2), grouped_binary)
result = "".join((int_to_b64[i] for i in char_codes))
remaining_symbols = (4 - (symbols % 4)) % 4 # invert modulo gets the "symbols left". modulo again takes care of edge case where symbols % 4 == 0
result += "=" * remaining_symbols
return result
def decode(base64_str: str, debug: bool = False) -> str:
try:
ints = (b64_to_int[i] for i in base64_str if i != '=')
if debug:
ints = list(ints)
print(f"input: {''.join(map(str, zip(base64_str, ints)))}")
binary = serialize_bin_str(ints, group_len=6)
if debug:
print(f"binary: {list(helper.group_str(8, binary))}")
# group binary into octets, parse to ints
utf_codes = (int(bin_str, 2) for bin_str in helper.group_str(8, binary, "0"))
if debug:
utf_codes = list(utf_codes)
print(f"decoded utf: {', '.join(utf_codes)}")
return "".join((chr(i) for i in utf_codes))
except KeyError:
raise ValueError("Invalid input string!")
DEBUG = 1
# result = decode("SGVsbG8=", DEBUG)
result = encode("Hello", DEBUG)
print(result)
``` |
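A small sanity sketch comparing against the standard library (this assumes `helper.dict_builder` assigns sequential indices in the order the ranges are given, i.e. the standard base64 alphabet; a 3-byte input avoids the padding edge cases of this toy decoder):
```python
import base64 as std_b64

text = "Hey"  # 3 bytes -> no '=' padding
assert encode(text) == std_b64.b64encode(text.encode()).decode()
print("matches stdlib")
```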
{
"source": "jonlidgard/optoma-projector-rs232",
"score": 2
} |
#### File: jonlidgard/optoma-projector-rs232/setup.py
```python
from setuptools import setup
def readme():
with open('README.md') as f:
return f.read()
setup(
    name='optoma-projector-rs232',
    version='0.1',
    description="A python class that communicates with Optoma projectors over an RS232 serial link",
long_description=readme(),
classifiers=[
'Development Status :: 3 - Alpha',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development :: Libraries :: Python Modules'
],
url='https://github.com/jonlidgard/optoma-projector-rs232',
author='jonlidgard',
# author_email='<EMAIL>',
    license='MIT',
packages=['optoma-projector-rs232'],
install_requires=[
'markdown',
],
include_package_data=True,
zip_safe=False
)
``` |
{
"source": "JonLinC07/Poker",
"score": 3
} |
#### File: JonLinC07/Poker/Dealer.py
```python
from Card import Card
from Player import Player
from random import shuffle
class Dealer:
def __init__(self):
self.__deck = Card.create_deck()
def deal_cards(self, players):
for n in range(2):
for player in players:
card = self.__deck[0]
player.recive_card(card)
self.__deck.remove(card)
def show_deck(self):
cards = []
for card in self.__deck:
cards.append(card.image)
print(cards)
def shuffle(self):
shuffle(self.__deck)
```
#### File: JonLinC07/Poker/Table.py
```python
from Dealer import Dealer
from Player import Player
from Card import Card
class Table:
    def __init__(self, players, board=None, pot=0):
        self.players = players
        self.board = board if board is not None else []  # avoid a shared mutable default
self.pot = pot
# def table_status(self):
``` |
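A usage sketch for the classes above (the `Player` constructor signature is an assumption, since Player.py is not part of this excerpt):
```python
# Hypothetical usage -- Player("name") is assumed; the real signature
# lives in Player.py, which is not shown here.
players = [Player("Ana"), Player("Bo")]
dealer = Dealer()
dealer.shuffle()
dealer.deal_cards(players)   # two cards each, removed from the dealer's deck
dealer.show_deck()           # remaining cards
```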
{
"source": "jonlindg/pydero",
"score": 3
} |
#### File: pydero/pydero/connection.py
```python
import requests
class Connection:
"""This class handles JSON RPC calls to a DERO daemon and DERO cli-wallet (running with the --rpc-server flag enabled)."""
def __init__(self,access_point_wallet="http://127.0.0.1:30309",access_point_daemon="http://127.0.0.1:30306"):
self.access_point_wallet=access_point_wallet
self.access_point_daemon=access_point_daemon
self.headers = {'Content-Type':'application/json'}
def rpc_wallet(self,data,mode='/json_rpc'):
"""Function for sending JSON RPC data to the DERO cli-wallet"""
if self.access_point_wallet is None:
raise Exception("No access point provided for wallet rpc")
return requests.post(self.access_point_wallet+mode,headers=self.headers,json=data)
def rpc_daemon(self,data,mode='/gettransactions'):
"""Function for sending JSON RPC data to the DERO daemon"""
if self.access_point_daemon is None:
raise Exception("No access point provided for daemon rpc")
return requests.post(self.access_point_daemon+mode,headers=self.headers,json=data)
def get_balance(self,full_response=False):
"""Get balance of the wallet"""
response = self.rpc_wallet({'jsonrpc':'2.0','id':'0','method':'getbalance'})
if full_response:
return response
else:
return response.json()['result']['balance'],response.json()['result']['unlocked_balance']
def get_address(self,full_response=False):
"""Get address of the wallet"""
response = self.rpc_wallet({'jsonrpc':'2.0','id':'0','method':'getaddress'})
if full_response:
return response
else:
return response.json()['result']['address']
def get_wallet_height(self,full_response=False):
"""Get current height in the wallet"""
response = self.rpc_wallet({'jsonrpc':'2.0','id':'0','method':'getheight'})
if full_response:
return response
else:
return response.json()['result']['height']
def get_block_count(self,full_response=False):
"""Get current height in the daemon"""
response = self.rpc_daemon({'jsonrpc':'2.0','id':'0','method':'getblockcount'},mode='/json_rpc')
if full_response:
return response
else:
return response.json()['result']['count']
def get_info(self,full_response=False):
"""Returns various info about the daemon"""
response = self.rpc_daemon({'jsonrpc':'2.0','id':'0','method':'get_info'},mode='/json_rpc')
if full_response:
return response
else:
return response.json()['result']
def get_last_block_header(self,full_response=False):
"""The header of the last block"""
response = self.rpc_daemon({'jsonrpc':'2.0','id':'0','method':'getlastblockheader'},mode='/json_rpc')
if full_response:
return response
else:
return response.json()['result']['block_header']
def get_block_header_by_hash(self,block_hash,full_response=False):
"""The header of a block with a given hash"""
response = self.rpc_daemon({'jsonrpc':'2.0','id':'0','method':'getblockheaderbyhash','params':{'hash':block_hash}},mode='/json_rpc')
if full_response:
return response
else:
return response.json()['result']['block_header']
def get_block_header_by_height(self,height,full_response=False):
"""The header of a block with a given height"""
        response = self.rpc_daemon({'jsonrpc':'2.0','id':'0','method':'getblockheaderbyheight','params':{'height':height}},mode='/json_rpc')
if full_response:
return response
else:
return response.json()['result']['block_header']
def transfer(self,destinations,mixin=5,unlock_time=0,payment_id=0,get_tx_key=False,get_tx_hex=False,full_response=False,unit='dero'):
"""Transfer dero"""
if type(destinations)!=list:
raise ValueError("destinations must be of the form [(amount_1,address_1),...,(amount_n,address_n)]")
for i,dest in enumerate(destinations):
if type(dest) not in (list,tuple,dict):
raise ValueError("destinations must be of the form [(amount_1,address_1),...,(amount_n,address_n)]")
if len(dest)!=2:
raise ValueError("destinations must be of the form [(amount_1,address_1),...,(amount_n,address_n)]")
if type(dest) in (list,tuple):
if (unit=='dero'):
destinations[i]={'amount':dest[0]*10**12,'address':dest[1]}
else:
destinations[i]={'amount':dest[0],'address':dest[1]}
            if 'amount' not in destinations[i]:
                raise ValueError("destination missing 'amount' key")
            if 'address' not in destinations[i]:
                raise ValueError("destination missing 'address' key")
        response = self.rpc_wallet({'jsonrpc':'2.0','id':'0','method':'transfer','params':{'destinations':destinations,'mixin':mixin,'unlock_time':unlock_time,'payment_id':payment_id,'get_tx_key':get_tx_key,'get_tx_hex':get_tx_hex}})
if full_response:
return response
else:
return response.json()['result']
``` |
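A usage sketch, assuming a DERO daemon and cli-wallet are running locally on the default ports used above:
```python
conn = Connection()
print(conn.get_block_count())           # current daemon height
balance, unlocked = conn.get_balance()  # wallet balance (atomic units)
print(balance, unlocked)
```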
{
"source": "JonLMyers/Inb4-Danger",
"score": 3
} |
#### File: JonLMyers/Inb4-Danger/main.py
```python
import json
import timeline_grapher
import experiment
def main():
configuration_file = "config.json"
print("Reading: config.json")
with open(configuration_file) as config:
data = json.load(config)
APIKey = data["APIKey"]
Username = data["Username"]
Chats = data["Chats"]
print("Graph Options: ")
print(" 1: Chat Frequency Time Series Grapher")
print(" 2: Individual Chat Frequency Time Series Grapher")
print(" 3: Experimental: Build Message Buckets")
selector = input("Pick a grapher: ")
if int(selector) == 1:
print("Calling: Chat Frequency Time Series Grapher")
url = timeline_grapher.chat_frequency_ts_grapher(APIKey, Username, Chats)
elif int(selector) == 2:
print("Calling: Individual Chat Frequency Time Series Grapher")
url = timeline_grapher.freq_equality_ts_grapher(APIKey, Username, Chats)
elif int(selector) == 3:
print("Calling Message Block Builder")
url = experiment.test_conversation_buckets(Chats)
else:
url = "Na son."
print(url[0])
if __name__ == "__main__":
main()
``` |
{
"source": "JonLMyers/MetroTransitAPI",
"score": 2
} |
#### File: aaxus/api/organizations.py
```python
import json, re
from flask import jsonify, request
from flask_restful import Resource
from aaxus import rest_api
from flask_restful import Resource, reqparse
from flask_jwt_extended import jwt_required, get_jwt_identity
from aaxus.models.organization import Organization
from aaxus.models.user import User
from aaxus.models.org_group import OrgGroup
from aaxus.services.token_service import generate_confirmation_token, confirm_token
create_org_parser = reqparse.RequestParser()
create_org_parser.add_argument('name', help = 'Organization Name Required', required = True)
update_org_parser = reqparse.RequestParser()
update_org_parser.add_argument('id', help = 'Organization Name Required', required = True)
view_org_parser = reqparse.RequestParser()
view_org_parser.add_argument('id', help = 'Organization Name Required', required = True)
class ManageOrganization(Resource):
@jwt_required
def post(self):
user = User.find_by_username(get_jwt_identity())
if not user:
return {'Error': 'User {} does not exist'.format(get_jwt_identity())}
data = create_org_parser.parse_args()
if Organization.find_by_name(data['name']):
return {'error': 'Organization already exists'}, 500
token = generate_confirmation_token(data['name'])
new_org = Organization(
name = data['name'],
join_token = token
)
new_org.add_member(user)
new_org.add_admin(user)
try:
new_org.save()
return{
'message': 'Organization {} was created'.format(data['name'])
}
except:
return{'message': 'Something went wrong'}, 500
@jwt_required
def put(self):
memberadd = 0
adminadd = 0
memberremove = 0
adminremove = 0
message = {'Organization': 'Updated'}
user = User.find_by_username(get_jwt_identity())
if not user:
return {'Invalid_User_Error': 'User {} does not exist'.format(get_jwt_identity())}
#data = update_org_parser.parse_args()
data = request.get_json()
org = Organization.find_by_name(data['id'])
if org == None:
return {'No_Org_Error': 'Organization does not exist'}, 500
if not org.is_admin(user.username):
return {'Not_Authorized_Error': 'Not an admin of this organization'}, 500
for setting in data:
if setting == 'id' or data[setting] == None:
pass
elif setting == 'admin_username':
for user in data['admin_username']:
target_user = User.find_by_username(user)
                    if not target_user:
                        message.update({'Admin_Add_Error': 'User does not exist'})
                        continue  # skip missing users instead of passing None along
                    org.add_admin(target_user)
adminadd += 1
message.update({'Admin_Added{}'.format(adminadd): '{}'.format(user)})
elif setting == 'member_username':
for user in data['member_username']:
target_user = User.find_by_username(user)
                    if not target_user:
                        message.update({'Member_Add_Error': 'User does not exist'})
                        continue
                    org.add_member(target_user)
memberadd += 1
message.update({'User_Added{}'.format(memberadd): '{}'.format(user)})
elif setting == 'remove_admin':
for user in data['remove_admin']:
target_user = User.find_by_username(user)
                    if not target_user:
                        message.update({'Admin_Remove_Error': 'User does not exist'})
                        continue
                    org.remove_admin(target_user)
adminremove += 1
message.update({'Admin_Removed{}'.format(adminremove): '{}'.format(user)})
elif setting == 'remove_member':
for user in data['remove_member']:
target_user = User.find_by_username(user)
                    if not target_user:
                        message.update({'Member_Remove_Error': 'User does not exist'})
                        continue
                    org.remove_member(target_user)
memberremove += 1
message.update({'User_Removed{}'.format(memberremove): '{}'.format(user)})
else:
message.update({'Setting_Error': '{} Setting is invalid'.format(setting)})
try:
org.save()
return message
except:
message.update({'Save_Error': 'Something went wrong'})
return message, 500
class ViewOrganization(Resource):
@jwt_required
def post(self):
message = {'Organization':'Info'}
user = User.find_by_username(get_jwt_identity())
if not user:
return {'Invalid_User_Error': 'User {} does not exist'.format(get_jwt_identity())}
data = view_org_parser.parse_args()
org = Organization.find_by_name(data['id'])
if org == None:
return {'No_Org_Error': 'Organization does not exist'}, 500
if not org.is_member(user):
            return {'Invalid_User_Error': 'You are not a member of this organization.'}
member_list = []
admin_list = []
message = {'Organization': org.name}
for member in org.members:
member_list.append(member.username)
for admin in org.administrators:
admin_list.append(admin.username)
message.update({'Members': member_list})
message.update({'Administrators': admin_list})
return message
```
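The PUT handler above dispatches on the top-level keys of the JSON body; a request payload exercising several branches might look like this (organization and usernames are placeholders):
```python
# Example body for ManageOrganization.put (all names are placeholders)
payload = {
    "id": "engineering",                  # organization to update
    "admin_username": ["alice"],          # promote to admin
    "member_username": ["bob", "carol"],  # add members
    "remove_member": ["dave"],            # drop a member
}
```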
#### File: aaxus/api/tickets.py
```python
import json, re, uuid
from flask import jsonify, request
from flask_restful import Resource
from aaxus import rest_api
from flask_restful import Resource, reqparse
from flask_jwt_extended import jwt_required, get_jwt_identity
from aaxus.models.organization import Organization
from aaxus.models.user import User
from aaxus.models.ticket import Ticket
create_ticket_parser = reqparse.RequestParser()
create_ticket_parser.add_argument('start', help = 'Ticket Start Required', required = True)
create_ticket_parser.add_argument('end', help = 'Ticket End Required', required = True)
create_ticket_parser.add_argument('cost', help = 'Ticket Cost Required', required = True)
update_ticket_parser = reqparse.RequestParser()
update_ticket_parser.add_argument('id', help = 'Ticket ID Required', required = True)
class ManageTickets(Resource):
def get(self):
        response = []
        tickets = Ticket.find_tickets()
for tic in tickets:
ticket_dict = dict(id=tic.ticket_id, start=tic.start, end=tic.end, cost=tic.cost)
response.append(ticket_dict)
print(response)
return jsonify(items=response)
@jwt_required
def post(self):
#target_users = []
#target_option = 0
user = User.find_by_username(get_jwt_identity())
if not user:
return {'Error': 'User {} does not exist'.format(get_jwt_identity())}
req_data = create_ticket_parser.parse_args()
data = request.get_json()
ID = uuid.uuid4()
new_ticket = Ticket(
ticket_id = str(ID),
start = data['start'],
end = data['end'],
cost = data['cost']
)
try:
new_ticket.save()
return{'id': str(ID)}
except:
return{'message': 'Something went wrong'}, 500
@jwt_required
def put(self):
target_users = []
message = {'Ticket': 'Updated'}
user = User.find_by_username(get_jwt_identity())
if not user:
return {'Error': 'User {} does not exist'.format(get_jwt_identity())}
data = request.get_json()
ticket = Ticket.find_by_id(data['id'])
if not ticket:
return {'Error': 'Ticket does not exist'}
for setting in data:
if setting == 'id':
pass
elif setting == 'target_users':
for target_user in data['target_users']:
volitile_user = User.find_by_username(target_user)
if not volitile_user:
return {'Error': 'User {} does not exist'.format(target_user)}
else:
ticket.add_target_user(volitile_user)
elif setting == 'target_organization':
target_org = Organization.find_by_name(data['target_organization'])
if target_org == None:
return {'Error': 'Organization {} does not exist'.format(target_org)}
else:
ticket.target_organization = target_org
elif setting == 'is_active':
if data['is_active'] == 'True':
ticket.is_active = True
elif data['is_active'] == 'False':
ticket.is_active = False
else:
return {'Error': 'Invalid is_active Option.'.format(data['is_active'])}
elif setting == 'title':
if data['title'] == None:
return {'Error': 'Invalid title.'}
else:
ticket.title = data['title']
elif setting == 'description':
if data['description'] == None:
return {'Error': 'Invalid Description.'}
else:
                    ticket.description = data['description']
else:
message.update({'Setting_Error': '{} Setting is invalid'.format(setting)})
try:
ticket.save()
return message
except:
message.update({'Save_Error': 'Something went wrong'})
return message, 500
class TicketsApi(Resource):
"""Defines the tickets api endpoint"""
def post(self):
"""Creates a ticket document and inserts it into the ticket collection"""
ticket = Ticket.from_json(request.get_data())
ticket.save()
return jsonify(ticket)
def get(self):
"""Searches the tickets collection for tickets"""
tickets = Ticket.objects(
description__contains=request.args['description']
)
return jsonify(tickets)
class TicketApi(Resource):
"""Defines the ticket api endpoint"""
def get(self, ticket_id):
"""Gets from the ticket collection the ticket with specified id"""
ticket = Ticket.objects.get_or_404(id=ticket_id)
return jsonify(ticket)
def put(self, ticket_id):
"""Updates the ticket with specified id"""
ticket = Ticket.objects.get_or_404(id=ticket_id)
ticket.update()
return jsonify(ticket)
def delete(self, ticket_id):
"""Deletes a ticket from the ticket collection"""
return jsonify({
'entity':'tickets',
'method':'DELETE',
'entityId':ticket_id
})
```
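A client-side sketch of creating a ticket through `ManageTickets.post` (the endpoint path, host, and token are assumptions; the handler itself only requires `start`, `end`, and `cost`):
```python
import requests

# Hypothetical endpoint path and token -- only the JSON fields are
# dictated by the parser above.
resp = requests.post(
    "http://localhost:5000/tickets",
    json={"start": "Union Depot", "end": "Mall of America", "cost": "2.50"},
    headers={"Authorization": "Bearer <token>"},
)
print(resp.json())  # {'id': '<uuid4>'}
```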
#### File: aaxus/models/ticket.py
```python
from mongoengine import *
from aaxus.models.user import User
from aaxus.models.organization import Organization
class Ticket(Document):
meta = {'collection':'tickets', 'allow_inheritance': True}
ticket_id = StringField(required=True)
start = StringField(max_length=256, required=True)
end = StringField(required=True)
cost = StringField(required=True)
@classmethod
def find_by_id(self, ID):
for ticket in Ticket.objects(ticket_id = ID):
return ticket
@classmethod
def find_tickets(self):
tickets = []
for ticket in Ticket.objects():
tickets.append(ticket)
return tickets
``` |
{
"source": "JonLMyers/YubiAuth",
"score": 3
} |
#### File: YubiAuth/app/models.py
```python
from werkzeug.security import check_password_hash, generate_password_hash
from mongoengine import *
import datetime
import json  # needed by the u2f device helpers below
import app.config
import jwt
class User(Document):
username = StringField(max_length=50, required=True, unique=True)
password_hash = StringField(max_length=128, required=True)
    yubikey_id = StringField(max_length=20, required=True)
    u2f_devices = StringField(default='[]')  # JSON-serialized list read/written by the u2f helpers
    meta = {'unique': True}
@staticmethod
def hash_password(password):
return generate_password_hash(password)
def check_password(self, password):
return check_password_hash(self.password_hash, password)
def get_u2f_devices(self):
"""Returns U2F devices"""
return json.loads(self.u2f_devices)
def set_u2f_devices(self, devices):
"""Saves U2F devices"""
self.u2f_devices = json.dumps(devices)
def has_u2f_devices(self):
"""Checks if user has any enrolled u2f devices"""
return len(self.get_u2f_devices()) > 0
@classmethod
def find_by_username(self, user_name):
for user in User.objects(username = user_name):
return user
""" Token Model """
from mongoengine import *
class RevokedToken(Document):
jti = StringField(max_length=120, required=True)
@classmethod
def is_jti_blacklisted(self, jtokeni):
for token in RevokedToken.objects(jti = jtokeni):
return True
return False
``` |
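A quick round-trip sketch of the password helpers (instantiating a `Document` does not require a MongoDB connection; only `save()` does):
```python
pw_hash = User.hash_password("correct horse")
user = User(username="demo", password_hash=pw_hash, yubikey_id="cccccccccccc")
assert user.check_password("correct horse")
assert not user.check_password("wrong")
```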
{
"source": "jonls/lpd-monitor",
"score": 2
} |
#### File: jonls/lpd-monitor/btcon.py
```python
import socket
import struct
import random
import hashlib
import errno
from gi.repository import GLib
from gi.repository import GObject
from bencode import bencode, bdecode, bdecode_all
class Bitfield(object):
def __init__(self, size, data=None):
if size < 0:
raise ValueError('Bitfield size must be non-negative')
self._size = size
self._data = bytearray((size+7)//8)
if data is not None:
for i in range(self._size):
bi = i // 8
if ord(data[bi]) & (1 << (7 - (i % 8))):
self.set(i)
def set(self, index):
if index >= self._size or index < 0:
raise IndexError('Invalid Bitfield index: %d' % index)
bi = index // 8
self._data[bi] |= 1 << (7 - (index % 8))
def count(self):
return sum(self)
def __iter__(self):
for i in range(self._size):
bi = i // 8
yield bool(self._data[bi] & (1 << (7 - (i % 8))))
def __len__(self):
return self._size
def __repr__(self):
return 'Bitfield(%d, %r)' % (self._size, ''.join(chr(x) for x in self._data))
class BTConnectionError(Exception):
pass
class BTConnection(GObject.GObject):
__gsignals__ = {
'state-changed': (GObject.SIGNAL_RUN_LAST, None, (int,)),
'metadata-changed': (GObject.SIGNAL_RUN_LAST, None, ()),
'peer-progress-changed': (GObject.SIGNAL_RUN_LAST, None, ())
}
STATE_NOT_CONNECTED = 0
STATE_HEADERS = 1
STATE_EXT_HEADERS = 2
STATE_RUNNING = 3
STATE_CLOSED = 4
HEADERS_LENGTH = 68
BYTE_EXT_EXTENSION = 44
BYTE_EXT_FAST_PEERS = 62
MSG_TYPE_CHOKE = 0
MSG_TYPE_UNCHOKE = 1
MSG_TYPE_INTERESTED = 2
MSG_TYPE_NOT_INTERESTED = 3
MSG_TYPE_HAVE = 4
MSG_TYPE_BITFIELD = 5
MSG_TYPE_REQUEST = 6
MSG_TYPE_PIECE = 7
MSG_TYPE_CANCEL = 8
MSG_TYPE_HAVE_ALL = 14
MSG_TYPE_HAVE_NONE = 15
MSG_TYPE_EXTENDED = 20
def __init__(self, infohash, peer_id=None):
super(BTConnection, self).__init__()
self._infohash = infohash
self._my_id = peer_id or ''.join(chr(random.randint(0, 255)) for i in range(20))
self._my_exts = {1: 'ut_metadata'}
self._metadata = None
self._ut_metadata_size = None
self._ut_metadata_buffer = ''
self._ut_metadata_last_req = None
self._peer_id = None
self._peer_byte_exts = set()
self._peer_exts = {}
self._peer_have = None
self._peer_have_queue = []
self._packet_len = None
self._packet = ''
self._packet_timeout = None
self._packet_callback = None
self._msg_len = None
self._msg_callback = None
self._socket = None
self._socket_queue = []
self._state = self.STATE_NOT_CONNECTED
self._input_source = None
self._output_source = None
self._connect_source = None
self._hangup_source = None
def open(self, address):
self._socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self._socket.setblocking(0)
self._socket.bind(('', 0))
self._connect_source = GLib.io_add_watch(self._socket, GLib.IO_OUT, self._socket_connect_cb)
self._hangup_source = GLib.io_add_watch(self._socket, GLib.IO_HUP, self._socket_hangup_cb)
self._packet_expect_input(self.HEADERS_LENGTH, self._handle_headers, 30)
err = self._socket.connect_ex(address)
if err not in (0, errno.EINPROGRESS):
raise BTConnectionError('Unable to connect: {}'.format(errno.errorcode[err]))
self._send_headers()
self._change_state(self.STATE_HEADERS)
def close(self):
self._close_sources()
self._socket.close()
self._change_state(self.STATE_CLOSED)
print('Closed')
@property
def metadata(self):
return self._metadata
@property
def peer_progress(self):
if self._peer_have is None:
return None
return self._peer_have.count()
@property
def piece_count(self):
if self._metadata is None:
return None
return (self.data_length + self._metadata['piece length'] - 1) // self._metadata['piece length']
@property
def data_length(self):
if self._metadata is None:
return None
if 'files' in self._metadata:
return sum(f['length'] for f in self._metadata['files'])
else:
return self._metadata['length']
def _change_state(self, state):
self._state = state
self.emit('state-changed', self._state)
def _close_sources(self):
for source in (self._hangup_source, self._connect_source,
self._input_source, self._output_source,
self._packet_timeout):
if source is not None:
GLib.source_remove(source)
def _socket_connect_cb(self, source, cond):
err = self._socket.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR)
if err != 0:
            print('Unable to connect: {}'.format(errno.errorcode[err]))
self.close()
return False
def _socket_hangup_cb(self, source, cond):
print('Hangup')
self.close()
return False
def _socket_input_cb(self, source, cond):
self._packet += self._socket.recv(self._packet_len-len(self._packet))
if len(self._packet) == self._packet_len:
GLib.source_remove(self._packet_timeout)
packet = self._packet
self._packet = ''
self._packet_callback(packet)
return False
return True
def _socket_output_cb(self, source, cond):
while len(self._socket_queue) > 0:
packet = self._socket_queue[0]
n = self._socket.send(packet)
if n < len(packet):
self._socket_queue[0] = packet[n:]
return True
else:
self._socket_queue.pop(0)
return False
def _packet_timeout_cb(self):
print('No activity')
self.close()
return False
def _packet_expect_input(self, length, callback, timeout):
self._packet_len = length
self._packet_callback = callback
self._packet_timeout = GLib.timeout_add_seconds(timeout, self._packet_timeout_cb)
self._input_source = GLib.io_add_watch(self._socket, GLib.IO_IN, self._socket_input_cb)
def _packet_send(self, packet):
self._socket_queue.append(packet)
if len(self._socket_queue) == 1:
GLib.io_add_watch(self._socket, GLib.IO_OUT, self._socket_output_cb)
def _send_headers(self):
bt_header = chr(19) + 'BitTorrent protocol'
ext_bytes = '\x00\x00\x00\x00\x00\x10\x00\x04'
self._packet_send(bt_header + ext_bytes + self._infohash + self._my_id)
def _send_message(self, msg):
msg_len = struct.pack('>L', len(msg))
self._packet_send(msg_len + msg)
def _send_ext_headers(self):
msg = chr(20) + chr(0) + bencode({'m': dict((v, k) for k, v in self._my_exts.iteritems())})
self._send_message(msg)
def _send_initial_have(self):
if self.BYTE_EXT_FAST_PEERS in self._peer_byte_exts:
msg = chr(self.MSG_TYPE_HAVE_NONE)
self._send_message(msg)
def _ut_metadata_send_request(self, piece):
ext_id = self._peer_exts['ut_metadata']
msg = chr(20) + chr(ext_id) + bencode({'msg_type': 0, 'piece': piece})
self._ut_metadata_last_req = piece
self._send_message(msg)
def _ut_metadata_validate(self):
def validate_files_list(files):
if len(files) == 0:
return False
for f in files:
if not (type(f) is dict and
'length' in f and type(f['length']) is int and
'path' in f and type(f['path']) is list and
len(f['path']) > 0 and all(f['path'])):
return False
return True
if hashlib.sha1(self._ut_metadata_buffer).digest() == self._infohash:
info_dict = bdecode(self._ut_metadata_buffer)
if ('name' in info_dict and type(info_dict['name']) is str and
'piece length' in info_dict and type(info_dict['piece length']) is int and
'pieces' in info_dict and type(info_dict['pieces']) is str and
(('length' in info_dict and type(info_dict['length']) is int) or
('files' in info_dict and type(info_dict['files']) is list and
validate_files_list(info_dict['files'])))):
self._ut_metadata_buffer = None
self._metadata = info_dict
if len(self._metadata['pieces']) != 20*self.piece_count:
self._metadata = None
return False
self.emit('metadata-changed')
self._play_have_queue()
return True
return False
def _handle_headers(self, packet):
bt_header_len, packet = ord(packet[:1]), packet[1:]
if bt_header_len != 19:
self.close()
return
bt_header, packet = packet[:bt_header_len], packet[bt_header_len:]
if bt_header != 'BitTorrent protocol':
self.close()
return
print('Connected to {!r}'.format(self._socket.getpeername()))
ext_bytes, packet = packet[:8], packet[8:]
print('Extension bytes {!r}'.format(ext_bytes))
if ord(ext_bytes[7]) & 0x4:
self._peer_byte_exts.add(self.BYTE_EXT_FAST_PEERS)
if ord(ext_bytes[5]) & 0x10:
self._peer_byte_exts.add(self.BYTE_EXT_EXTENSION)
infohash, packet = packet[:20], packet[20:]
if infohash != self._infohash:
self.close()
return
self._peer_id = packet[:20]
print('Peer id {!r}'.format(self._peer_id))
if self.BYTE_EXT_EXTENSION in self._peer_byte_exts:
self._change_state(self.STATE_EXT_HEADERS)
self._msg_callback = self._handle_ext_headers
self._send_ext_headers()
else:
self._change_state(self.STATE_RUNNING)
self._msg_callback = self._handle_message
self._send_initial_have()
self._packet_expect_input(4, self._handle_message_input, 240)
def _handle_message_input(self, packet):
if self._msg_len is None:
self._msg_len = struct.unpack('>L', packet)[0]
if self._msg_len == 0:
self._msg_len = None
self._packet_expect_input(4, self._handle_message_input, 240)
if self._msg_len > 64*1024*1024:
self.close()
return
else:
self._packet_expect_input(self._msg_len, self._handle_message_input, 60)
else:
self._msg_callback(packet)
self._msg_len = None
self._packet_expect_input(4, self._handle_message_input, 240)
def _handle_ext_headers(self, msg):
msg_type, msg = ord(msg[:1]), msg[1:]
if msg_type != self.MSG_TYPE_EXTENDED or len(msg) < 2:
self.close()
return
msg_ext_type, msg = ord(msg[:1]), msg[1:]
if msg_ext_type != 0:
self.close()
return
msg = bdecode(msg)
print('Extended handshake: {!r}'.format(msg))
if 'm' in msg and type(msg['m']) is dict:
for ext, ext_id in msg['m'].iteritems():
self._peer_exts[ext] = ext_id
if 'metadata_size' in msg and type(msg['metadata_size']) is int:
self._ut_metadata_size = msg['metadata_size']
self._change_state(self.STATE_RUNNING)
self._msg_callback = self._handle_message
self._send_initial_have()
if self._peer_exts.get('ut_metadata', 0) > 0:
self._ut_metadata_send_request(0)
def _play_have_queue(self):
if len(self._peer_have_queue) > 0:
msg_type, msg = self._peer_have_queue.pop(0)
self._handle_first_have_message(msg_type, msg)
while len(self._peer_have_queue) > 0:
msg_type, msg = self._peer_have_queue.pop(0)
self._handle_have_message(msg_type, msg)
def _handle_first_have_message(self, msg_type, msg):
def handle_bitfield(msg):
if 8*len(msg) < self.piece_count:
self.close()
return
self._peer_have = Bitfield(self.piece_count, msg)
def handle_have_all():
self._peer_have = Bitfield(self.piece_count)
for i in range(len(self._peer_have)):
self._peer_have.set(i)
def handle_have_none():
self._peer_have = Bitfield(self.piece_count)
if msg_type == self.MSG_TYPE_BITFIELD:
handle_bitfield(msg)
elif msg_type == self.MSG_TYPE_HAVE_ALL:
handle_have_all()
elif msg_type == self.MSG_TYPE_HAVE_NONE:
handle_have_none()
elif (msg_type == self.MSG_TYPE_HAVE and
not self.BYTE_EXT_FAST_PEERS in self._peer_byte_exts):
self._peer_have = Bitfield(self.piece_count)
self._handle_have_message(msg_type, msg)
else:
self.close()
return
self.emit('peer-progress-changed')
def _handle_have_message(self, msg_type, msg):
if msg_type == self.MSG_TYPE_HAVE:
index = struct.unpack('>L', msg)[0]
self._peer_have.set(index)
else:
self.close()
return
self.emit('peer-progress-changed')
def _handle_message(self, msg):
msg_type, msg = ord(msg[:1]), msg[1:]
def print_message():
print('Message: {}, {!r}'.format(msg_type, msg))
if ((msg_type == self.MSG_TYPE_HAVE and len(msg) == 4) or
(msg_type == self.MSG_TYPE_HAVE_ALL and len(msg) == 1) or
(msg_type == self.MSG_TYPE_HAVE_NONE and len(msg) == 1) or
msg_type == self.MSG_TYPE_BITFIELD):
if self.piece_count is None:
self._peer_have_queue.append((msg_type, msg))
elif self._peer_have is None:
self._handle_first_have_message(msg_type, msg)
else:
self._handle_have_message(msg_type, msg)
elif msg_type == self.MSG_TYPE_EXTENDED:
if len(msg) < 1:
self.close()
return
msg_ext_id, msg = ord(msg[:1]), msg[1:]
if msg_ext_id > 0 and msg_ext_id in self._my_exts:
msg_ext = self._my_exts[msg_ext_id]
if msg_ext == 'ut_metadata':
msg, rest = bdecode_all(msg)
total_pieces = (self._ut_metadata_size + (2**14-1)) / (2**14)
last_piece_size = self._ut_metadata_size - (2**14)*(total_pieces-1)
if 'msg_type' in msg and type(msg['msg_type']) is int:
if msg['msg_type'] == 0:
pass
elif msg['msg_type'] == 1:
if ('piece' in msg and type(msg['piece']) is int and
msg['piece'] == self._ut_metadata_last_req and
((msg['piece'] < total_pieces - 1 and
len(rest) == 2**14) or
(msg['piece'] == total_pieces - 1 and
len(rest) == last_piece_size))):
self._ut_metadata_buffer += rest
print('Metadata download: {}%'.format(int(100*float(self._ut_metadata_last_req+1)/total_pieces)))
if msg['piece'] == total_pieces - 1:
self._ut_metadata_last_req = None
self._ut_metadata_validate()
else:
self._ut_metadata_send_request(self._ut_metadata_last_req+1)
elif msg['msg_type'] == 2:
pass
else:
self.close()
return
elif msg_ext_id == 0:
print_message()
else:
self.close()
return
else:
print_message()
```
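A quick sketch of the `Bitfield` helper on its own:
```python
bf = Bitfield(10)
bf.set(0)
bf.set(9)
assert bf.count() == 2
assert list(bf) == [True] + [False] * 8 + [True]
```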
#### File: jonls/lpd-monitor/lpd.py
```python
import socket
import struct
from hashutils import bintohex, hextobin
class MulticastUDPSocket(socket.socket):
def __init__(self, local_port, reuse=False):
socket.socket.__init__(self, socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
if reuse:
self.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if hasattr(socket, 'SO_REUSEPORT'):
self.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
self.bind(('', local_port))
def mcast_add(self, addr):
mreq = struct.pack('=4sl', socket.inet_aton(addr), socket.INADDR_ANY)
self.setsockopt(socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)
class LPDSocket(MulticastUDPSocket):
    ADDRESS = '239.192.152.143'  # BEP 14 local service discovery multicast group
PORT = 6771
def __init__(self):
MulticastUDPSocket.__init__(self, LPDSocket.PORT, True)
self.mcast_add(LPDSocket.ADDRESS)
self.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, 16)
self.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_LOOP, 1)
def send_announce(self, infohash, port):
msg = ('BT-SEARCH * HTTP/1.1\r\n' +
'Host: {}:{}\r\n' +
'Port: {}\r\n' +
'Infohash: {}\r\n' +
'\r\n\r\n').format(LPDSocket.ADDRESS, LPDSocket.PORT, port, bintohex(infohash))
self.sendto(msg, 0, (LPDSocket.ADDRESS, LPDSocket.PORT))
def recv_announce(self):
data, sender = self.recvfrom(1280)
lines = data.split('\r\n')
if lines[0] != 'BT-SEARCH * HTTP/1.1':
return None, sender
port = None
infohash = None
for line in lines[1:]:
p = line.split(':', 1)
if len(p) < 2:
continue
name, value = p[0].rstrip(), p[1].strip()
if name == 'Port':
try:
port = int(value)
except ValueError:
return None, sender
elif name == 'Infohash':
if len(value) != 40:
return None, sender
try:
infohash = hextobin(value)
except ValueError:
return None, sender
if port is None or infohash is None:
return None, sender
return (infohash, port), sender
``` |
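A usage sketch (the infohash below is a placeholder; `recv_announce` blocks until another peer announces on the multicast group):
```python
sock = LPDSocket()
sock.send_announce('\x00' * 20, 6881)    # placeholder 20-byte infohash, listen port
announce, sender = sock.recv_announce()  # (infohash, port) or None on malformed input
print(announce, sender)
```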
{
"source": "jonls/torque-slack",
"score": 2
} |
#### File: torque-slack/torque_slack/torque.py
```python
import os
import re
import operator
import heapq
from datetime import datetime
import logging
import pyinotify
logger = logging.getLogger(__name__)
DEFAULT_TORQUE_HOME = '/var/spool/torque'
def heapq_merge(*iters, **kwargs):
"""Drop-in replacement for heapq.merge with key support"""
if kwargs.get('key') is None:
return heapq.merge(*iters)
def wrap(x, key=kwargs.get('key')):
return key(x), x
def unwrap(x):
_, value = x
return value
iters = tuple((wrap(x) for x in it) for it in iters)
return (unwrap(x) for x in heapq.merge(*iters))
class LogCollectorError(Exception):
"""Raised on errors on collecting log entries"""
class FilesWatcher(pyinotify.ProcessEvent):
"""Callback on created and modified files
When a file is created, it is set to be the watched file.
When a file is modified, the each new lines in the file is
passed to the callback. If a file is modified that is not
the currently watched file, an error is raised.
"""
def __init__(self, callback):
self._file = None
self._filepath = None
self._callback = callback
self._buffer = ''
def set_current(self, filepath, f=None):
"""Set currently watched file"""
if self._file is not None:
self._file.close()
self._filepath = filepath
self._file = f if f is not None else open(filepath, 'r')
self._buffer = ''
def process_IN_CREATE(self, event):
self.set_current(event.pathname)
def process_IN_MODIFY(self, event):
if self._file is None:
self.set_current(event.pathname)
elif self._filepath != event.pathname:
raise LogCollectorError('Unexpected modifications to {}'.format(
event.pathname))
buffer = self._buffer + self._file.read()
buffer, _, self._buffer = buffer.rpartition('\n')
if buffer != '':
for line in buffer.split('\n'):
self._callback(line)
class TorqueLogCollector(object):
def __init__(self, queue, torque_home=None):
if torque_home is None:
torque_home = os.environ.get('TORQUE_HOME', DEFAULT_TORQUE_HOME)
logger.info('Collecting log messages from {}'.format(torque_home))
self._torque_home = torque_home
self._queue = queue
# Create listener for server logs
server_logs = os.path.join(self._torque_home, 'server_logs')
self._server_notifier, server_replay = self._directory_listen(
server_logs, self._server_cb)
# Create listener for accounting logs
acct_logs = os.path.join(self._torque_home, 'server_priv/accounting')
self._acct_notifier, acct_replay = self._directory_listen(
acct_logs, self._acct_cb)
# Replay log messages in order
server_replay_parsed = (
self._parse_server_entry(line) for line in server_replay)
acct_replay_parsed = (
self._parse_acct_entry(line) for line in acct_replay)
for entry in heapq_merge(server_replay_parsed, acct_replay_parsed,
key=operator.itemgetter('timestamp')):
self._queue.put(entry)
# Start listeners
self._server_notifier.start()
self._acct_notifier.start()
def stop(self):
"""Stop listening for new log entries"""
logger.info('Stopping accounting logs notifier')
self._acct_notifier.stop()
logger.info('Stopping server logs notifier')
self._server_notifier.stop()
def _directory_listen(self, directory, callback):
"""Listen for log changes in a directory
Returns a notifier thread and a replay generator as a tuple.
The notifier thread is started using the start() method. Each new log
file line will be passed to the callback. The log entries of the last
modified files will be replayed from the generator.
"""
wm = pyinotify.WatchManager()
watcher = FilesWatcher(callback)
notifier = pyinotify.ThreadedNotifier(wm, watcher)
mask = pyinotify.IN_CREATE | pyinotify.IN_MODIFY
wdd = wm.add_watch(directory, mask, rec=True)
def files_mtime(directory):
for name in os.listdir(directory):
path = os.path.join(directory, name)
yield path, os.path.getmtime(path)
def replay():
recent = sorted(files_mtime(directory), key=operator.itemgetter(1))
for path, _ in recent[-7:]:
logger.info('Replaying file {}...'.format(path))
f = open(path, 'r')
try:
for line in f:
yield line.rstrip()
# Closes the previous file
watcher.set_current(path, f)
except:
f.close()
raise
return notifier, replay()
def _parse_log_date(self, line):
"""Parse date of log entry
Return date as a datetime object and the remaining log entry.
"""
m = re.match(r'^(\d{2})/(\d{2})/(\d{4}) '
r'(\d{2}):(\d{2}):(\d{2});(.*)$', line)
if not m:
raise LogCollectorError('Unable to match date on log message: {}'.format(line))
# Parse time stamp
month = int(m.group(1))
day = int(m.group(2))
year = int(m.group(3))
hour = int(m.group(4))
minute = int(m.group(5))
second = int(m.group(6))
dt = datetime(year=year, month=month, day=day,
hour=hour, minute=minute, second=second)
return dt, m.group(7)
def _parse_server_entry(self, line):
"""Parse a server log entry"""
# Example:
# 02/27/2015 00:59:44;0100;PBS_Server.23657;Job;22495[].clusterhn.cluster.com;enqueuing into default, state 1 hop 1
dt, line = self._parse_log_date(line)
log_type, server, section, about, message = line.split(';', 4)
event = {'log': 'server',
'timestamp': dt,
'type': log_type,
'server': server,
'section': section,
'about': about,
'message': message}
return event
def _parse_acct_entry(self, line):
"""Parse an accounting log entry"""
# Example:
# 02/26/2015 00:04:48;Q;22320.clusterhn.cluster.com;queue=default
dt, line = self._parse_log_date(line)
state, job_id, properties = line.split(';', 2)
properties = dict(self._parse_properties(properties.rstrip()))
event = {'log': 'accounting',
'timestamp': dt,
'job_id': job_id,
'state': state,
'properties': properties}
return event
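# For the example line above, this yields (sketch):
# {'log': 'accounting', 'timestamp': datetime(2015, 2, 26, 0, 4, 48),
# 'state': 'Q', 'job_id': '22320.clusterhn.cluster.com',
# 'properties': {'queue': 'default'}}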
def _server_cb(self, line):
"""Callback when a server log entry appears"""
self._queue.put(self._parse_server_entry(line))
def _acct_cb(self, line):
"""Callback when an accounting log entry appears"""
self._queue.put(self._parse_acct_entry(line))
def _parse_properties(self, s):
""""Parse list of properties separated by space"""
if s == '':
return
for prop in s.split(' '):
key, value = prop.split('=', 1)
yield key, value
``` |
{
"source": "jonluntzel/pulse2percept",
"score": 2
} |
#### File: pulse2percept/tests/test_api.py
```python
import numpy as np
import numpy.testing as npt
import pytest
from .. import api as p2p
from .. import implants
from .. import stimuli
from .. import utils
def test_Simulation___init__():
implant = implants.Electrode("epiretinal", 10, 0, 0, 0)
with pytest.raises(TypeError):
p2p.Simulation(implant)
def test_Simulation_pulse2percept():
implant = implants.ElectrodeArray("epiretinal", 10, 0, 0, 0)
sim = p2p.Simulation(implant, engine='serial')
sim.set_optic_fiber_layer(x_range=[0, 0], y_range=[0, 0])
pt = stimuli.BiphasicPulse('cathodicfirst', 0.45 / 1000, 0.005 / 1000)
sim.pulse2percept(pt)
sim.pulse2percept(pt, layers=['GCL'])
sim.pulse2percept(pt, layers=['INL'])
# PulseTrain must have the same tsample as (implicitly set up) GCL
pt = stimuli.BiphasicPulse("cathodicfirst", 0.1, 0.001)
with pytest.raises(ValueError):
sim.pulse2percept(pt)
pt = stimuli.BiphasicPulse("cathodicfirst", 0.1, 0.005 / 1000)
with pytest.raises(ValueError):
sim.pulse2percept(pt, layers=['GCL', 'invalid'])
def test_Simulation_set_optic_fiber_layer():
sim = p2p.Simulation(implants.ArgusI(), engine='serial')
# Invalid grid ranges
with pytest.raises(ValueError):
sim.set_optic_fiber_layer(x_range=(10, 0))
with pytest.raises(ValueError):
sim.set_optic_fiber_layer(x_range=(1, 2, 3))
with pytest.raises(ValueError):
sim.set_optic_fiber_layer(x_range='invalid')
with pytest.raises(ValueError):
sim.set_optic_fiber_layer(y_range=(10, 0))
with pytest.raises(ValueError):
sim.set_optic_fiber_layer(y_range=(1, 2, 3))
with pytest.raises(ValueError):
sim.set_optic_fiber_layer(y_range='invalid')
x_range = (-100, 100)
y_range = (0, 200)
sim.set_optic_fiber_layer(x_range=x_range, y_range=y_range,
save_data=False, alpha=14000)
npt.assert_equal(sim.ofl.gridx.min(), x_range[0])
npt.assert_equal(sim.ofl.gridx.max(), x_range[1])
npt.assert_equal(sim.ofl.gridy.min(), y_range[0])
npt.assert_equal(sim.ofl.gridy.max(), y_range[1])
npt.assert_equal(sim.ofl.x_range, x_range)
npt.assert_equal(sim.ofl.y_range, y_range)
npt.assert_equal(sim.ofl.alpha, 14000)
# Smoke test
implant = implants.ElectrodeArray('epiretinal', 10, 0, 0, 0)
sim = p2p.Simulation(implant, engine='serial')
sim.set_optic_fiber_layer(x_range=0, y_range=0)
sim.set_optic_fiber_layer(x_range=[0, 0], y_range=[0, 0])
sim.set_optic_fiber_layer()
def test_Simulation_set_ganglion_cell_layer():
# A valid ganglion cell model
class Valid(p2p.retina.BaseModel):
def model_cascade(self, inval):
return inval
# Smoke test custom model
implant = implants.ElectrodeArray('epiretinal', 10, 0, 0, 0)
sim = p2p.Simulation(implant, engine='serial')
sim.set_optic_fiber_layer(x_range=0, y_range=0)
valid_model = Valid(tsample=0.2)
sim.set_ganglion_cell_layer(valid_model)
npt.assert_equal(isinstance(sim.gcl, p2p.retina.BaseModel), True)
npt.assert_equal(sim.gcl.tsample, 0.2)
# Smoke test latest model
for modelstr in ['latest', 'Latest', 'LATEST']:
sim.set_ganglion_cell_layer(modelstr, lweight=42)
npt.assert_equal(isinstance(sim.gcl, p2p.retina.TemporalModel), True)
npt.assert_equal(sim.gcl.lweight, 42)
sim.set_ganglion_cell_layer(modelstr, unknown_param=2) # smoke
# Smoke test Nanduri model
for modelstr in ['Nanduri2012', 'nanduri2012', 'NANDURI2012']:
sim.set_ganglion_cell_layer(modelstr, tau3=42)
npt.assert_equal(isinstance(sim.gcl, p2p.retina.Nanduri2012), True)
npt.assert_equal(sim.gcl.tau3, 42)
sim.set_ganglion_cell_layer(modelstr, unknown_param=2) # smoke
# Smoke test Horsager model
for modelstr in ['Horsager2009', 'horsager', 'HORSAGER2009']:
sim.set_ganglion_cell_layer(modelstr, tau3=42)
npt.assert_equal(isinstance(sim.gcl, p2p.retina.Horsager2009), True)
npt.assert_equal(sim.gcl.tau3, 42)
sim.set_ganglion_cell_layer(modelstr, unknown_param=2) # smoke
# Model unknown
with pytest.raises(ValueError):
sim.set_ganglion_cell_layer('unknown-model')
with pytest.raises(ValueError):
sim.set_ganglion_cell_layer(implants.ArgusII())
def test_get_brightest_frame():
# Pick a few random frames and place the brightest pixel therein
num_frames = 10
rand_idx = np.random.randint(0, num_frames, 5)
for idx in rand_idx:
# Set the brightest pixel in frame `idx` of a random vector
tsdata = np.random.rand(2, 2, num_frames)
tsdata[1, 1, idx] = 2.0
# Make sure function returns the right frame
ts = utils.TimeSeries(1, tsdata)
brightest = p2p.get_brightest_frame(ts)
npt.assert_equal(brightest.data.max(), tsdata.max())
npt.assert_equal(brightest.data, tsdata[:, :, idx])
```
#### File: pulse2percept/pulse2percept/viz.py
```python
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import patches
import logging
from . import implants
from . import utils
from . import retina
def plot_fundus(implant, stim=None, ax=None, loc_od=(15.5, 1.5), n_bundles=100,
upside_down=False, annot_array=True, annot_quadr=True):
"""Plot an implant on top of the axon map
This function plots an electrode array on top of the axon map, akin to a
fundus photograph. If `stim` is passed, activated electrodes will be
highlighted.
Parameters
----------
implant : implants.ElectrodeArray
An implants.ElectrodeArray object that describes the implant.
stim : utils.TimeSeries|list|dict, optional, default: None
An input stimulus, as passed to ``p2p.pulse2percept``. If given,
activated electrodes will be highlighted in the plot.
ax : matplotlib.axes._subplots.AxesSubplot, optional, default: None
A Matplotlib axes object. If None given, a new one will be created.
loc_od : (x_od, y_od), optional, default: (15.5, 1.5)
Location of the optic disc center (deg).
n_bundles : int, optional, default: 100
Number of nerve fiber bundles to plot.
upside_down : bool, optional, default: False
Flag whether to plot the retina upside-down, such that the upper
half of the plot corresponds to the upper visual field. In general,
inferior retina == upper visual field (and superior == lower).
annot_array : bool, optional, default: True
Flag whether to label electrodes and the tack.
annot_quadr : bool, optional, default: True
Flag whether to annotate the four retinal quadrants
(inferior/superior x temporal/nasal).
Returns
-------
Returns a handle to the created figure (`fig`) and axes element (`ax`).
"""
if not isinstance(implant, implants.ElectrodeArray):
e_s = "`implant` must be of type implants.ElectrodeArray"
raise TypeError(e_s)
if n_bundles < 1:
raise ValueError('Number of nerve fiber bundles must be >= 1.')
phi_range = (-180.0, 180.0)
n_rho = 801
rho_range = (2.0, 45.0)
# Make sure x-coord of optic disc has the correct sign for LE/RE:
if (implant.eye == 'RE' and loc_od[0] <= 0
or implant.eye == 'LE' and loc_od[0] > 0):
logstr = ("For eye==%s, expected opposite sign of x-coordinate of "
"the optic disc; changing %.3f to %.3f" % (implant.eye,
loc_od[0],
-loc_od[0]))
logging.getLogger(__name__).info(logstr)
loc_od = (-loc_od[0], loc_od[1])
if ax is None:
# No axes object given: create
fig, ax = plt.subplots(1, figsize=(10, 8))
else:
fig = ax.figure
# Matplotlib<2 compatibility
if hasattr(ax, 'set_facecolor'):
ax.set_facecolor('black')
elif hasattr(ax, 'set_axis_bgcolor'):
ax.set_axis_bgcolor('black')
# Draw axon pathways:
phi = np.linspace(*phi_range, num=n_bundles)
func_kwargs = {'n_rho': n_rho, 'loc_od': loc_od,
'rho_range': rho_range, 'eye': implant.eye}
axon_bundles = utils.parfor(retina.jansonius2009, phi,
func_kwargs=func_kwargs)
for bundle in axon_bundles:
ax.plot(retina.dva2ret(bundle[:, 0]), retina.dva2ret(bundle[:, 1]),
c=(0.5, 1.0, 0.5))
# Highlight location of stimulated electrodes
if stim is not None:
for key in stim:
el = implant[key]
if el is not None:
ax.plot(el.x_center, el.y_center, 'oy',
markersize=np.sqrt(el.radius) * 2)
# Plot all electrodes and label them (optional):
for e in implant.electrodes:
if annot_array:
ax.text(e.x_center + 100, e.y_center + 50, e.name,
color='white', size='x-large')
ax.plot(e.x_center, e.y_center, 'ow', markersize=np.sqrt(e.radius))
# Plot the location of the array's tack and annotate it (optional):
if implant.tack:
tx, ty = implant.tack
ax.plot(tx, ty, 'ow')
if annot_array:
if upside_down:
offset = 100
else:
offset = -100
ax.text(tx, ty + offset, 'tack',
horizontalalignment='center',
verticalalignment='top',
color='white', size='large')
# Show circular optic disc:
ax.add_patch(patches.Circle(retina.dva2ret(loc_od), radius=900, alpha=1,
color='black', zorder=10))
xmin, xmax, ymin, ymax = retina.dva2ret([-20, 20, -15, 15])
ax.set_aspect('equal')
ax.set_xlim(xmin, xmax)
ax.set_xlabel('x (microns)')
ax.set_ylim(ymin, ymax)
ax.set_ylabel('y (microns)')
eyestr = {'LE': 'left', 'RE': 'right'}
ax.set_title('%s in %s eye' % (implant, eyestr[implant.eye]))
ax.grid('off')
# Annotate the four retinal quadrants near the corners of the plot:
# superior/inferior x temporal/nasal
if annot_quadr:
if upside_down:
topbottom = ['bottom', 'top']
else:
topbottom = ['top', 'bottom']
if implant.eye == 'RE':
temporalnasal = ['temporal', 'nasal']
else:
temporalnasal = ['nasal', 'temporal']
for yy, valign, si in zip([ymax, ymin], topbottom,
['superior', 'inferior']):
for xx, halign, tn in zip([xmin, xmax], ['left', 'right'],
temporalnasal):
ax.text(xx, yy, si + ' ' + tn,
color='black', fontsize=14,
horizontalalignment=halign,
verticalalignment=valign,
backgroundcolor=(1, 1, 1, 0.8))
# Need to flip y axis to have upper half == upper visual field
if upside_down:
ax.invert_yaxis()
return fig, ax
```
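For context, a minimal call might look like the sketch below; it assumes the legacy 0.x `pulse2percept` API used in this file, and the electrode name in `stim` is illustrative.
```python
# Hypothetical usage sketch for plot_fundus (legacy pulse2percept 0.x API).
import matplotlib.pyplot as plt
from pulse2percept import implants, viz

implant = implants.ArgusI()  # 4x4 epiretinal array
stim = {'A1': None}          # highlight electrode A1 (illustrative)
fig, ax = viz.plot_fundus(implant, stim=stim, n_bundles=50)
plt.show()
```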
#### File: pulse2percept/scripts/CalculateChronaxies.py
```python
import numpy as np
from pulse2percept import electrode2currentmap as e2cm
from pulse2percept import effectivecurrent2brightness as ec2b
from pulse2percept import utils
from pulse2percept import files as n2sf
from scipy.optimize import minimize
from scipy.optimize import minimize_scalar# import npy2savedformats as n2sf
import matplotlib.pyplot as plt
import importlib as imp
#imp.reload(n2sf)
def findampval(amp, ecs, retina, rsample, whichlayer):
# Note: relies on the globals `tm` and `pd` defined later in this script.
pt = e2cm.Psycho2Pulsetrain(tsample=tm.tsample, current_amplitude=amp, dur=.6,
delay=10 / 1000, pulse_dur=pd / 1000,
interphase_dur=10 / 1000, freq=2)
resp = ec2b.pulse2percept(tm, ecs, retina, [pt], rsample=rsample,
dolayer=whichlayer, dojit=True, engine='serial')
return ((np.max(resp.data) * 1000) - 67.89) ** 2
xlist=[]
ylist=[]
rlist=[] #electrode radius, microns
hlist=[] # lift of electrode from retinal surface, microns
e_spacing=525 # spacing in microns
for x in np.arange(-1, 1, e_spacing):
for y in np.arange(-1, 1, e_spacing):
xlist.append(x)
ylist.append(y)
rlist.append(100) # electrode radius
hlist.append(0) # or 179.6; electrode lift from retinal surface,
# epiretinal array - distance to the ganglion layer
# subretinal array - distance to the bipolar layer
# in Argus 1 179.6 is a good approx of height in a better patient
e_all = e2cm.ElectrodeArray(rlist,xlist,ylist,hlist, ptype='epiretinal')
# create retina, input variables include the sampling and how much of the retina is simulated, in microns
# (0,0 represents the fovea)
retinaname='SmallL80S75WL500'
r = e2cm.Retina(axon_map=None,sampling=75, ylo=-500, yhi=500, xlo=-500, xhi=500, axon_lambda=8)
# the effective current spread that incorporates axonal stimulation
myout=[]
d=.1
fps=30
pt=[]
inl_out=[]
nfl_out=[]
modelver='Krishnan'
#for d in [.1, .2, .45, .75, 1., 2., 4., 8., 16., 32.]:
tm = ec2b.TemporalModel()
rsample=int(np.round((1/tm.tsample) / 60 )) # resampling of the output to fps
# at 0 off the retinal surface a 0.45 pulse in the nfl gives a response of 1
[ecs, cs] = r.electrode_ecs(e_all)
inl_amp = []
nfl_amp = []
for pd in [.01, .02, .04, .08, .16, .32, .64, 1.28, 2.56, 5.12, 10.24, 20.48]:
xamp=120
dolayer='INL'
tmp = minimize(findampval, xamp, args=(ecs, r, rsample, 'INL'))
inl_amp.append(tmp.x)
print(pd)
print('minimized inl layer')
print(tmp.x)
dolayer='NFL'
tmp = minimize(findampval, xamp, args=(ecs, r, rsample, 'NFL'))
nfl_amp.append(tmp.x)
print('minimized nfl layer')
print(tmp.x)
#inl_r = ec2b.pulse2percept(tm, ecs, r, [pt_2], rsample=rsample, dolayer='INL', dojit=False, engine='serial')
#def pulse2percept(tm, ecs, retina, ptrain, rsample, dolayer,
# engine='joblib', dojit=True, n_jobs=-1, tol=.05):
#inl_r = ec2b.pulse2percept(tm, ecs, r, [pt_2], rsample=rsample, dolayer='INL', dojit=False, engine='serial')
#
#omparenflinl(.636, ecs, r, [pt_2], [pt_01], rsample, False, 'serial')
#myout=minimize(comparenflinl, x0, args=(ecs, r, [pt_2], [pt_01], rsample, False, 'serial', ))
``` |
{
"source": "jonlwowski012/DropboxROS",
"score": 2
} |
#### File: DropboxROS/src/filenames_server.py
```python
from dropboxros.srv import *
from dropboxros.msg import username, filenames
import rospy
import os
def handle_checkfiles(req):
filenames_cli = filenames()
all_filenames = [f for f in os.listdir('.') if os.path.isfile(f)]
client_files = []
filetimes=[]
for filename in all_filenames:
print(req)
if req.username.username == filename.split("_",1)[0]:
time = os.path.getmtime(filename)
filetimes.append(time)
client_files.append(filename.split("_",1)[1])
filenames_cli.filenames = client_files
resp = CheckFilesResponse()
resp.filenames = filenames_cli
resp.filetimes = filetimes
return resp
def send_filenames_server():
rospy.init_node('filenames_server', anonymous=True)
s = rospy.Service('/server/check_filenames', CheckFiles, handle_checkfiles)
print "Ready to send files."
rospy.spin()
if __name__ == "__main__":
send_filenames_server()
```
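A matching client for this service might look like the sketch below; the request layout (a `username` message nested in the `CheckFiles` request) is inferred from `handle_checkfiles` above, and the node and user names are assumptions.
```python
# Hypothetical client for /server/check_filenames.
import rospy
from dropboxros.srv import CheckFiles
from dropboxros.msg import username

rospy.init_node('filenames_client', anonymous=True)
rospy.wait_for_service('/server/check_filenames')
check_files = rospy.ServiceProxy('/server/check_filenames', CheckFiles)
resp = check_files(username=username(username='alice'))
print(resp.filenames.filenames, resp.filetimes)
```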
#### File: DropboxROS/src/get_key_server.py
```python
from dropboxros.srv import *
from dropboxros.msg import username, filenames, files
import rospy
import os
from shutil import copyfile
def handle_getkey(req):
print(req)
with open(req.username+'_key_'+req.filename, 'r') as myfile:
key = myfile.read()
resp = GetKeyResponse()
resp.key = key
return resp
def update_server_service():
rospy.init_node('key_server', anonymous=True)
s = rospy.Service('/server/get_key', GetKey, handle_getkey)
print "Ready to share files."
rospy.spin()
if __name__ == "__main__":
update_server_service()
``` |
{
"source": "jonlwowski012/UGV-Wheel-Slip-Detection-Using-LSTM-and-DNN",
"score": 2
} |
#### File: CatFaultDetection/LSTM/Test_LSTM.py
```python
import numpy as np
from scipy.misc import imread, imsave, imresize
from keras.models import model_from_json
from os.path import join
import matplotlib.pyplot as plt
import pandas as pd
import time
def shuffler(filename):
df = pd.read_csv(filename, header=0)
# return the pandas dataframe
return df.reindex(np.random.permutation(df.index))
num_classes = 4
# Read Dataset
data = pd.read_csv('../dataset/fault_dataset.csv')
data = shuffler('../dataset/fault_dataset.csv')
X = np.asarray(data[['posex','posey','orix','oriy','oriz','oriw']])
y_norm = np.asarray(data['labels'])
y = np.zeros((len(y_norm), num_classes))
y[np.arange(len(y_norm)), y_norm] = 1
# Define Paths and Variables
model_dir = 'model'
#%% Load model and weights separately due to error in keras
model = model_from_json(open(model_dir+"/model_weights.json").read())
model.load_weights(model_dir+"/model_weights.h5")
#%% Predict Output
t0 = time.time()
output_org = model.predict(np.reshape(X, (X.shape[0], 1, X.shape[1])))
print "Time to predict all ", len(X), " samples: ", time.time()-t0
print "Average time to predict a sample: ", (time.time()-t0)/len(X)
output = np.zeros_like(output_org)
output[np.arange(len(output_org)), output_org.argmax(1)] = 1
correct = 0
for i in range(len(output)):
if np.array_equal(output[i],y[i]):
correct += 1
print "Acc: ", correct/float(len(output))
output_index = []
for row in output:
output_index.append(np.argmax(row))
plt.plot(y_norm, color='red',linewidth=3)
plt.plot(output_index, color='blue', linewidth=1)
plt.show()
```
#### File: CatFaultDetection/LSTM/Train_LSTM.py
```python
from keras.models import Sequential
from keras.layers import Dense, Dropout
from keras.layers import Embedding
from keras.layers import LSTM
from keras.optimizers import Adam
from keras import losses
from os import listdir
from os.path import join
from scipy import misc
import numpy as np
from keras.callbacks import EarlyStopping
from numpy import genfromtxt
import pandas as pd
import tensorflow as tf
def shuffler(filename):
df = pd.read_csv(filename, header=0)
# return the pandas dataframe
return df.reindex(np.random.permutation(df.index))
# Constant Variables
epochs = 75
batch_size = 100
data_dim = 6
timesteps = 100000
num_classes = 4
# Read Dataset
data = pd.read_csv('../dataset/fault_dataset.csv')
data = shuffler('../dataset/fault_dataset.csv')
X = data[['posex','posey','orix','oriy','oriz','oriw']]
y_norm = np.asarray(data['labels'])
y = np.zeros((len(y_norm), num_classes))
y[np.arange(len(y_norm)), y_norm] = 1
print(X.shape)
print(y.shape)
model_dir = 'model'
# Create LSTM
# expected input data shape: (batch_size, timesteps, data_dim)
model = Sequential()
#model.add(Embedding(batch_size, timesteps, input_length=data_dim))
model.add(LSTM(500, input_shape=(1, data_dim), return_sequences=True))
model.add(Dropout(0.2))
model.add(LSTM(375, return_sequences=True))
model.add(Dropout(0.2))
model.add(LSTM(325, return_sequences=True))
model.add(Dropout(0.2))
model.add(LSTM(275))
model.add(Dense(50, activation='relu'))
model.add(Dense(35, activation='relu'))
model.add(Dense(12, activation='relu'))
model.add(Dense(12, activation='relu'))
model.add(Dense(12, activation='relu'))
model.add(Dense(12, activation='relu'))
model.add(Dense(12, activation='relu'))
model.add(Dense(num_classes, activation='sigmoid'))
model.compile(loss='categorical_crossentropy',
optimizer='adam',
metrics=['accuracy'])
X_mat = X.values
model.fit(np.reshape(X_mat, (X_mat.shape[0], 1, X_mat.shape[1])), y, batch_size=batch_size, epochs=epochs, validation_split=.3)
# Save parameters
config = model.to_json()
open(join(model_dir, 'model_weights.json'), "w").write(config)
model.save_weights(join(model_dir,'model_weights.h5'))
``` |
{
"source": "Jonlysun/3DWarping_Synthesis",
"score": 3
} |
#### File: Jonlysun/3DWarping_Synthesis/semantic.py
```python
import cv2
from PIL import Image
import numpy as np
import os.path as osp
from utils import getInstanceColorImage, make_palette, color_seg
import argparse
def getDispSeman(ins_img_mask):
disp_img = cv2.imread("disparity/6_l.png")
disp_img = cv2.cvtColor(disp_img, cv2.COLOR_BGR2GRAY)
sub_disp_img = ins_img_mask * disp_img
cv2.imwrite("sub_disp_img.jpg", sub_disp_img)
def getSimpleMask(obj_num, instance_id):
instance_id[instance_id != obj_num] = 0
ins_img_mask = np.clip(instance_id, 0, 1)
return ins_img_mask
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--id', type=int)
args = parser.parse_args()
pid = args.id
sem_dir_name = "Semantic"
img_dir_name = "image"
pic_name = "{}_l.png".format(pid)
img_name = "{}_l.jpg".format(pid)
sem_path = osp.join(sem_dir_name, pic_name)
img_path = osp.join(img_dir_name, img_name)
sem_img = Image.open(sem_path, "r")
sem_img = np.array(sem_img)
semantic_id = (sem_img >> 8).astype(np.uint8)
instance_id = sem_img.astype(np.uint8)
instance_id += semantic_id
color_num = np.max(semantic_id) + np.max(instance_id)
palette = make_palette(color_num)
ins_img = color_seg(instance_id, palette)
cv2.imwrite("Semantic_color/{}_l.jpg".format(pid), ins_img)
obj_num = 34
instance_id[instance_id != obj_num] = 0
instance_id = Image.fromarray(instance_id)
instance_id.save("2_l_simple_mask.png")
img = cv2.imread(img_path)
ins_img_mask = getSimpleMask(obj_num, np.array(instance_id))
sub_img_mask = cv2.merge([ins_img_mask, ins_img_mask, ins_img_mask])
sub_img = sub_img_mask * img
cv2.imwrite("{}_l.jpg".format(pid), sub_img)
```
#### File: Jonlysun/3DWarping_Synthesis/utils.py
```python
import cv2
import numpy as np
from PIL import Image
import os
import imageio
from skimage.transform import resize
def index(m, n):
if m >= 0 and m < n:
return m
elif m < 0:
return 0
elif m >= n:
return n-1
def make_palette(num_classes):
"""
Maps classes to colors in the style of PASCAL VOC.
Close values are mapped to far colors for segmentation visualization.
See http://host.robots.ox.ac.uk/pascal/VOC/voc2012/index.html#devkit
Takes:
num_classes: the number of classes
Gives:
palette: the colormap as a k x 3 array of RGB colors
"""
palette = np.zeros((num_classes, 3), dtype=np.uint8)
for k in range(0, num_classes):
label = k
i = 0
while label: # bit-shift the label to spread class bits across RGB channels
palette[k, 0] |= (((label >> 0) & 1) << (7 - i)) # >> is a binary right shift
palette[k, 1] |= (((label >> 1) & 1) << (7 - i))
palette[k, 2] |= (((label >> 2) & 1) << (7 - i))
label >>= 3
i += 1
return palette
def color_seg(seg, palette):
"""
Replace classes with their colors.
Takes:
seg: H x W segmentation image of class IDs
Gives:
H x W x 3 image of class colors
"""
return palette[seg.flat].reshape(seg.shape + (3,))
def getInstanceColorImage(sem_img):
semantic_id = (sem_img >> 8).astype(np.uint8)
instance_id = sem_img.astype(np.uint8)
print(semantic_id.dtype)
print(instance_id.dtype)
print(np.min(semantic_id))
print(np.min(instance_id))
color_num = np.max(semantic_id) + np.max(instance_id)
palette = make_palette(color_num)
"""
img = color_seg(semantic_id, palette)
ins_img = color_seg(instance_id, palette)
"""
instance_id += semantic_id
# cv2.imwrite('test.jpg', img)
ins_img = color_seg(instance_id, palette)
return ins_img
def read_MiDaS_depth(disp_fi, disp_rescale=10., h=None, w=None):
if 'npy' in os.path.splitext(disp_fi)[-1]:
disp = np.load(disp_fi)
else:
disp = imageio.imread(disp_fi).astype(np.float32)[:, :, 0]
disp = disp - disp.min()
disp = cv2.blur(disp / disp.max(), ksize=(3, 3)) * disp.max()
disp = (disp / disp.max()) * disp_rescale
if h is not None and w is not None:
disp = resize(disp / disp.max(), (h, w), order=1) * disp.max()
depth = 1. / np.maximum(disp, 0.05)
return depth
def depth2disp(depth):
# depth = depth - depth.min()
# depth - (depth / depth.max()) * 10.0
disp = 1. / depth
return disp
def write_depth(path, depth, bits=1):
"""Write depth map to pfm and png file.
Args:
path (str): filepath without extension
depth (array): depth
"""
# write_pfm(path + ".pfm", depth.astype(np.float32))
depth_min = depth.min()
depth_max = depth.max()
max_val = (2**(8*bits))-1
if depth_max - depth_min > np.finfo("float").eps:
out = max_val * (depth - depth_min) / (depth_max - depth_min)
else:
out = 0
if bits == 1:
cv2.imwrite(path + ".png", out.astype("uint8"))
elif bits == 2:
cv2.imwrite(path + ".png", out.astype("uint16"))
return
def insertDepth(img):
Height, Width = img.shape
new_depth = img.copy()
# integral = img.copy()
ptsMap = np.zeros((Height, Width))
integral = np.zeros((Height, Width))
# ptsMap[np.nonzero(img)] = 1
for i in range(Height):
for j in range(Width):
if img[i, j] > 1e-3:
integral[i, j] = img[i, j]
ptsMap[i, j] = 1
# integral
for i in range(0, Height):
for j in range(1, Width):
integral[i, j] += integral[i, j-1]
ptsMap[i, j] += ptsMap[i, j-1]
for i in range(1, Height):
for j in range(0, Width):
integral[i, j] += integral[i-1, j]
ptsMap[i, j] += ptsMap[i-1, j]
# median filter using integral graph
filter_size = 10
while filter_size > 1:
wnd = int(filter_size)
filter_size /= 2
for i in range(Height):
for j in range(Width):
left = max(0, j - wnd - 1)
right = min(Width - 1, j + wnd)
up = max(0, i - wnd - 1)
bot = min(Height - 1, i + wnd)
ptsCnt = int(ptsMap[bot, right]) + int(ptsMap[up, left]) - \
(int(ptsMap[up, right]) + int(ptsMap[bot, left]))
sumGray = int(integral[bot, right]) + int(integral[up, left]) - \
(int(integral[up, right]) + int(integral[bot, left]))
if ptsCnt <= 0:
continue
new_depth[i, j] = float(sumGray / ptsCnt)
# new_depth = cv2.GaussianBlur(new_depth, (3, 3), 0)
s = int(wnd / 2) * 2 + 1
new_depth = cv2.GaussianBlur(new_depth, (s, s), s, s)
return new_depth
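# A minimal usage sketch (file names assumed): convert a MiDaS disparity map
# to depth, back to disparity, and save a 16-bit depth PNG.
# depth = read_MiDaS_depth('disparity/6_l.png', disp_rescale=10.)
# disp = depth2disp(depth)
# write_depth('depth_out', depth, bits=2)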
``` |
{
"source": "Jonlysun/CBIR",
"score": 3
} |
#### File: Jonlysun/CBIR/Train.py
```python
import cv2
import numpy as np
from PIL import Image
import argparse
import os
from sklearn.cluster import KMeans
from matplotlib import pyplot as plt
class Train():
def __init__(self, dataset_path, num_words, method):
self.dataset_path = dataset_path
self.num_words = num_words
self.method = method
if self.method == 'sift' or self.method == 'test':
self.feat_extract = cv2.xfeatures2d.SIFT_create()
self.feature_num = 128
elif self.method == 'surf':
self.feat_extract = cv2.xfeatures2d.SURF_create()
self.feature_num = 64
elif self.method == 'orb':
self.feat_extract = cv2.ORB_create(1000)
self.feature_num = 32
if not os.path.exists(self.method):
os.makedirs(self.method)
def get_img_path(self, training_path):
print("dataset : {} is loading ......".format(training_path))
classes = os.listdir(training_path)
image_number = 0
img_paths = []
for each in classes:
dirs = os.path.join(training_path, each)
training_names = os.listdir(dirs)
image_number += len(training_names)
for name in training_names:
img_path = os.path.join(dirs, name)
img_paths.append(img_path)
print('Image number: {}'.format(image_number))
print('Classes number: {}'.format(len(classes)))
return img_paths
def getClusterCentures(self, img_paths, dataset_matrix, num_words):
des_list = [] # feature descriptors for each image
des_matrix = np.zeros((1, self.feature_num))
# sift_det = cv2.xfeatures2d.SIFT_create()
# sift_det = cv2.SIFT_create()
count = 1
print(f'{self.method} features extracting ......')
# img_paths = img_paths[:20]
if img_paths is not None:
for path in img_paths:
img = Image.open(path)
if img.mode != 'RGB':
img = img.convert('RGB')
img = np.array(img)
gray = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
kp, des = self.feat_extract.detectAndCompute(gray, None)
if des is not None:
des_matrix = np.row_stack((des_matrix, des))
des_list.append(des)
count += 1
if count % 50 == 0:
print('\t{} has finished'.format(count))
elif dataset_matrix is not None:
for i in range(dataset_matrix.shape[0]):
gray = dataset_matrix[i]
kp, des = self.feat_extract.detectAndCompute(gray, None)
if des is not None:
des_matrix = np.row_stack((des_matrix, des))
des_list.append(des)
else:
raise ValueError('Invalid input: provide either img_paths or dataset_matrix')
des_matrix = des_matrix[1:, :] # the des matrix of sift
# Compute cluster centres to build the visual-word dictionary
print('Calculate Kmeans center: ......')
kmeans = KMeans(n_clusters=num_words, random_state=33)
kmeans.fit(des_matrix)
centres = kmeans.cluster_centers_ # visual-word cluster centres
return centres, des_list
def VLAD_des2feature(self, des, num_words, centures):
img_features = np.zeros((num_words, self.feature_num), 'float32')
for i in range(des.shape[0]):
# select the nearest center
feature_k_rows = np.ones((num_words, self.feature_num), 'float32')
feature = des[i]
feature_k_rows = feature_k_rows*feature
feature_k_rows = np.sum((feature_k_rows-centures)**2, 1)
index = np.argmin(feature_k_rows) # nearest centre = smallest squared distance
nearest_center = centures[index]
# caculate the residual
residual = feature - nearest_center
img_features[index] += residual
norm = np.linalg.norm(img_features)
img_features = img_features / norm
# PCA TODO
img_features = img_features.flatten()
return img_features
def des2feature(self, des, num_words, centures):
img_feature_vec = np.zeros((1, num_words), 'float32')
for i in range(des.shape[0]):
feature_k_rows = np.ones((num_words, self.feature_num), 'float32')
feature = des[i]
feature_k_rows = feature_k_rows*feature
feature_k_rows = np.sum((feature_k_rows-centures)**2, 1)
index = np.argmin(feature_k_rows) # nearest centre = smallest squared distance
img_feature_vec[0][index] += 1
return img_feature_vec
def get_all_features(self, des_list, num_words, centres):
print(f'{self.method} feature encoding ......')
allvec = np.zeros((len(des_list), num_words), 'float32')
for i in range(len(des_list)):
if des_list[i] is not None:
allvec[i] = self.des2feature(
centures=centres, des=des_list[i], num_words=num_words)
if i % 50 == 0:
print('\t{} encode has finished'.format(i))
return allvec
def getNearestImg(self, feature, dataset, num_close):
features = np.ones((dataset.shape[0], len(feature)), 'float32')
features = features*feature
dist = np.sum((features-dataset)**2, 1)
dist_index = np.argsort(dist)
return dist_index[:num_close]
def train(self):
img_paths = self.get_img_path(self.dataset_path)
np.save(os.path.join(self.method, 'image_paths.npy'), np.array(img_paths))
centres, des_list = self.getClusterCentures(
img_paths=img_paths, num_words=self.num_words, dataset_matrix=None)
matrix = np.array(des_list)
np.save(os.path.join(self.method, 'features_bases.npy'), matrix)
np.save(os.path.join(self.method, 'centres.npy'), np.array(centres))
img_features = self.get_all_features(
des_list=des_list, num_words=num_words, centres=centres)
np.save(os.path.join(self.method, 'code_bases.npy'),
np.array(img_features))
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--method', type=str, default='sift',
choices=['sift', 'surf', 'orb', 'test'])
config = parser.parse_args()
train_dataset_path = './ImageBase/train'
num_words = 12
sift_train = Train(dataset_path=train_dataset_path,
num_words=num_words, method=config.method)
sift_train.train()
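# A hypothetical query flow reusing the saved artifacts (paths and the
# query image are assumptions):
# centres = np.load('sift/centres.npy')
# code_bases = np.load('sift/code_bases.npy')
# kp, des = sift_train.feat_extract.detectAndCompute(query_gray, None)
# feature = sift_train.des2feature(des=des, num_words=num_words, centures=centres)
# nearest = sift_train.getNearestImg(feature[0], code_bases, num_close=5)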
``` |
{
"source": "JonM0/pytest-mock-resources",
"score": 2
} |
#### File: pytest_mock_resources/container/base.py
```python
import contextlib
import json
import pathlib
import time
import responses
from pytest_mock_resources.config import get_env_config
from pytest_mock_resources.hooks import get_pytest_flag, use_multiprocess_safe_mode
DEFAULT_RETRIES = 40
DEFAULT_INTERVAL = 0.5
class ContainerCheckFailed(Exception):
"""Unable to connect to a Container."""
def retry(func=None, *, args=(), kwargs={}, retries=1, interval=DEFAULT_INTERVAL, on_exc=Exception):
while retries:
retries -= 1
try:
result = func(*args, **kwargs)
except on_exc:
if not retries:
raise
time.sleep(interval)
else:
return result
def get_container(pytestconfig, config, *, retries=DEFAULT_RETRIES, interval=DEFAULT_INTERVAL):
import docker
import docker.errors
multiprocess_safe_mode = use_multiprocess_safe_mode(pytestconfig)
# XXX: moto library may over-mock responses. SEE: https://github.com/spulec/moto/issues/1026
responses.add_passthru("http+docker")
# Recent versions of the `docker` client make API calls to `docker` at this point
# if provided the "auto" version. This leads to potential startup failure if
# the docker socket isn't yet available.
version = get_env_config("docker", "api_version", "auto")
client = retry(docker.from_env, kwargs=dict(version=version), retries=5, interval=1)
# The creation of the container can fail and leave us in a situation where
# we need to know whether it has been created already or not.
container = None
run_kwargs = dict(
ports=config.ports(), environment=config.environment(), name=container_name(config.name)
)
try:
if multiprocess_safe_mode:
from filelock import FileLock
# get the temp directory shared by all workers (assuming pytest-xdist)
root_tmp_dir = pytestconfig._tmp_path_factory.getbasetemp().parent
fn = root_tmp_dir / f"pmr_create_container_{config.port}.lock"
# wait for the container one process at a time
with FileLock(str(fn)):
container = wait_for_container(
client,
config.check_fn,
run_args=(config.image,),
run_kwargs=run_kwargs,
retries=retries,
interval=interval,
)
if container:
record_container_creation(pytestconfig, container)
else:
container = wait_for_container(
client,
config.check_fn,
run_args=(config.image,),
run_kwargs=run_kwargs,
retries=retries,
interval=interval,
)
yield
finally:
cleanup_container = get_pytest_flag(pytestconfig, "pmr_cleanup_container", default=True)
if cleanup_container and container and not multiprocess_safe_mode:
container.kill()
client.close()
def wait_for_container(
client, check_fn, *, run_args, run_kwargs, retries=DEFAULT_RETRIES, interval=DEFAULT_INTERVAL
):
"""Wait for evidence that the container is up and healthy.
The caller must provide a `check_fn` which should `raise ContainerCheckFailed` if
it finds that the container is not yet up.
"""
import docker.errors
try:
# Perform a single attempt, for the happy-path where the container already exists.
retry(check_fn, retries=1, interval=interval, on_exc=ContainerCheckFailed)
except ContainerCheckFailed:
# In the event it doesn't exist, we attempt to start the container
try:
container = client.containers.run(*run_args, **run_kwargs, detach=True, remove=True)
except docker.errors.APIError as e:
container = None
# This sometimes happens if multiple container fixtures race for the first
# creation of the container; we still want to retry the wait in this case.
port_allocated_error = "port is already allocated"
name_allocated_error = "to be able to reuse that name"
error = str(e)
if port_allocated_error not in error and name_allocated_error not in error:
raise
# And then we perform more lengthy retry cycle.
retry(check_fn, retries=retries, interval=interval, on_exc=ContainerCheckFailed)
return container
return None
def container_name(name: str) -> str:
return f"pmr_{name}"
def record_container_creation(pytestconfig, container):
"""Record the fact of the creation of a container.
Record both a local reference to the container in pytest's `config` fixture,
as well as a global PMR lock file of created containers.
"""
pytestconfig._pmr_containers.append(container)
fn = get_tmp_root(pytestconfig, parent=True)
with load_container_lockfile(fn) as data:
data.append(container.id)
fn.write_text(json.dumps(data))
def get_tmp_root(pytestconfig, *, parent=False):
"""Get the path to the PMR lock file."""
tmp_path_factory = pytestconfig._tmp_path_factory
root_tmp_dir = tmp_path_factory.getbasetemp().parent
if parent:
root_tmp_dir = root_tmp_dir.parent
return root_tmp_dir / "pmr.json"
@contextlib.contextmanager
def load_container_lockfile(path: pathlib.Path):
"""Produce the contents of the given file behind a file lock."""
import filelock
with filelock.FileLock(str(path) + ".lock"):
if path.is_file():
with open(path, "rb") as f:
yield json.load(f)
else:
yield []
```
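To make the `check_fn` contract in `wait_for_container` concrete, a hypothetical Redis health check might look like the sketch below; the `redis` client usage is an assumption and not part of this module.
```python
# Hypothetical check_fn factory satisfying the wait_for_container contract:
# the returned callable takes no arguments and raises ContainerCheckFailed
# while the container is not yet reachable.
import redis

def make_redis_check(config):
    def check():
        try:
            redis.Redis(host=config.host, port=config.port).ping()
        except redis.exceptions.ConnectionError:
            raise ContainerCheckFailed(
                "Unable to connect to redis at {}:{}".format(config.host, config.port)
            )
    return check
```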
#### File: fixture/database/mongo.py
```python
import pytest
from pytest_mock_resources.compat import pymongo
from pytest_mock_resources.container.base import get_container
from pytest_mock_resources.container.mongo import MongoConfig
from pytest_mock_resources.fixture.database.generic import assign_fixture_credentials
@pytest.fixture(scope="session")
def pmr_mongo_config():
"""Override this fixture with a :class:`MongoConfig` instance to specify different defaults.
Examples:
>>> @pytest.fixture(scope='session')
... def pmr_mongo_config():
... return MongoConfig(image="mongo:3.4", root_database="foo")
"""
return MongoConfig()
@pytest.fixture(scope="session")
def _mongo_container(pytestconfig, pmr_mongo_config):
yield from get_container(pytestconfig, pmr_mongo_config)
def create_mongo_fixture(scope="function"):
"""Produce a mongo fixture.
Any number of fixture functions can be created. Under the hood they will all share the same
database server.
Arguments:
scope: Passthrough pytest's fixture scope.
"""
@pytest.fixture(scope=scope)
def _(_mongo_container, pmr_mongo_config):
return _create_clean_database(pmr_mongo_config)
return _
def _create_clean_database(config):
root_client = pymongo.MongoClient(config.host, config.port)
root_db = root_client[config.root_database]
# Create a collection called `pytestMockResourceDbs' in the admin tab if not already created.
db_collection = root_db["pytestMockResourcesDbs"]
# Create a Document in the `pytestMockResourcesDbs` collection.
result = db_collection.insert_one({})
# Create a database where the name is equal to that ID.
db_id = str(result.inserted_id)
new_database = root_client[db_id]
# Create a user as that databases owner
password = "password" # nosec
new_database.command("createUser", db_id, pwd=password, roles=["dbOwner"])
# pass back an authenticated db connection
limited_client = pymongo.MongoClient(
config.host, config.port, username=db_id, password=password, authSource=db_id
)
limited_db = limited_client[db_id]
assign_fixture_credentials(
limited_db,
drivername="mongodb",
host=config.host,
port=config.port,
database=db_id,
username=db_id,
password="password",
)
return limited_db
```
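Typical usage of the produced fixture might look like the following sketch; the fixture yields an authenticated `pymongo` database, so tests use it like any `pymongo.database.Database` (the collection and document here are assumptions).
```python
# Hypothetical test module using a mongo fixture.
from pytest_mock_resources import create_mongo_fixture

mongo = create_mongo_fixture()

def test_insert_and_find(mongo):
    collection = mongo["customers"]
    collection.insert_one({"name": "ada"})
    assert collection.find_one({"name": "ada"})["name"] == "ada"
```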
#### File: database/relational/generic.py
```python
import abc
import fnmatch
from typing import Tuple
import attr
import six
from sqlalchemy import MetaData
from sqlalchemy.ext.declarative import declarative_base, DeclarativeMeta
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.sql.ddl import CreateSchema
from sqlalchemy.sql.schema import Table
from pytest_mock_resources import compat
@six.add_metaclass(abc.ABCMeta)
class AbstractAction(object):
@abc.abstractmethod
def run(self, engine_manager):
"""Run an action on a database via the passed-in engine_manager instance."""
class Rows(AbstractAction):
def __init__(self, *rows):
self.rows = rows
def run(self, engine_manager):
rows = self._get_stateless_rows(self.rows)
metadatas = self._get_metadatas(rows)
for metadata in metadatas:
engine_manager.create_ddl(metadata)
self._create_rows(engine_manager.engine, rows)
@staticmethod
def _get_stateless_rows(rows):
"""Create rows that aren't associated with any other SQLAlchemy session."""
stateless_rows = []
for row in rows:
row_args = row.__dict__
row_args.pop("_sa_instance_state", None)
stateless_row = type(row)(**row_args)
stateless_rows.append(stateless_row)
return stateless_rows
@staticmethod
def _get_metadatas(rows):
return {row.metadata for row in rows}
@staticmethod
def _create_rows(engine, rows):
Session = sessionmaker(bind=engine)
session = Session()
session.add_all(rows)
session.commit()
session.close()
class Statements(AbstractAction):
def __init__(self, *statements):
self.statements = statements
def run(self, engine_manager):
for statement in self.statements:
engine_manager.engine.execute(statement)
@attr.s
class EngineManager(object):
engine = attr.ib()
ordered_actions = attr.ib(default=attr.Factory(tuple))
tables: Tuple = attr.ib(default=None, converter=attr.converters.optional(tuple))
session = attr.ib(default=False)
default_schema = attr.ib(default=None)
_ddl_created = False
def _run_actions(self):
BaseType = type(declarative_base())
for action in self.ordered_actions:
if isinstance(action, MetaData):
self.create_ddl(action)
elif isinstance(action, BaseType):
self.create_ddl(action.metadata)
elif isinstance(action, AbstractAction):
action.run(self)
elif callable(action):
self._execute_function(action)
else:
raise ValueError(
"create_fixture function takes in sqlalchemy.MetaData or actions as inputs only."
)
def _create_schemas(self, metadata):
if self._ddl_created:
return
all_schemas = {table.schema for table in metadata.tables.values() if table.schema}
for schema in all_schemas:
if self.default_schema == schema:
continue
statement = CreateSchema(schema, quote=True)
self.engine.execute(statement)
def _create_tables(self, metadata):
if not self.tables:
metadata.create_all(self.engine)
return
table_objects = {
table_object
for table in self.tables
for table_object in identify_matching_tables(metadata, table)
}
metadata.create_all(self.engine, tables=list(table_objects))
def _execute_function(self, fn):
Session = sessionmaker(bind=self.engine)
session = Session()
fn(session)
session.commit()
session.close()
def create_ddl(self, metadata):
self._create_schemas(metadata)
self._create_tables(metadata)
self._ddl_created = True
def manage_sync(self, session=None):
try:
self._run_actions()
if session:
if isinstance(session, sessionmaker):
session_factory = session
else:
session_factory = sessionmaker(bind=self.engine)
Session = scoped_session(session_factory)
session = Session(bind=self.engine)
yield session
session.close()
else:
yield self.engine
finally:
self.engine.dispose()
async def manage_async(self, session=None):
try:
self._run_actions()
async_engine = self._get_async_engine()
if session:
if isinstance(session, sessionmaker):
session_factory = session
else:
session_factory = sessionmaker(
async_engine,
expire_on_commit=False,
class_=compat.sqlalchemy.asyncio.AsyncSession,
)
async with session_factory() as session:
yield session
else:
yield async_engine
finally:
self.engine.dispose()
def _get_async_engine(self, isolation_level=None):
url = compat.sqlalchemy.URL(
drivername="postgresql+asyncpg",
username=self.engine.pmr_credentials.username,
password=self.engine.pmr_credentials.password,
host=self.engine.pmr_credentials.host,
port=self.engine.pmr_credentials.port,
database=self.engine.pmr_credentials.database,
query=dict(ssl="disable"),
)
options = {}
if isolation_level:
options["isolation_level"] = isolation_level
return compat.sqlalchemy.asyncio.create_async_engine(url, **options)
def identify_matching_tables(metadata, table_specifier):
if isinstance(table_specifier, DeclarativeMeta):
return [table_specifier.__table__]
if isinstance(table_specifier, Table):
return [table_specifier]
tables = [
table
for table_name, table in metadata.tables.items()
if fnmatch.fnmatch(table_name, table_specifier)
]
if tables:
return tables
table_names = ", ".join(sorted(metadata.tables.keys()))
raise ValueError(
'Could not identify any tables matching "{}" from: {}'.format(table_specifier, table_names)
)
```
#### File: patch/redshift/psycopg2.py
```python
import contextlib
from unittest import mock
from sqlalchemy.sql.base import Executable
from pytest_mock_resources.compat import psycopg2
from pytest_mock_resources.container.postgres import PostgresConfig
from pytest_mock_resources.patch.redshift.mock_s3_copy import mock_s3_copy_command, strip
from pytest_mock_resources.patch.redshift.mock_s3_unload import mock_s3_unload_command
@contextlib.contextmanager
def patch_connect(config: PostgresConfig, database: str):
new_connect = mock_psycopg2_connect(config, database, _connect=psycopg2._connect)
# We patch `psycopg2._connect` specifically because it allows us to patch the
# connection regardless of the import style used by the caller.
with mock.patch("psycopg2._connect", new=new_connect) as p:
yield p
def mock_psycopg2_connect(config: PostgresConfig, database: str, _connect):
"""Patch `psycopg2._connect`.
Add support for S3 COPY and UNLOAD.
"""
class CustomCursor(psycopg2.extensions.cursor):
"""A custom cursor class to define a custom execute method."""
def execute(self, sql, args=None):
if isinstance(sql, Executable):
return super().execute(sql, args)
if strip(sql).lower().startswith("copy"):
mock_s3_copy_command(sql, self)
sql = "commit"
if strip(sql).lower().startswith("unload"):
mock_s3_unload_command(sql, self)
sql = "commit"
return super().execute(sql, args)
def _mock_psycopg2_connect(*args, **kwargs):
"""Substitute the default cursor with a custom cursor."""
conn = _connect(*args, **kwargs)
dsn_info = conn.get_dsn_parameters()
# We want to be sure to *only* patch the cursor's behavior when we think the
# database connection is for the database we're specifically referencing. This
# should prevent over-patching for connections which are not relevant to our
# fixture.
connection_info_matches = (
config.host == dsn_info["host"]
and str(config.port) == dsn_info["port"]
and database == dsn_info["dbname"]
)
if connection_info_matches:
conn.cursor_factory = CustomCursor
return conn
return _mock_psycopg2_connect
```
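A rough usage sketch: inside `patch_connect`, any `psycopg2` connection whose DSN matches the fixture's host, port, and database gets the COPY/UNLOAD-aware cursor. The connection parameters and SQL below are assumptions.
```python
# Hypothetical use of patch_connect (parameters and SQL are illustrative).
import psycopg2
from pytest_mock_resources.container.postgres import PostgresConfig

config = PostgresConfig()
with patch_connect(config, database="pmr_test_db"):
    conn = psycopg2.connect(host=config.host, port=config.port,
                            dbname="pmr_test_db", user="user", password="password")
    with conn.cursor() as cursor:
        # Intercepted by CustomCursor and handled by mock_s3_copy_command:
        cursor.execute("COPY t FROM 's3://bucket/key' CREDENTIALS '...'")
```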
#### File: fixture/database/test_rows.py
```python
from sqlalchemy import Column, Integer, SmallInteger
from sqlalchemy.ext.declarative import declarative_base
from pytest_mock_resources import create_mysql_fixture, create_postgres_fixture, Rows
Base = declarative_base()
class Quarter(Base):
__tablename__ = "quarter"
id = Column(Integer, primary_key=True)
year = Column(SmallInteger, nullable=False)
quarter = Column(SmallInteger, nullable=False)
rows = Rows(
Quarter(id=1, year=2012, quarter=1),
Quarter(id=2, year=2012, quarter=2),
Quarter(id=3, year=2012, quarter=3),
Quarter(id=4, year=2012, quarter=4),
)
postgres = create_postgres_fixture(rows)
mysql = create_mysql_fixture(rows)
def test_rows_postgres(postgres):
execute = postgres.execute(
"""
SELECT *
FROM quarter
ORDER BY id
"""
)
assert [(1, 2012, 1), (2, 2012, 2), (3, 2012, 3), (4, 2012, 4)] == list(execute)
def test_rows_mysql(mysql):
execute = mysql.execute(
"""
SELECT *
FROM quarter
ORDER BY id
"""
)
assert [(1, 2012, 1), (2, 2012, 2), (3, 2012, 3), (4, 2012, 4)] == list(execute)
SecondBase = declarative_base()
class Report(SecondBase):
__tablename__ = "report"
id = Column(Integer, primary_key=True)
rows = Rows(Quarter(id=1, year=2012, quarter=1), Quarter(id=2, year=2012, quarter=2), Report(id=3))
base_2_postgres = create_postgres_fixture(rows)
base_2_mysql = create_mysql_fixture(rows)
def test_2_bases_postgres(base_2_postgres):
execute = base_2_postgres.execute(
"""
SELECT *
FROM quarter
ORDER BY id
"""
)
assert [(1, 2012, 1), (2, 2012, 2)] == list(execute)
execute = base_2_postgres.execute(
"""
SELECT *
FROM report
"""
)
assert [(3,)] == list(execute)
def test_2_bases_mysql(base_2_mysql):
execute = base_2_mysql.execute(
"""
SELECT *
FROM quarter
ORDER BY id
"""
)
assert [(1, 2012, 1), (2, 2012, 2)] == list(execute)
execute = base_2_mysql.execute(
"""
SELECT *
FROM report
"""
)
assert [(3,)] == list(execute)
```
#### File: fixture/database/test_statements.py
```python
from pytest_mock_resources import (
create_mysql_fixture,
create_postgres_fixture,
create_redshift_fixture,
create_sqlite_fixture,
Statements,
)
statements = Statements("CREATE VIEW cool_view as select 3", "CREATE VIEW cool_view_2 as select 1")
postgres = create_postgres_fixture(statements)
sqlite = create_sqlite_fixture(statements)
mysql = create_mysql_fixture(statements)
def test_statements_postgres(postgres):
execute = postgres.execute(
"""
SELECT table_name
FROM INFORMATION_SCHEMA.views
WHERE table_name in ('cool_view', 'cool_view_2')
ORDER BY table_name
"""
)
result = [row[0] for row in execute]
assert ["cool_view", "cool_view_2"] == result
def test_statements_mysql(mysql):
execute = mysql.execute(
"""
SELECT table_name
FROM INFORMATION_SCHEMA.views
WHERE table_name in ('cool_view', 'cool_view_2')
AND table_schema = (select database())
ORDER BY table_name
"""
)
result = [row[0] for row in execute]
assert ["cool_view", "cool_view_2"] == result
statements = Statements(
"""
CREATE TABLE account(
user_id serial PRIMARY KEY,
username VARCHAR (50) UNIQUE NOT NULL,
password VARCHAR (50) NOT NULL
);
INSERT INTO account VALUES (1, 'user1', 'password1')
"""
)
redshift = create_redshift_fixture(statements)
def test_multi_statement_statements(redshift):
execute = redshift.execute("SELECT password FROM account")
result = sorted([row[0] for row in execute])
assert ["password1"] == result
```
#### File: pytest-mock-resources/tests/test_examples.py
```python
import pytest
@pytest.mark.redis
def test_multiprocess_redis_database(pytester):
pytester.copy_example()
# The `-n 4` are here is tightly coupled with the implementation of `test_split.py`.
args = ["-vv", "-n", "4", "test_split.py"]
result = pytester.inline_run(*args)
result.assertoutcome(passed=4, skipped=0, failed=0)
@pytest.mark.postgres
def test_multiprocess_container_cleanup_race_condition(pytester):
pytester.copy_example()
# The `-n 2` here is tightly coupled with the implementation of `test_split.py`.
args = ["-vv", "-n", "2", "--pmr-multiprocess-safe", "test_split.py"]
result = pytester.inline_run(*args)
result.assertoutcome(passed=2, skipped=0, failed=0)
``` |
{
"source": "jonmabale/automate-the-boring-stuff",
"score": 5
} |
#### File: automate-the-boring-stuff/projects/collatz-exception.py
```python
def collatz(number):
if number % 2 == 0:
print(number // 2)
return number // 2
elif number % 2 == 1:
print(3 * number + 1)
return 3 * number + 1
return
# Then write a program that lets the user type in an integer and that
# keeps calling collatz() on that number until the function returns the
# value 1. (Amazingly enough, this sequence actually works for any integer —
# sooner or later, using this sequence, you’ll arrive at 1! Even
# mathematicians aren’t sure why. Your program is exploring what’s
# called the Collatz sequence, sometimes called “the simplest impossible
# math problem.”)
# Remember to convert the return value from input() to an integer with
# the int() function otherwise, it will be a string value.
# n = input('Enter a number: ')
# while n != 1:
# n = collatz(int(n))
# Add try and except statements to the previous project to detect whether the
# user types in a noninteger string. Normally, the int() function will raise
# a ValueError error if it is passed a noninteger string, as in int('puppy').
# In the except clause, print a message to the user saying they must enter
# an integer.
try:
n = input('Enter a number: ')
while n != 1:
n = collatz(int(n))
except ValueError:
print('Error: Invalid integer type.')
```
#### File: automate-the-boring-stuff/projects/table-printer.py
```python
def print_table(table):
# Create a list containing the same number of 0 values as
# the number of inner lists in tableData
col_width = [0] * len(table)
inner_list = 0 # inner list count
# Find the longest word in each of the inner_list so that
# the col_width is wide enough to fit all the words
while inner_list < len(table):
for word in table_data[inner_list]:
if len(word) > col_width[inner_list]:
col_width[inner_list] = len(word)
inner_list += 1
# Iterate over inner_lists and print each corresponding word from
# each inner_lists and right aligned with the col_width value.
# A space was added to both sides of each word to create column spaces.
for each_word in range(len(table[0])):
new_row = ""
for inner_list in range(len(table)):
new_row += (
" "
+ table[inner_list][each_word].rjust(col_width[inner_list])
+ " "
)
print(new_row)
table_data = [
["apples", "oranges", "cherries", "banana"],
["Alice", "Bob", "Carol", "David"],
["dogs", "cats", "moose", "goose"],
]
print_table(table_data)
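# Expected output (each column right-aligned to its longest word):
#    apples Alice  dogs
#   oranges   Bob  cats
#  cherries Carol moose
#    banana David goose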
``` |
{
"source": "jonmaddock/enigma",
"score": 4
} |
#### File: enigma/enigma/keyer.py
```python
import numpy as np
import simpleaudio as sa
# Dit and dah timings
MORSE_DIT_FREQ = 10 # dits per second
MORSE_DIT = 1
MORSE_DAH = 3
# Audio settings
FREQUENCY = 440 # 440 Hz
SAMPLE_RATE = 44100
class Keyer:
"""Convert Morse code to audio and play it."""
def __init__(self, morse):
"""Convert Morse to playable audio.
:param morse: dot-and-dash Morse code
:type morse: str
"""
self.signal = self.create_binary_signal(morse)
self.audio = self.convert_audio()
def create_binary_signal(self, morse):
"""Converts Morse code into a binary signal.
For example, ".- ." becomes "1011100001"
:param morse: dot-and-dash Morse code
:type morse: str
:return: binary Morse code signal
:rtype: np.ndarray
"""
signal_list = []
# Convert each character to a binary dit, dah or gap;
# a one-dit gap always follows each symbol
for char in morse:
if char == ".":
signal_list += MORSE_DIT * [1]
elif char == "-":
signal_list += MORSE_DAH * [1]
elif char == " ":
# Inter-character gap: together with the trailing one-dit gap below,
# a space yields one dah (3 dits) of silence, matching the unit tests
signal_list += (MORSE_DAH - MORSE_DIT) * [0]
signal_list += MORSE_DIT * [0]
# TODO Trailing silence: the signal still ends with one extra dit of silence
# signal_list is now list of binary digits, each representing a dit
# duration of on or off
signal = np.array(signal_list)
return signal
def convert_audio(self):
"""Convert binary signal to audio.
Encode sine wave with binary signal and create playable audio.
:return: 16-bit audio waveform
:rtype: np.ndarray
"""
# Stretch signal array to match the required sample rate and duration
samples_per_dit = int(round(SAMPLE_RATE / MORSE_DIT_FREQ))
signal_stretched = np.repeat(self.signal, samples_per_dit)
# Create increasing time value array of equivalent length
duration = signal_stretched.size / SAMPLE_RATE
t = np.linspace(
0.0, duration, num=signal_stretched.size, endpoint=False
)
# Create a sine wave at 440 Hz
sine = np.sin(2 * np.pi * FREQUENCY * t)
# Encode sine wave with signal
enc_sine = sine * signal_stretched
# Ensure that sine is in 16-bit range, normalised to maximum amplitude
audio = (2 ** 15 - 1) * enc_sine / (np.max(np.abs(enc_sine)))
# Convert to 16-bit data
audio = audio.astype(np.int16)
return audio
def play(self):
"""Play Morse code.
In the case of audio errors (i.e. on CI system with no sound card),
catch exception and notify.
"""
try:
# Start playback
play_obj = sa.play_buffer(self.audio, 1, 2, SAMPLE_RATE)
# Wait for playback to finish before exiting
play_obj.wait_done()
except sa._simpleaudio.SimpleaudioError:
print("There was an error with audio playback.")
```
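A minimal end-to-end sketch, assuming the dot/dash/space Morse format consumed by `create_binary_signal` (the Morse string below is illustrative):
```python
# Hypothetical usage of Keyer: ".... .." is "HI" in Morse.
from enigma.keyer import Keyer

keyer = Keyer(".... ..")
keyer.play()  # plays a 440 Hz tone keyed at 10 dits per second
```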
#### File: tests/unit/test_keyer.py
```python
import pytest
import numpy as np
from enigma.keyer import Keyer
def mock_signal(*args):
"""Mock creation of a binary signal array.
:return: binary array
:rtype: np.ndarray
"""
signal = np.array([1, 0, 1])
return signal
def mock_audio(*args):
"""Return random 16-bit audio array.
:return: 16-bit audio array
:rtype: np.ndarray
"""
audio = np.random.rand(3)
return audio.astype(np.int16)
@pytest.fixture
def keyer(monkeypatch):
"""Create an instance of Keyer.
:param monkeypatch: fixture for mocking
:type monkeypatch: _pytest.monkeypatch.MonkeyPatch
:return: Keyer object
:rtype: enigma.keyer.Keyer
"""
monkeypatch.setattr(Keyer, "create_binary_signal", mock_signal)
monkeypatch.setattr(Keyer, "convert_audio", mock_audio)
morse_code = ".- ."
keyer = Keyer(morse_code)
return keyer
def test_init(keyer):
"""Test instantiation of Keyer.
:param keyer: Keyer object
:type keyer: enigma.keyer.Keyer
"""
# Test attributes set by mocked methods
# Test morse converted to binary
signal_exp = np.array([1, 0, 1])
np.testing.assert_array_equal(keyer.signal, signal_exp)
# Test morse converted to 16-bit audio array
assert keyer.audio.dtype == np.dtype("int16")
def test_create_binary_signal(monkeypatch):
"""Test morse to binary conversion.
:param monkeypatch: fixture for mocking
:type monkeypatch: _pytest.monkeypatch.MonkeyPatch
"""
# Keyer.create_binary_signal() is run in init(); don't mock so it can be
# tested. Init with empty morse string
monkeypatch.setattr(Keyer, "convert_audio", mock_audio)
keyer = Keyer("")
morse = ".- ."
signal_exp = np.array([1, 0, 1, 1, 1, 0, 0, 0, 0, 1, 0])
signal = keyer.create_binary_signal(morse)
np.testing.assert_array_equal(signal, signal_exp)
def test_convert_audio(monkeypatch):
"""Test conversion of binary to audio.
:param monkeypatch: fixture for mocking
:type monkeypatch: _pytest.monkeypatch.MonkeyPatch
"""
# Keyer.convert_audio() is run in init(); don't mock so it can be
# tested.
monkeypatch.setattr(Keyer, "create_binary_signal", mock_signal)
morse = ".- ."
keyer = Keyer(morse)
# Test morse converted to 16-bit audio array
# TODO convert_audio() is actually run twice; once in init() and again
# explicitly. Could this be improved?
audio = keyer.convert_audio()
assert audio.dtype == np.dtype("int16")
def test_play(keyer):
"""Check audio can be played.
:param keyer: Keyer object
:type keyer: enigma.keyer.Keyer
"""
# Just check no exceptions are thrown
keyer.play()
``` |
{
"source": "Jonmainhart/sdev400_hw3",
"score": 4
} |
#### File: sdev400_hw3/hw3/dynamo_functions.py
```python
import json
import boto3
from boto3.dynamodb.conditions import Key, Attr
from operator import itemgetter
def table_exists(table_name):
"""
    Returns the names of all available tables so the caller can check whether
    a table with the given name exists.
    :param table_name: str
    :return: list of available table names
    # Use only in a clean DynamoDB, as it will list all tables, not just sports;
    # it might be weird to see Hockey, Baseball, BankingInfo, Soccer show up in your output
Some portions of this code Copyright 2010-2019
Amazon.com, Inc. or its affiliates. All Rights Reserved.
MoviesListTables.py
"""
dynamodb = boto3.resource('dynamodb')
tables_available = []
for table in dynamodb.tables.all():
tables_available.append(table.name) # Amazon
return tables_available
def find_team(sport_name, team_name):
"""
    Scans a DynamoDB table for a specific team. Returns all scores if they exist,
    otherwise returns a formatted message specifying which teams are available to
    search for.
    :param sport_name: str
    :param team_name: str
    :return: dict of scan results if the team is found, str otherwise
"""
dynamodb = boto3.resource('dynamodb')
    # Parenthesize the OR explicitly: '&' binds tighter than '|', so without the
    # parentheses the GameDate check would apply only to away-team matches.
    result_set = dynamodb.Table(sport_name).scan(
        FilterExpression=(Attr("HomeTeamName").eq(team_name) | Attr("AwayTeamName").eq(team_name)) & Attr("GameDate").exists())
for item in result_set["Items"]:
if team_name in item["HomeTeamName"] or team_name in item["AwayTeamName"]:
return result_set
# the requested team is not in the table, get the available teams
teams_available = dynamodb.Table(sport_name).scan(ProjectionExpression="HomeTeamName, AwayTeamName")
# get rid of the duplicate names
teams = []
for team in teams_available["Items"]:
if team["HomeTeamName"] not in teams:
teams.append(team["HomeTeamName"])
elif team["AwayTeamName"] not in teams:
teams.append(team["AwayTeamName"])
# return a formatted string containing the names
return "{} not in {}. Try {}.".format(team_name, sport_name, ', '.join([str(team) for team in teams]))
def get_scores(sport_name, team_name):
"""
Searches DynamoDB for specific sports and teams and returns a formatted message
containing the last several scores if the team exists in the data set. The maximum
number of scores is controlled by the MAX_SCORES constant.
:param sport_name: str
:param team_name: str
:return: str
"""
MAX_SCORES = 5
dynamodb = boto3.resource('dynamodb')
# sport and team name must be present
    if not sport_name or not team_name:
return "You must enter a sport and team name!"
# check for sport
sports_available = table_exists(sport_name)
if sport_name not in sports_available:
return ("{} not found. Try searching for {}".format(sport_name, sports_available))
# get the scores - the scores may be a message
scores = find_team(sport_name, team_name)
formatted_scores = []
score_template = '{} {} {} {} to {} on {}'
# do the following if the scores are scores and not a message
if type(scores) is not str:
# sort the scores by GameDate - most recent on top
# https://stackoverflow.com/questions/72899/how-do-i-sort-a-list-of-dictionaries-by-a-value-of-the-dictionary#73050
sorted_scores = sorted(scores["Items"], key=itemgetter('GameDate'), reverse=True) # thank you S.O.
# get the latest n scores
sorted_scores = sorted_scores[:MAX_SCORES]
for item in sorted_scores:
# assign values to variables
home_team = item["HomeTeamName"]
away_team = item["AwayTeamName"]
home_score = item["HomeTeamScore"]
away_score = item["AwayTeamScore"]
game_date = item["GameDate"]
# win - lose - tie conditions accounting for whether the requested team
# is the home team or the away team - probably a cleaner way to handle this...
if team_name == home_team and home_score > away_score:
# requested team is home team - wins
formatted_scores.append(score_template.format(home_team, 'beat', away_team, home_score, away_score, game_date))
elif team_name == away_team and away_score > home_score:
# requested team is away team - wins
formatted_scores.append(score_template.format(away_team, 'beat', home_team, away_score, home_score, game_date))
elif team_name == home_team and home_score < away_score:
# requested team is home team - loses
formatted_scores.append(score_template.format(home_team, 'lost to', away_team, home_score, away_score, game_date))
elif team_name == away_team and away_score < home_score:
# requested team is away team - loses
formatted_scores.append(score_template.format(away_team, 'lost to', home_team, away_score, home_score, game_date))
elif team_name == home_team and home_score == away_score:
# requested team is home team - ties
formatted_scores.append(score_template.format(home_team, 'tied', away_team, home_score, away_score, game_date))
elif team_name == away_team and home_score == away_score:
# requested team is away team - ties
formatted_scores.append(score_template.format(away_team, 'tied', home_team, away_score, home_score, game_date))
# create message
# this will make a string with newline separators which don't render - looks sloppy
# tried using <br> but that didn't work either - looks as sloppy as \n
# message = "The latest game results for {} {} are: {}".format(team_name, sport_name, '\n'.join([str(score) for score in formatted_scores]))
# this will keep the list intact - looks better for this assignment
message = "The latest game results for {} {} are: {}".format(team_name, sport_name, formatted_scores)
# this returns the formatted scores message
return message
# otherwise return the message returned find_team()
return scores
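# Example call (hypothetical sport/team names; assumes the DynamoDB tables
# described above exist and AWS credentials are configured):
#
#     print(get_scores("Hockey", "Maple Leafs"))
#     # -> "The latest game results for Maple Leafs Hockey are: [...]"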
``` |
{
"source": "jonmarkprice/permutations",
"score": 3
} |
#### File: permutations/src/test.py
```python
import unittest
from unittest.case import skip
from main import compose, make_map, Permutation, identity, predecessor, successor
class TestCase(unittest.TestCase):
def test_str(self):
p1 = make_map([[1], [2], [3]])
self.assertEqual(str(p1), 'id')
def test_make_map(self):
p1 = make_map([[1, 2], [3]])
self.assertEqual(p1.cardinality, 3)
self.assertDictEqual(p1.mapping, {1: 2, 2: 1, 3: 3})
p1_abbrev = make_map([[1, 2]], n=3)
self.assertEqual(p1, p1_abbrev)
p2 = make_map([[1, 3], [2], [4]])
p2_abbrev = make_map([[1, 3]], n=4)
self.assertEqual(p2, p2_abbrev)
def test_str_make_map(self):
p1 = make_map([[1, 2], [3]])
self.assertEqual(str(p1), '(1 2)')
# TODO: More tests
# @unittest.skip('')
def test_compose(self):
s = Permutation(3, {1: 2, 2: 3, 3: 1}, name='s')
t = Permutation(3, {1: 2, 2: 1, 3: 3}, name='t')
self.assertEqual(str(s), '(1 2 3)')
self.assertEqual(str(t), '(1 2)')
st = Permutation(3, {1: 3, 2: 2, 3: 1})
self.assertDictEqual(st.mapping, compose(s, t).mapping)
self.assertEqual(str(compose(s, t)), '(1 3)')
self.assertEqual(str(compose(t, s)), '(2 3)')
def test_compose_different_cardinalities(self):
t = make_map([[1, 2]])
s = successor(3)
self.assertEqual(str(compose(s, t)), '(1 3)')
self.assertEqual(str(compose(t, s)), '(2 3)')
def test_power(self):
tr = make_map([[1, 2]])
self.assertEqual(tr.pow(1), tr)
self.assertEqual(tr.pow(2), identity(2))
succ = make_map([[1, 2, 3]])
pred = make_map([[1, 3, 2]])
self.assertEqual(succ.pow(1), succ)
self.assertEqual(succ.pow(2), pred)
self.assertEqual(succ.pow(3), identity(3))
self.assertEqual(pred.pow(2), succ)
def test_negative_power(self):
p1 = make_map([[1, 3], [2, 4, 5]])
self.assertEqual(compose(p1.pow(-3), p1.pow(3)), identity(5))
# @unittest.skip('')
def test_inverse(self):
p1 = make_map([[1, 3, 2]])
self.assertEqual(p1.inverse(), make_map([[1, 2, 3]]))
self.assertEqual(compose(p1, p1.inverse()), identity(3))
self.assertEqual(compose(p1.inverse(), p1), identity(3))
self.assertEqual(identity(4), identity(4).inverse())
p2 = make_map([[1, 2]])
self.assertEqual(p2, p2.inverse())
def test_ext(self):
p1 = make_map([[1, 2]])
        # p1_ext = p1.ext(4) is equivalent to make_map([[1, 2], [3], [4]])
self.assertEqual(p1.ext(4), make_map([[1, 2], [3], [4]]))
# self.assertDictEqual(p1.ext(4).mapping, make_map([[1, 2], [3], [4]]).mapping)
def test_composition(self):
# TODO: can we define compose as * as an operator
# what about inverse?
# TODO: write test to show that extending a pair
# compose(p, tr(i, j).ext(len(p))) = compose(p, tr(i, j))
# for any permutation p, and integer i, j <= n
# In other words, extending is unnecessary when composing.
# TODO: test [1, 1] = id
# Do similar thing with .ext and id
t = make_map([[1, 2]]) # TODO:
tr = lambda i, j: make_map([[i, j]])
sigma = lambda n: compose(t, successor(n))
for n in range(1, 5):
self.assertEqual(sigma(n).inverse(), compose(predecessor(n), t))
for k in range(2, n):
self.assertEqual(sigma(n).pow(-k), sigma(n).pow(k).inverse())
self.assertEqual(sigma(n).pow(-k), sigma(n).inverse().pow(k))
self.assertEqual(sigma(n).pow(-k) * sigma(n).pow(k), identity(n))
# This works!
# but it's the wrong equation
self.assertEqual(compose(sigma(n).pow(-k), t, sigma(n).pow(k)), tr(1, n - k + 1).ext(n))
# TODO: try this one
# self.assertEqual(compose(sigma(n).pow(k), t, sigma(n).pow(-k)), tr(1, k + 1).ext(n))
if __name__ == '__main__':
unittest.main()
``` |
{
"source": "jonmartinstorm/encryption-demo",
"score": 3
} |
#### File: encryption-demo/message encryption/server_asym.py
```python
import socket
import Crypto
from Crypto.PublicKey import RSA
from Crypto.Cipher import PKCS1_OAEP
from Crypto.Signature import pkcs1_15
from Crypto.Hash import MD5
from Crypto import Random
import base64
HOST = '127.0.0.1'
PORT = 6543
def krypter_asym(rsa_offentlignøkkel, klartekst):
"""
    This function generates ciphertext (encrypted text) from plaintext
    (unencrypted text) using a public key and returns a
    base64-encoded string.
"""
ciphertekst=PKCS1_OAEP.new(rsa_offentlignøkkel).encrypt(klartekst)
b64cipher=base64.b64encode(ciphertekst)
return b64cipher
def dekrypter_asym(rsa_privatnøkkel, b64cipher):
"""
    This function decrypts ciphertext (encrypted text) to plaintext
    (unencrypted text) using a private key and returns the plaintext.
"""
dekodet_ciphertekst = base64.b64decode(b64cipher)
klartekst = PKCS1_OAEP.new(rsa_privatnøkkel).decrypt(dekodet_ciphertekst)
return klartekst
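# Round-trip sketch (hypothetical keys; assumes `pub` and `priv` form a
# matching RSA key pair, e.g. priv = RSA.generate(2048), pub = priv.publickey()):
#
#     cipher = krypter_asym(pub, b"hello")
#     assert dekrypter_asym(priv, cipher) == b"hello"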
privat_nøkkel_server = b'-----<KEY>'
privat_nøkkel_server = RSA.import_key(privat_nøkkel_server)
offentlig_nøkkel_server = b'-----<KEY>'
offentlig_nøkkel_server = RSA.import_key(offentlig_nøkkel_server)
offentlig_nøkkel_client = b'-----<KEY>'
offentlig_nøkkel_client = RSA.import_key(offentlig_nøkkel_client)
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
s.bind((HOST, PORT))
s.listen()
conn, addr = s.accept()
with conn:
print('\nConnected by', addr, '\n')
while True:
data = conn.recv(1024)
if not data:
break
print("Fikk: ", dekrypter_asym(privat_nøkkel_server, data))
data = krypter_asym(offentlig_nøkkel_client, dekrypter_asym(privat_nøkkel_server, data))
conn.sendall(data)
```
#### File: encryption-demo/signing/signing.py
```python
import Crypto
from Crypto.PublicKey import RSA
from Crypto.Cipher import PKCS1_OAEP
from Crypto.Signature import pkcs1_15
from Crypto.Hash import MD5
from Crypto import Random
import base64
def rsanøkler():
"""
    This function creates a private key and a public key
"""
lengde=1024
privatnøkkel = RSA.generate(lengde, Random.new().read)
offentlignøkkel = privatnøkkel.publickey()
return privatnøkkel, offentlignøkkel
def krypter(rsa_offentlignøkkel, klartekst):
"""
    This function generates ciphertext (encrypted text) from plaintext
    (unencrypted text) using a public key and returns a
    base64-encoded string.
"""
ciphertekst=PKCS1_OAEP.new(rsa_offentlignøkkel).encrypt(klartekst)
b64cipher=base64.b64encode(ciphertekst)
return b64cipher
def dekrypter(rsa_privatnøkkel, b64cipher):
"""
    This function decrypts ciphertext (encrypted text) to plaintext
    (unencrypted text) using a private key and returns the plaintext.
"""
dekodet_ciphertekst = base64.b64decode(b64cipher)
klartekst = PKCS1_OAEP.new(rsa_privatnøkkel).decrypt(dekodet_ciphertekst)
return klartekst
def signer(rsa_privatnøkkel, data):
"""
    This function signs data with a private key
"""
hashet_data = MD5.new(data)
signert_data = pkcs1_15.new(rsa_privatnøkkel).sign(hashet_data)
b64_data = base64.b64encode(signert_data)
return b64_data
def verifiser(rsa_offentlignøkkel,data,signert_data):
"""
    This function verifies that data was signed with a private key
"""
hashet_data = MD5.new(data)
signert_data = base64.b64decode(signert_data)
try:
pkcs1_15.new(rsa_offentlignøkkel).verify(hashet_data, signert_data)
return True
except ValueError:
return False
def hovedfunksjon():
pn, on = rsanøkler()
tekst = b"Hei hele TBB!"
print("\nTekst: " + tekst.decode('utf-8'))
print("\nKrypterer")
ct = krypter(on, tekst)
print("Ciphertekst: " + ct.decode('utf-8'))
print("\nDekrypterer")
dt = dekrypter(pn, ct)
print("Dekryptert: " + dt.decode('utf-8'))
print()
print("\nSignerer")
signert = signer(pn, tekst)
print("Signert tekst: " + signert.decode())
print("\nVerifiserer")
verifisert = verifiser(on, tekst, signert)
print(f"Verifisert?: {verifisert}")
# private keys for the public ones in sertifikat.md
pkeyserv = b'-----<KEY> PRIVATE KEY-----'
pkeyserv = RSA.import_key(pkeyserv)
hash = b'bf35f93df686f28ea3aa7cbf4f7f4a5c'
signert_hash = signer(pkeyserv, hash)
print(f"Signert tekst: {signert_hash}")
pkeyca = b'-----<KEY>'
pkeyca = RSA.import_key(pkeyca)
offkeyserv = b'-----<KEY>'
signert_hash = signer(pkeyca, offkeyserv)
print(f"Signert nøkkel: {signert_hash}")
if __name__ == '__main__':
hovedfunksjon()
``` |
{
"source": "jonmarty/ffbo.neuroarch_component",
"score": 2
} |
#### File: ffbo.neuroarch_component/neuroarch_component/neuroarch_component.py
```python
import sys
import re
from math import isnan
from twisted.internet.defer import inlineCallbacks, returnValue
from twisted.logger import Logger
from autobahn.twisted.util import sleep
from autobahn.twisted.wamp import ApplicationSession, ApplicationRunner
from autobahn.wamp.exception import ApplicationError
from autobahn.wamp.types import RegisterOptions
from operator import itemgetter
import os
import argparse
import six
import txaio
import time
# Neuroarch Imports
import numpy as np
import simplejson as json
from pyorient.ogm import Graph, Config
import pyorient.ogm.graph
from collections import Counter
from configparser import ConfigParser
# Grab configuration from file
root = os.path.expanduser("/")
home = os.path.expanduser("~")
filepath = os.path.dirname(os.path.abspath(__file__))
config_files = []
config_files.append(os.path.join(home, "config", "ffbo.neuroarch_component.ini"))
config_files.append(os.path.join(root, "config", "ffbo.neuroarch_component.ini"))
config_files.append(os.path.join(home, "config", "config.ini"))
config_files.append(os.path.join(root, "config", "config.ini"))
config_files.append(os.path.join(filepath, "..", "config.ini"))
config = ConfigParser()
configured = False
file_type = 0
for config_file in config_files:
if os.path.exists(config_file):
config.read(config_file)
configured = True
break
file_type += 1
if not configured:
raise Exception("No config file exists for this component")
user = config["USER"]["user"]
secret = config["USER"]["secret"]
ssl = eval(config["AUTH"]["ssl"])
websockets = "wss" if ssl else "ws"
if "ip" in config["SERVER"]:
ip = config["SERVER"]["ip"]
else:
ip = "ffbo.processor"
port = config["NLP"]["expose-port"]
url = "%(ws)s://%(ip)s:%(port)s/ws" % {"ws":websockets, "ip":ip, "port":port}
realm = config["SERVER"]["realm"]
authentication = eval(config["AUTH"]["authentication"])
debug = eval(config["DEBUG"]["debug"])
ca_cert_file = config["AUTH"]["ca_cert_file"]
intermediate_cert_file = config["AUTH"]["intermediate_cert_file"]
# Required to handle dill's inability to serialize namedtuple class generator:
setattr(pyorient.ogm.graph, 'orientdb_version',
pyorient.ogm.graph.ServerVersion)
from neuroarch.models import *
from neuroarch.query import QueryWrapper, QueryString, _list_repr
from autobahn.wamp import auth
# User access
import state
from pyorient.serializations import OrientSerialization
import uuid
from twisted.internet import reactor, threads
from itertools import islice
def byteify(input):
if isinstance(input, dict):
return {byteify(key): byteify(value)
for key, value in input.iteritems()}
elif isinstance(input, list):
return [byteify(element) for element in input]
elif isinstance(input, unicode):
return input.encode('utf-8')
else:
return input
def chunks(data, SIZE=1000):
it = iter(data)
for i in xrange(0, len(data), SIZE):
yield {k:data[k] for k in islice(it, SIZE)}
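# Sketch of chunks() behaviour: it yields dicts of at most SIZE items each
# (ordering follows dict iteration order, so on Python 2 it is not guaranteed):
#
#     list(chunks({'a': 1, 'b': 2, 'c': 3}, SIZE=2))
#     # -> [{'a': 1, 'b': 2}, {'c': 3}]  (up to ordering)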
class neuroarch_server(object):
""" Methods to process neuroarch json tasks """
def __init__(self,database='/na_server',username='root',password='<PASSWORD>', user=None):
try:
self.graph = Graph(Config.from_url(database, username, password, initial_drop=False,serialization_type=OrientSerialization.Binary))
except:
#print "WARNING: Serialisation flag ignored"
self.graph = Graph(Config.from_url(database, username, password, initial_drop=False))
self.graph.include(Node.registry)
self.graph.include(Relationship.registry)
self.user = user
self.query_processor = query_processor(self.graph)
self._busy = False
def retrieve_neuron(self,nid):
# WIP: Currently retrieves all information for the get_as method, this will be refined when we know what data we want to store and pull out here
try:
n = self.graph.get_element(nid)
if n == None:
return {}
else:
output = QueryWrapper.from_objs(self.graph,[n])
return output.get_as()[0].to_json()
except Exception as e:
raise e
def process_query(self,task):
""" configure a task processing, and format the results as desired """
# WIP: Expand type of information that can be retrieved
        assert 'query' in task
try:
self.query_processor.process(task['query'],self.user)
return True
except Exception as e:
print e
return False
@staticmethod
def process_verb(output, user, verb):
if verb == 'add':
assert(len(user.state)>=2)
user.state[-1] = output+user.state[-2]
elif verb == 'keep':
assert(len(user.state)>=2)
user.state[-1] = output & user.state[-2]
output = user.state[-2] - user.state[-1]
elif verb == 'remove':
assert(len(user.state)>=2)
user.state[-1] = user.state[-2] - output
else:
assert(len(user.state)>=2)
cmd = {'undo':{'states':1}}
output = output & user.state[-2]
user.process_command(cmd)
return output
def receive_task(self,task, threshold=None, query_results=True):
""" process a task of form
{'query':...} or {'command': ...}
update the user states, and query neuroarch
This is the default access route
"""
while(self._busy):
time.sleep(1)
try:
self._busy = True
if not type(task) == dict:
task = json.loads(task)
task = byteify(task)
if 'format' not in task:
task['format'] = 'morphology'
assert 'query' in task or 'command' in task
user = self.user
if 'command' in task:
output = user.process_command(task['command'])
if 'verb' in task and not task['verb'] == 'show':
try:
output = self.process_verb(output, user, task['verb'])
except Exception as e:
print e
if not task['verb'] == 'add':
if task['format'] == 'morphology':
output = output.get_data_rids(cls='MorphologyData')
else:
output = output._records_to_list(output.nodes)
self._busy = False
return (output, True)
if isinstance(output, QueryWrapper):
#print output._records_to_list(output.nodes)
if task['format'] == 'morphology':
#df = output.get_data(cls='MorphologyData')[0]
try:
#output= df[['sample','identifier','x','y','z','r','parent','name']].to_dict(orient='index')
#output= df.to_dict(orient='index')
output = output.get_data(cls='MorphologyData', as_type='nx').node
except KeyError:
output = {}
elif task['format'] == 'no_result':
output = {}
elif task['format'] == 'get_data':
if 'cls' in task:
output = output.get_data(cls=task['cls'])[0].to_dict(orient='index')
else:
output = output.get_data()[0].to_dict(orient='index')
elif task['format'] == 'nx':
nx_graph = output.get_as('nx')
output = {'nodes': nx_graph.node, 'edges': nx_graph.edge}
elif task['format'] == 'nk':
output = output.traverse_owned_by_get_toplevel()
for x in output['LPU']:
g = output['LPU'][x].get_as('nx')
output['LPU'][x] = {'nodes': g.node, 'edges': g.edge}
for x in output['Pattern']:
g = output['Pattern'][x].get_as('nx')
output['Pattern'][x] = {'nodes': g.node, 'edges': g.edge}
elif task['format'] == 'df':
dfs = output.get_as()
output = {}
if 'node_cols' in task:
output['nodes'] = dfs[0][task['node_cols']].to_dict(orient='index')
else:
output['nodes'] = dfs[0].to_dict(orient='index')
if 'edge_cols' in task:
output['edges'] = dfs[1][task['edge_cols']].to_dict(orient='index')
else:
output['edges'] = dfs[1].to_dict(orient='index')
elif task['format'] == 'qw':
pass
# Default to nodes and edges df
else:
dfs = output.get_as()
output = {'nodes':dfs[0].to_dict(orient='index'),
'edges': dfs[1].to_dict(orient='index')}
else:
output = str(output)
if threshold and isinstance(output, dict):
chunked_output = []
for c in chunks(output, threshold):
chunked_output.append(c)
output = chunked_output
self._busy = False
return (output, True)
elif 'query' in task:
succ = self.process_query(task)
if query_results:
task['command'] = {"retrieve":{"state":0}}
output = (None,)
try:
self._busy = False
output = self.receive_task(task, threshold)
if output[0]==None:
succ=False
except Exception as e:
print e
succ = False
self._busy = False
if 'temp' in task and task['temp'] and len(user.state)>=2:
user.process_command({'undo':{'states':1}})
return (output[0], succ)
self._busy = False
return succ
except Exception as e:
print e
self._busy = False
class query_processor():
def __init__(self, graph):
self.class_list = {}
self.graph = graph
self.load_class_list()
def load_class_list(self):
# Dynamically build acceptable methods from the registry
# This could potentially be made stricter with a custom hardcoded subset
for k in Node.registry:
try:
plural = eval(k + ".registry_plural")
self.class_list[k]=eval("self.graph." + plural)
except:
print "Warning:Class %s left out of class list" % k
e = sys.exc_info()[0]
print e
#print self.class_list
def process(self,query_list,user):
""" take a query of the form
[{'object':...:,'action...'}]
"""
assert type(query_list) is list
task_memory = []
for q in query_list:
# Assume each query must act on the previous result, is this valid?
task_memory.append(self.process_single(q,user,task_memory))
if 'temp' in query_list[-1] and query_list[-1]['temp']:
return task_memory[-1]
output = task_memory[-1]
user.append(output)
return output
def process_single(self,query,user,task_memory):
""" accetpt a single query object or form
[{'object':...:,'action...'}]
"""
assert 'object' in query and 'action' in query
assert 'class' in query['object'] or 'state' in query['object'] or 'memory' in query['object']
'''
if 'class' in query['object']:
# Retrieve Class
class_name = query['object']['class']
na_object = self.class_list[class_name]
# convert result to a query wrapper to save
'''
if 'state' in query['object']:
state_num = query['object']['state']
if type(state_num) is long:
state_num = int(state_num)
assert type(state_num) in [int,long]
na_object = user.retrieve(index = state_num)
elif 'memory' in query['object']:
assert task_memory is not []
memory_index = query['object']['memory']
if type(memory_index) is long:
memory_index = int(memory_index)
assert type(memory_index) is int
assert len(task_memory) > memory_index
na_object = task_memory[-1-memory_index]
# Retrieve method
if 'method' in query['action']: # A class query can only take a method.
if 'class' in query['object']:
method_call = query['action']['method']
assert len(method_call.keys()) == 1
method_name = method_call.keys()[0]
method_args = method_call[method_name]
columns = ""
attrs = []
for k, v in method_args.iteritems():
if not(isinstance(v, list)):
if isinstance(v, (basestring, numbers.Number)):
v = [str(v)]
else:
# To prevent issues with unicode objects
if v and isinstance(v[0],basestring): v = [str(val) for val in v]
if len(v) == 1 and isinstance(v[0],(unicode,str)) and len(v[0])>=2 and str(v[0][:2]) == '/r':
attrs.append("%s matches '%s'" % (str(k), str(v[0][2:])))
else:
attrs.append("%s in %s" % (str(k), str(v)))
attrs = " and ".join(attrs)
if attrs: attrs = "where " + attrs
query['object']['class'] = _list_repr(query['object']['class'])
q = {}
for i, a in enumerate(query['object']['class']):
var = '$q'+str(i)
q[var] = "{var} = (select from {cls} {attrs})".format(var=var,
cls=str(a),
attrs=str(attrs))
query_str = "select from (select expand($a) let %s, $a = unionall(%s))" % \
(", ".join(q.values()), ", ".join(q.keys()) )
query_str = QueryString(query_str,'sql')
query_result = QueryWrapper(self.graph, query_str)
else:
method_call = query['action']['method']
assert len(method_call.keys()) == 1
method_name = method_call.keys()[0]
# check method is valid
assert method_name in dir(type(na_object))
# Retrieve arguments
method_args = byteify(method_call[method_name])
if 'pass_through' in method_args:
pass_through = method_args.pop('pass_through')
if isinstance(pass_through,list) and pass_through and isinstance(pass_through[0],list):
query_result = getattr(na_object, method_name)(*pass_through,**method_args)
else:
query_result = getattr(na_object, method_name)(pass_through,**method_args)
else:
query_result = getattr(na_object, method_name)(**method_args)
elif 'op' in query['action']:
method_call = query['action']['op']
assert len(method_call.keys()) == 1
method_name = method_call.keys()[0]
# WIP: Check which operators are supported
# What if we want to be a op between two past states!
# retieve past external state or internal memory state
if 'state' in method_call[method_name]:
state_num = method_call[method_name]['state']
assert type(state_num) in [int,long]
past_object = user.retrieve(index = state_num)
elif 'memory' in method_call[method_name]:
assert task_memory is not []
memory_index = method_call[method_name]['memory']
if type(memory_index) is long:
memory_index = int(memory_index)
assert type(memory_index) is int
assert len(task_memory) > memory_index
past_object = task_memory[-1-memory_index]
#query_result = getattr(na_object, method_name)(method_args)
## INVERSE THIS na and method argis (WHY?)
query_result = getattr(na_object, method_name)(past_object)
# convert result to a query wrapper to save
if type(query_result) is not QueryWrapper:
output = QueryWrapper.from_objs(self.graph,query_result.all())
else:
output = query_result
return output
class user_list():
def __init__(self,state_limit=10):
self.list = {}
self.state_limit = state_limit
pass
def user(self,user_id, database='/na_server',username='root',password='<PASSWORD>'):
if user_id not in self.list:
st = state.State(user_id)
self.list[user_id] = {'state': st,
'server': neuroarch_server(user=st)}
return self.list[user_id]
def cleanup(self):
cleansed = []
for user in self.list:
x = self.list[user]['state'].memory_management()
if x:
cleansed.append(user)
for user in cleansed:
del self.list[user]
return cleansed
class AppSession(ApplicationSession):
log = Logger()
def onConnect(self):
if self.config.extra['auth']:
self.join(self.config.realm, [u"wampcra"], user)
else:
self.join(self.config.realm, [], user)
def onChallenge(self, challenge):
if challenge.method == u"wampcra":
#print("WAMP-CRA challenge received: {}".format(challenge))
if u'salt' in challenge.extra:
# salted secret
key = auth.derive_key(secret,
challenge.extra['salt'],
challenge.extra['iterations'],
challenge.extra['keylen'])
else:
# plain, unsalted secret
key = secret
# compute signature for challenge, using the key
signature = auth.compute_wcs(key, challenge.extra['challenge'])
# return the signature to the router for verification
return signature
else:
raise Exception("Invalid authmethod {}".format(challenge.method))
def na_query_on_end(self):
self._current_concurrency -= 1
self.log.info('na_query() ended ({invocations} invocations, current concurrency {current_concurrency} of max {max_concurrency})', invocations=self._invocations_served, current_concurrency=self._current_concurrency, max_concurrency=self._max_concurrency)
@inlineCallbacks
def onJoin(self, details):
self._max_concurrency = 10
self._current_concurrency = 0
self._invocations_served = 0
self.user_list = user_list()
arg_kws = ['color']
reactor.suggestThreadPoolSize(self._max_concurrency*2)
verb_translations = {'unhide': 'show',
'color': 'setcolor',
'keep' : 'remove',
'blink' : 'animate',
'unblink' : 'unanimate'}
@inlineCallbacks
def na_query(task,details=None):
self._invocations_served += 1
self._current_concurrency += 1
if not isinstance(task, dict):
task = json.loads(task)
task = byteify(task)
user_id = task['user'] if (details.caller_authrole == 'processor' and 'user' in task) \
else details.caller
if not 'format' in task: task['format'] = 'morphology'
threshold = None
if details.progress:
threshold = task['threshold'] if 'threshold' in task else 20
if 'verb' in task and task['verb'] not in ['add','show']: threshold=None
if task['format'] != 'morphology': threshold=None
self.log.info("na_query() called with task: {task} ,(current concurrency {current_concurrency} of max {max_concurrency})", current_concurrency=self._current_concurrency, max_concurrency=self._max_concurrency, task=task)
server = self.user_list.user(user_id)['server']
(res, succ) = yield threads.deferToThread(server.receive_task, task, threshold)
uri = 'ffbo.ui.receive_msg.%s' % user_id
if not(type(uri)==six.text_type): uri = six.u(uri)
cmd_uri = 'ffbo.ui.receive_cmd.%s' % user_id
if not(type(cmd_uri)==six.text_type): cmd_uri = six.u(cmd_uri)
try:
if succ:
yield self.call(uri, {'info':{'success':
'Fetching results from NeuroArch'}})
else:
yield self.call(uri, {'info':{'error':
'Error executing query on NeuroArch'}})
except Exception as e:
print e
try:
if(task['format'] == 'morphology' and (not 'verb' in task or task['verb'] == 'show')):
yield self.call(cmd_uri,
{'commands': {'reset':''}})
except Exception as e:
print e
if('verb' in task and task['verb'] not in ['add','show']):
try:
task['verb'] = verb_translations[task['verb']]
except Exception as e:
pass
args = []
if 'color' in task: task['color'] = '#' + task['color']
for kw in arg_kws:
if kw in task: args.append(task[kw])
if len(args)==1: args=args[0]
yield self.call(cmd_uri, {'commands': {task['verb']: [res, args]}})
returnValue({'info':{'success':'Finished processing command'}})
else:
if ('data_callback_uri' in task and 'queryID' in task):
if threshold:
for c in res:
yield self.call(six.u(task['data_callback_uri'] + '.%s' % details.caller),
{'data': c, 'queryID': task['queryID']})
else:
yield self.call(six.u(task['data_callback_uri'] + '.%s' % details.caller),
{'data': res, 'queryID': task['queryID']})
self.na_query_on_end()
returnValue({'info': {'success':'Finished fetching all results from database'}})
else:
if details.progress and threshold:
for c in res:
details.progress(c)
self.na_query_on_end()
returnValue({'info': {'success':'Finished fetching all results from database'}})
else:
self.na_query_on_end()
returnValue({'info': {'success':'Finished fetching all results from database'},
'data': res})
uri = six.u( 'ffbo.na.query.%s' % str(details.session) )
yield self.register(na_query, uri, RegisterOptions(details_arg='details',concurrency=self._max_concurrency/2))
@inlineCallbacks
def get_data_sub(q):
res = q.get_as('nx').node.values()[0]
ds = q.owned_by(cls='DataSource')
if ds.nodes:
res['data_source'] = [x.name for x in ds.nodes]
else:
ds = q.get_data_qw().owned_by(cls='DataSource')
if ds.nodes:
res['data_source'] = [x.name for x in ds.nodes]
else:
res['data_source'] = ['Unknown']
subdata = q.get_data(cls=['NeurotransmitterData', 'GeneticData'],as_type='nx').node
ignore = ['name','uname','label','class']
key_map = {'Transmitters': 'transmitters'}#'transgenic_lines': 'Transgenic Lines'}
for x in subdata.values():
up_data = {(key_map[k] if k in key_map else k ):x[k] for k in x if k not in ignore}
res.update(up_data)
res = {'summary': res}
if 'FlyCircuit' in res['summary']['data_source']:
try:
flycircuit_data = yield self.call(six.u( 'ffbo.processor.fetch_flycircuit' ), res['summary']['name'])
res['summary']['flycircuit_data'] = flycircuit_data
except:
pass
arborization_data = q.get_data(cls='ArborizationData', as_type='nx').node
ignore = ['name','uname','label','class']
up_data = {}
for x in arborization_data.values():
key_map = {k:k for k in x}
if 'FlyCircuit' in res['summary']['data_source']:
key_map['dendrites'] = 'inferred_dendritic_segments'
key_map['axons'] = 'inferred_axonal_segments'
else:
key_map['dendrites'] = 'input_synapses'
key_map['axons'] = 'output_synapses'
up_data.update({key_map[k]:x[k] for k in x if k not in ignore})
if up_data: res['summary']['arborization_data'] = up_data
post_syn_q = q.gen_traversal_out(['SendsTo',['InferredSynapse', 'Synapse']],['SendsTo','Neuron'],min_depth=1)
pre_syn_q = q.gen_traversal_in(['SendsTo',['InferredSynapse', 'Synapse']],['SendsTo','Neuron'],min_depth=1)
post_syn = post_syn_q.get_as('nx')
pre_syn = pre_syn_q.get_as('nx')
if post_syn.nodes() or pre_syn.nodes():
post_rids = str(post_syn.nodes()).replace("'","")
pre_rids = str(pre_syn.nodes()).replace("'","")
post_map_command = "select $path from (traverse out('HasData') from %s while $depth<=1) where @class='MorphologyData'" % post_rids
pre_map_command = "select $path from (traverse out('HasData') from %s while $depth<=1) where @class='MorphologyData'" % pre_rids
post_map_l = [x.oRecordData['$path'] for x in q._graph.client.command(post_map_command)]
pre_map_l = [x.oRecordData['$path'] for x in q._graph.client.command(pre_map_command)]
post_map = {}
pre_map = {}
for p in post_map_l:
m = re.findall('\#\d+\:\d+', p)
if len(m)==2:
post_map[m[0]] = m[1]
for p in pre_map_l:
m = re.findall('\#\d+\:\d+', p)
if len(m)==2:
pre_map[m[0]] = m[1]
post_data = []
for (syn, neu) in post_syn.edges():
if not (post_syn.node[syn]['class'] == 'InferredSynapse' or
post_syn.node[syn]['class'] == 'Synapse'):
continue
info = {'has_morph': 0, 'has_syn_morph': 0}
if 'N' not in post_syn.node[syn]:
print post_syn.node[syn]
info['number'] = 1
else:
info['number'] = post_syn.node[syn]['N']
if neu in post_map:
info['has_morph'] = 1
info['rid'] = post_map[neu]
if syn in post_map:
info['has_syn_morph'] = 1
info['syn_rid'] = post_map[syn]
if 'uname' in post_syn.node[syn]:
info['syn_uname'] = post_syn.node[syn]['uname']
info['inferred'] = (post_syn.node[syn]['class'] == 'InferredSynapse')
info.update(post_syn.node[neu])
post_data.append(info)
post_data = sorted(post_data, key=lambda x: x['number'])
pre_data = []
for (neu, syn) in pre_syn.edges():
if not (pre_syn.node[syn]['class'] == 'InferredSynapse' or
pre_syn.node[syn]['class'] == 'Synapse'):
continue
info = {'has_morph': 0, 'has_syn_morph': 0}
if 'N' not in pre_syn.node[syn]:
print pre_syn.node[syn]
info['number'] = 1
else:
info['number'] = pre_syn.node[syn]['N']
if neu in pre_map:
info['has_morph'] = 1
info['rid'] = pre_map[neu]
if syn in pre_map:
info['has_syn_morph'] = 1
info['syn_rid'] = pre_map[syn]
if 'uname' in pre_syn.node[syn]:
info['syn_uname'] = pre_syn.node[syn]['uname']
info['inferred'] = (pre_syn.node[syn]['class'] == 'InferredSynapse')
info.update(pre_syn.node[neu])
pre_data.append(info)
pre_data = sorted(pre_data, key=lambda x: x['number'])
# Summary PreSyn Information
pre_sum = {}
for x in pre_data:
cls = x['name'].split('-')[0]
try:
                    if cls == '5': cls = '-'.join(x['name'].split('-')[:2])
except Exception as e:
pass
if cls in pre_sum: pre_sum[cls] += x['number']
else: pre_sum[cls] = x['number']
pre_N = np.sum(pre_sum.values())
pre_sum = {k: 100*float(v)/pre_N for (k,v) in pre_sum.items()}
# Summary PostSyn Information
post_sum = {}
for x in post_data:
cls = x['name'].split('-')[0]
if cls in post_sum: post_sum[cls] += x['number']
else: post_sum[cls] = x['number']
post_N = np.sum(post_sum.values())
post_sum = {k: 100*float(v)/post_N for (k,v) in post_sum.items()}
res.update({
'connectivity':{
'post': {
'details': post_data,
'summary': {
'number': post_N,
'profile': post_sum
}
}, 'pre': {
'details': pre_data,
'summary': {
'number': pre_N,
'profile': pre_sum
}
}
}
})
returnValue({'data':res})
def is_rid(rid):
if isinstance(rid, basestring) and re.search('^\#\d+\:\d+$', rid):
return True
else:
return False
def get_syn_data_sub(q):
res = q.get_as('nx').node.values()[0]
ds = q.owned_by(cls='DataSource')
if ds.nodes:
res['data_source'] = [x.name for x in ds.nodes]
else:
ds = q.get_data_qw().owned_by(cls='DataSource')
res['data_source'] = [x.name for x in ds.nodes]
subdata = q.get_data(cls=['NeurotransmitterData', 'GeneticData', 'MorphologyData'],as_type='nx').node
ignore = ['name','uname','label','class', 'x', 'y', 'z', 'r', 'parent', 'identifier', 'sample', 'morph_type']
key_map = {'Transmitters': 'transmitters', 'N': 'number'}#'transgenic_lines': 'Transgenic Lines'}
for x in subdata.values():
up_data = {(key_map[k] if k in key_map else k ):x[k] for k in x if k not in ignore}
res.update(up_data)
for x in res:
if x in key_map:
res[key_map[x]] = res[x]
del res[x]
if 'region' in res:
res['synapse_locations'] = Counter(res['region'])
del res['region']
res = {'data':{'summary': res}}
return res
@inlineCallbacks
def na_get_data(task,details=None):
if not isinstance(task, dict):
task = json.loads(task)
task = byteify(task)
user_id = task['user'] if (details.caller_authrole == 'processor' and 'user' in task) \
else details.caller
threshold = None
self.log.info("na_get_data() called with task: {task}",task=task)
server = self.user_list.user(user_id)['server']
try:
if not is_rid(task['id']):
returnValue({})
elem = server.graph.get_element(task['id'])
q = QueryWrapper.from_objs(server.graph,[elem])
callback = get_data_sub if elem.element_type == 'Neuron' else get_syn_data_sub
if not (elem.element_type == 'Neuron' or elem.element_type == 'Synapse' or elem.element_type=='InferredSynapse'):
qn = q.gen_traversal_in(['HasData','Neuron'],min_depth=1)
if not qn:
q = q.gen_traversal_in(['HasData',['Synapse', 'InferredSynapse']],min_depth=1)
else:
q = qn
callback = get_data_sub
#res = yield threads.deferToThread(get_data_sub, q)
res = yield callback(q)
except Exception as e:
print e
self.log.failure("Error Retrieveing Data")
res = {}
returnValue(res)
uri = six.u( 'ffbo.na.get_data.%s' % str(details.session) )
yield self.register(na_get_data, uri, RegisterOptions(details_arg='details',concurrency=1))
        # These users can mark a tag as featured or assign a tag to a featured list
approved_featured_tag_creators = []
def create_tag(task, details=None):
if not isinstance(task, dict):
task = json.loads(task)
task = byteify(task)
if not "tag" in task:
if "name" in task:
task["tag"] = task["name"]
del task["name"]
else:
return {"info":{"error":
"tag/name field must be provided"}}
if ('FFBOdata' in task and
details.caller_authrole == 'user' and
details.caller_authid not in approved_featured_tag_creators):
del task['FFBOdata']
user_id = task['user'] if (details.caller_authrole == 'processor' and 'user' in task) \
else details.caller
self.log.info("create_tag() called with task: {task} ",task=task)
server = self.user_list.user(user_id)['server']
(output,succ) = server.receive_task({"command":{"retrieve":{"state":0}},"format":"qw"})
if not succ:
return {"info":{"error":
"There was an error creating the tag"}}
if isinstance(output, QueryWrapper):
if 'metadata' in task:
succ = output.tag_query_result_node(tag=task['tag'],
permanent_flag=True,
**task['metadata'])
else:
succ = output.tag_query_result_node(tag=task['tag'],
permanent_flag=True)
if succ==-1:
return {"info":{"error":"The tag already exists. Please choose a different one"}}
else:
return {"info":{"success":"tag created successfully"}}
else:
return {"info":{"error":
"No data found in current workspace to create tag"}}
uri = six.u( 'ffbo.na.create_tag.%s' % str(details.session) )
yield self.register(create_tag, uri, RegisterOptions(details_arg='details',concurrency=1))
def retrieve_tag(task,details=None):
if not "tag" in task:
return {"info":{"error":
"Tag must be provided"}}
if not isinstance(task, dict):
task = json.loads(task)
task = byteify(task)
user_id = task['user'] if (details.caller_authrole == 'processor' and 'user' in task) \
else details.caller
self.log.info("retrieve_tag() called with task: {task} ",task=task)
server = self.user_list.user(user_id)['server']
tagged_result = QueryWrapper.from_tag(graph=server.graph, tag=task['tag'])
if tagged_result and tagged_result['metadata'] and tagged_result['metadata']!='{}':
server.user.append(tagged_result['qw'])
return {'data':tagged_result['metadata'],
'info':{'success':'Server Retrieved Tag Succesfully'}}
else:
return {"info":{"error":
"No such tag exists in this database server"}}
uri = six.u( 'ffbo.na.retrieve_tag.%s' % str(details.session) )
yield self.register(retrieve_tag, uri, RegisterOptions(details_arg='details',concurrency=1))
# Register a function to retrieve a single neuron information
def retrieve_neuron(nid):
self.log.info("retrieve_neuron() called with neuron id: {nid} ", nid = nid)
res = server.retrieve_neuron(nid)
print "retrieve neuron result: " + str(res)
return res
uri = six.u( 'ffbo.na.retrieve_neuron.%s' % str(details.session) )
yield self.register(retrieve_neuron, uri,RegisterOptions(concurrency=self._max_concurrency))
print "registered %s" % uri
# Listen for ffbo.processor.connected
@inlineCallbacks
def register_component():
self.log.info( "Registering a component")
# CALL server registration
try:
# registered the procedure we would like to call
res = yield self.call(six.u( 'ffbo.server.register' ),details.session,'na','na_server_with_vfb_links')
self.log.info("register new server called with result: {result}",
result=res)
except ApplicationError as e:
if e.error != 'wamp.error.no_such_procedure':
raise e
yield self.subscribe(register_component, six.u( 'ffbo.processor.connected' ))
self.log.info("subscribed to topic 'ffbo.processor.connected'")
# Register for memory management pings
@inlineCallbacks
def memory_management():
            cleansed_users = yield self.user_list.cleanup()
            self.log.info("Memory Manager removed users: {users}", users=cleansed_users)
            for user in cleansed_users:
try:
yield self.publish(six.u( "ffbo.ui.update.%s" % user ), "Inactivity Detected, State Memory has been cleared")
except Exception as e:
self.log.warn("Failed to alert user {user} or State Memory removal, with error {e}",user=user,e=e)
yield self.subscribe(memory_management, six.u( 'ffbo.processor.memory_manager' ))
self.log.info("subscribed to topic 'ffbo.processor.memory_management'")
register_component()
if __name__ == '__main__':
from twisted.internet._sslverify import OpenSSLCertificateAuthorities
from twisted.internet.ssl import CertificateOptions
import OpenSSL.crypto
# parse command line parameters
parser = argparse.ArgumentParser()
parser.add_argument('-d', '--debug', action='store_true', help='Enable debug output.')
parser.add_argument('--url', dest='url', type=six.text_type, default=url,
help='The router URL (defaults to value from config.ini)')
parser.add_argument('--realm', dest='realm', type=six.text_type, default=realm,
help='The realm to join (defaults to value from config.ini).')
parser.add_argument('--ca_cert', dest='ca_cert_file', type=six.text_type,
default=ca_cert_file,
help='Root CA PEM certificate file (defaults to value from config.ini).')
parser.add_argument('--int_cert', dest='intermediate_cert_file', type=six.text_type,
default=intermediate_cert_file,
help='Intermediate PEM certificate file (defaults to value from config.ini).')
parser.add_argument('--no-ssl', dest='ssl', action='store_false')
parser.set_defaults(ssl=ssl)
parser.set_defaults(debug=debug)
args = parser.parse_args()
# start logging
if args.debug:
txaio.start_logging(level='debug')
else:
txaio.start_logging(level='info')
# any extra info we want to forward to our ClientSession (in self.config.extra)
extra = {'auth': True}
if args.ssl:
st_cert=open(args.ca_cert_file, 'rt').read()
c=OpenSSL.crypto
ca_cert=c.load_certificate(c.FILETYPE_PEM, st_cert)
st_cert=open(args.intermediate_cert_file, 'rt').read()
intermediate_cert=c.load_certificate(c.FILETYPE_PEM, st_cert)
certs = OpenSSLCertificateAuthorities([ca_cert, intermediate_cert])
ssl_con = CertificateOptions(trustRoot=certs)
# now actually run a WAMP client using our session class ClientSession
runner = ApplicationRunner(url=args.url, realm=args.realm, extra=extra, ssl=ssl_con)
else:
# now actually run a WAMP client using our session class ClientSession
runner = ApplicationRunner(url=args.url, realm=args.realm, extra=extra)
runner.run(AppSession, auto_reconnect=True)
``` |
{
"source": "jonmason/kas",
"score": 2
} |
#### File: kas/kas/context.py
```python
import os
import logging
try:
import distro
def get_distro_id_base():
"""
Returns a compatible distro id.
"""
return distro.like() or distro.id()
except ImportError:
import platform
def get_distro_id_base():
"""
        Wrapper around platform.dist to simulate distro.id.
        platform.dist is deprecated and will be removed in python 3.7;
        use the 'distro' package instead.
"""
return platform.dist()[0]
__context__ = None
def create_global_context(args):
"""
Creates global context as singleton.
"""
global __context__
__context__ = Context(args)
return __context__
def get_context():
"""
Returns singleton global context.
"""
return __context__
class Context:
"""
Implements the kas build context.
"""
def __init__(self, args):
self.__kas_work_dir = os.environ.get('KAS_WORK_DIR', os.getcwd())
self.__kas_build_dir = os.environ.get('KAS_BUILD_DIR',
os.path.join(self.__kas_work_dir,
'build'))
self.__kas_repo_ref_dir = os.environ.get('KAS_REPO_REF_DIR', None)
self.setup_initial_environ()
self.config = None
self.args = args
def setup_initial_environ(self):
"""
Sets the environment variables for processes that are
started by kas.
"""
self.environ = {}
distro_bases = get_distro_id_base().lower().split()
for distro_base in distro_bases:
if distro_base in ['fedora', 'suse', 'opensuse']:
self.environ = {'LC_ALL': 'en_US.utf8',
'LANG': 'en_US.utf8',
'LANGUAGE': 'en_US'}
break
elif distro_base in ['debian', 'ubuntu', 'gentoo']:
self.environ = {'LC_ALL': 'en_US.UTF-8',
'LANG': 'en_US.UTF-8',
'LANGUAGE': 'en_US:en'}
break
if self.environ == {}:
logging.warning('kas: No supported distros found in %s. '
'No default locales set.', distro_bases)
for key in ['http_proxy', 'https_proxy', 'ftp_proxy', 'no_proxy',
'SSH_AUTH_SOCK']:
val = os.environ.get(key, None)
if val:
self.environ[key] = val
@property
def build_dir(self):
"""
The path to the build directory
"""
return self.__kas_build_dir
@property
def kas_work_dir(self):
"""
The path to the kas work directory
"""
return self.__kas_work_dir
@property
def kas_repo_ref_dir(self):
"""
The reference directory for the repo
"""
return self.__kas_repo_ref_dir
@property
def force_checkout(self):
return getattr(self.args, 'force_checkout', None)
@property
def update(self):
return getattr(self.args, 'update', None)
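# Typical lifecycle sketch (hypothetical; `args` is whatever argparse
# namespace kas builds for the current command):
#
#     ctx = create_global_context(args)  # once, at startup
#     assert get_context() is ctx        # everywhere else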
``` |
{
"source": "jonmatthis/napari-DeepLabCut",
"score": 2
} |
#### File: napari-DeepLabCut/dlclabel/io.py
```python
import glob
import numpy as np
import os
import pandas as pd
import yaml
from dask_image.imread import imread
from dlclabel import misc
from itertools import groupby
from napari.layers import Shapes
from napari.plugins._builtins import napari_write_shapes
from napari.types import LayerData
from skimage.io import imsave
from skimage.util import img_as_ubyte
from typing import Any, Dict, List, Optional, Sequence, Union
SUPPORTED_IMAGES = "jpg", "jpeg", "png"
def handle_path(path: Union[str, Sequence[str]]) -> Union[str, Sequence[str]]:
"""Dispatch files in folder to the relevant plugin readers."""
paths = [path] if isinstance(path, str) else path
    if not isinstance(paths, (tuple, list)):
        raise ValueError("'path' argument must be a string, list, or tuple")
    paths = [os.fspath(path) for path in paths]
# Test first whether a 'labeled-data' folder was passed in
if len(paths) == 1:
path = paths[0]
if os.path.isdir(path):
files = os.listdir(path)
images = ""
for file in files:
if any(file.endswith(ext) for ext in SUPPORTED_IMAGES):
images = os.path.join(path, f"*{os.path.splitext(file)[1]}")
break
if not images:
raise IOError("No supported images were found.")
datafile = ""
for file in files:
if file.endswith(".h5"):
datafile = os.path.join(path, "*.h5")
break
if datafile:
return [images, datafile]
return [images]
return paths
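# Dispatch sketch (hypothetical folder layout): for a 'labeled-data' style
# folder containing supported images and an HDF file, handle_path() returns
# glob patterns for both so each goes to the matching reader below:
#
#     handle_path("labeled-data/video1")
#     # -> ["labeled-data/video1/*.png", "labeled-data/video1/*.h5"]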
def _populate_metadata(
header: misc.DLCHeader,
*,
labels: Optional[Sequence[str]] = None,
ids: Optional[Sequence[str]] = None,
likelihood: Optional[Sequence[float]] = None,
paths: Optional[List[str]] = None,
size: Optional[int] = 8,
pcutoff: Optional[float] = 0.6,
colormap: Optional[str] = "viridis",
) -> Dict:
if labels is None:
labels = header.bodyparts
if ids is None:
ids = header.individuals
if likelihood is None:
likelihood = np.ones(len(labels))
label_colors = misc.build_color_cycle(len(header.bodyparts), colormap)
id_colors = misc.build_color_cycle(len(header.individuals), colormap)
face_color_cycle_maps = {
"label": dict(zip(header.bodyparts, label_colors)),
"id": dict(zip(header.individuals, id_colors)),
}
return {
"name": "keypoints",
"text": "label",
"properties": {
"label": list(labels),
"id": list(ids),
"likelihood": likelihood,
"valid": likelihood > pcutoff,
},
"face_color_cycle": label_colors,
"edge_color": "valid",
"edge_color_cycle": ["black", "red"],
"size": size,
"metadata": {
"header": header,
"face_color_cycle_maps": face_color_cycle_maps,
"paths": paths or [],
},
}
def _load_config(config_path: str):
with open(config_path) as file:
return yaml.safe_load(file)
def read_config(configname: str) -> List[LayerData]:
config = _load_config(configname)
header = misc.DLCHeader.from_config(config)
metadata = _populate_metadata(
header,
size=config["dotsize"],
pcutoff=config["pcutoff"],
colormap=config["colormap"],
)
metadata["name"] = f"CollectedData_{config['scorer']}"
return [(None, metadata, "points")]
def read_images(path: Union[str, List[str]]) -> List[LayerData]:
if isinstance(path, list):
root, ext = os.path.splitext(path[0])
path = os.path.join(os.path.dirname(root), f"*{ext}")
# Retrieve filepaths exactly as parsed by pims
filepaths = []
for filepath in sorted(glob.glob(path)):
_, *relpath = filepath.rsplit(os.sep, 3)
filepaths.append(os.path.join(*relpath))
params = {
"name": "images",
"metadata": {
"paths": filepaths,
"root": os.path.split(path)[0]
}
}
return [(imread(path), params, "image")]
def read_hdf(filename: str) -> List[LayerData]:
layers = []
for filename in glob.glob(filename):
temp = pd.read_hdf(filename)
header = misc.DLCHeader(temp.columns)
temp = temp.droplevel("scorer", axis=1)
if "individuals" not in temp.columns.names:
# Append a fake level to the MultiIndex
# to make it look like a multi-animal DataFrame
old_idx = temp.columns.to_frame()
old_idx.insert(0, "individuals", "")
temp.columns = pd.MultiIndex.from_frame(old_idx)
df = temp.stack(["individuals", "bodyparts"]).reset_index()
nrows = df.shape[0]
data = np.empty((nrows, 3))
image_paths = df["level_0"]
if np.issubdtype(image_paths.dtype, np.number):
image_inds = image_paths.values
paths2inds = []
else:
image_inds, paths2inds = misc.encode_categories(image_paths, return_map=True)
data[:, 0] = image_inds
data[:, 1:] = df[["y", "x"]].to_numpy()
metadata = _populate_metadata(
header,
labels=df["bodyparts"],
ids=df["individuals"],
likelihood=df.get("likelihood"),
paths=list(paths2inds),
)
metadata["name"] = os.path.split(filename)[1].split(".")[0]
metadata["metadata"]["root"] = os.path.split(filename)[0]
layers.append((data, metadata, "points"))
return layers
def write_hdf(filename: str, data: Any, metadata: Dict) -> Optional[str]:
temp = pd.DataFrame(data[:, -1:0:-1], columns=["x", "y"])
properties = metadata["properties"]
meta = metadata["metadata"]
temp["bodyparts"] = properties["label"]
temp["individuals"] = properties["id"]
temp["inds"] = data[:, 0].astype(int)
temp["likelihood"] = properties["likelihood"]
temp["scorer"] = meta["header"].scorer
df = temp.set_index(["scorer", "individuals", "bodyparts", "inds"]).stack()
df.index = df.index.set_names("coords", -1)
df = df.unstack(["scorer", "individuals", "bodyparts", "coords"])
df.index.name = None
if not properties["id"][0]:
df = df.droplevel("individuals", axis=1)
df = df.reindex(meta["header"].columns, axis=1)
if meta["paths"]:
df.index = [meta["paths"][i] for i in df.index]
name = metadata["name"]
root = meta["root"]
if "machine" in name: # We are attempting to save refined model predictions
df.drop("likelihood", axis=1, level="coords", inplace=True)
header = misc.DLCHeader(df.columns)
gt_file = ""
for file in os.listdir(root):
if file.startswith("CollectedData") and file.endswith("h5"):
gt_file = file
break
if gt_file: # Refined predictions must be merged into the existing data
df_gt = pd.read_hdf(os.path.join(root, gt_file))
new_scorer = df_gt.columns.get_level_values("scorer")[0]
header.scorer = new_scorer
df.columns = header.columns
df = pd.concat((df, df_gt))
df = df[~df.index.duplicated(keep="first")]
name = os.path.splitext(gt_file)[0]
else:
# Let us fetch the config.yaml file to get the scorer name...
project_folder = root.rsplit(os.sep, 2)[0]
config = _load_config(os.path.join(project_folder, "config.yaml"))
new_scorer = config["scorer"]
header.scorer = new_scorer
df.columns = header.columns
name = f"CollectedData_{new_scorer}"
df.sort_index(inplace=True)
filename = name + ".h5"
df.to_hdf(os.path.join(root, filename), key="df_with_missing")
return filename
def write_masks(foldername: str, data: Any, metadata: Dict) -> Optional[str]:
folder, _ = os.path.splitext(foldername)
os.makedirs(folder, exist_ok=True)
filename = os.path.join(folder, "{}_obj_{}.png")
shapes = Shapes(data, shape_type="polygon")
meta = metadata["metadata"]
frame_inds = [int(array[0, 0]) for array in data]
shape_inds = []
for _, group in groupby(frame_inds):
shape_inds += range(sum(1 for _ in group))
masks = shapes.to_masks(mask_shape=meta["shape"][1:])
for n, mask in enumerate(masks):
image_name = os.path.basename(meta["paths"][frame_inds[n]])
output_path = filename.format(os.path.splitext(image_name)[0], shape_inds[n])
imsave(output_path, img_as_ubyte(mask).squeeze(), check_contrast=False)
napari_write_shapes(os.path.join(folder, "vertices.csv"), data, metadata)
return folder
``` |
{
"source": "jonmcalder/ontology-development-kit",
"score": 3
} |
#### File: ontology-development-kit/odk/make-release-assets.py
```python
from github import Github
import os
import click
import logging
logging.basicConfig(level=logging.INFO)
@click.command()
@click.option("-k", "--dry-run/--no-dry-run", default=False)
@click.option("-f", "--force/--no-force", default=False)
@click.option("-c", "--create/--no-create", default=False)
@click.option("-t", "--token")
@click.option("-o", "--org")
@click.option("-r", "--repo", default="mondo")
@click.option("-v", "--release", default="v2018-08-24")
@click.argument("paths", nargs=-1)
def make_assets(dry_run, force, create, token, org, repo, release, paths):
if '/' in repo:
[org,repo] = repo.split('/')
    if org is None:
org = 'monarch-initiative'
logging.info('org={} repo={} rel={}'.format(org, repo, release))
    if token is None:
with open('.token') as f:
logging.info("Reading token from file")
token = f.read().rstrip().lstrip()
G = Github(token)
G_org = G.get_organization(org)
G_repo = G_org.get_repo(repo)
if create:
message = ""
if release == 'current':
message = "Running current release. This will be re-created with each release"
for r in G_repo.get_releases():
if r.tag_name == release:
if force:
logging.info("Release already exists - will delete and re-create")
r.delete_release()
else:
logging.error("Release already exists!")
logging.info("Creating release")
G_repo.create_git_release(release, release, message, draft=False, prerelease=False)
G_rel = G_repo.get_release(release)
existing_assets = {}
print('Existing assets:')
for a in G_rel.get_assets():
print('Asset: {} Size: {} Downloads: {}'.format(a.name, a.size, a.download_count))
existing_assets[a.name] = a
if (dry_run):
print("DRY RUN")
else:
for path in paths:
bn = os.path.basename(path)
logging.info("Testing if {} in {}".format(bn, existing_assets))
if bn in existing_assets.keys():
a = existing_assets[bn]
if force:
logging.info("{} already exists; will replace".format(path))
a.delete_asset()
else:
logging.error("{} already exists".format(path))
# TODO: if asset already exists, skip; add a --force option to explicitly overwrite
print('Uploading: {}'.format(path))
a = G_rel.upload_asset(path=path)
print('Uploaded: {} {} from {}'.format(a.name, a.size, path))
if __name__ == "__main__":
make_assets()
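# Example invocation (hypothetical org/repo/asset names; requires a GitHub
# token in ./.token or passed via --token):
#
#     python make-release-assets.py -o my-org -r my-repo -v v2021-01-01 \
#         --create release.owl release.obo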
``` |
{
"source": "jonmct123/molssi_best_practices_2021",
"score": 3
} |
#### File: molecool/tests/test_molecool.py
```python
import molecool
import numpy as np
import pytest
import sys
def test_molecool_imported():
"""Sample test, will always pass so long as import statement worked"""
assert "molecool" in sys.modules
def test_build_bond_list():
coord = np.array([[1,1,1],[2.4,1,1],[-0.4,1,1],[1,1,2.4],[1,1,-0.4]])
bonds = molecool.build_bond_list(coord)
assert len(bonds) == 4
for bond_length in bonds.values():
assert bond_length == 1.4
```
#### File: molecool/tests/test_molecule.py
```python
import pytest
import numpy as np
import molecool
def test_molecular_mass():
symbols = ['C', 'H', 'H', 'H', 'H']
calculated_mass = molecool.calculate_molecular_mass(symbols)
actual_mass = 16.04
assert pytest.approx(actual_mass, abs=1e-2) == calculated_mass
"""
def test_center_of_mass():
symbols = np.array(['C', 'H', 'H', 'H', 'H'])
coordinates = np.array([[1,1,1], [2.4,1,1], [-0.4, 1, 1], [1, 1, 2.4], [1, 1, -0.4]])
center_of_mass = molecool.calculate_center_of_mass(symbols, coordinates)
expected_center = np.array([1,1,1])
assert np.array_equal(expected_center, center_of_mass)
"""
``` |